91 Commits

Author SHA1 Message Date
long2ice
6339dc86a8 Fix migrate to new database error 2020-10-14 20:33:23 +08:00
long2ice
768747140a update changelog 2020-10-12 21:01:53 +08:00
long2ice
1fde3cd04e fix init KeyError (#61) 2020-10-12 20:59:13 +08:00
long2ice
d0ce545ff5 Fix first version error 2020-10-10 15:07:09 +08:00
long2ice
09b89ed7d0 update README 2020-10-09 15:41:37 +08:00
long2ice
86c8382593 update README 2020-10-09 15:34:48 +08:00
long2ice
48e3ff48a3 update README.md 2020-10-09 12:04:36 +08:00
long2ice
1bf6d45bb0 Merge pull request #60 from tortoise/refactoring
Refactoring migrate logic
2020-10-09 11:54:37 +08:00
long2ice
342f4cdd3b complete refactoring 2020-10-09 11:53:50 +08:00
long2ice
8cace21fde refactoring migrate logic 2020-10-09 00:05:22 +08:00
long2ice
9889d9492b add ? when ask rename 2020-09-28 17:22:05 +08:00
long2ice
823368aea8 fix event loop error 2020-09-28 17:16:35 +08:00
long2ice
6b1ad46cf1 update README.md 2020-09-28 12:50:43 +08:00
long2ice
ce8c0b1f06 Support db_constraint in fk 2020-09-28 10:40:04 +08:00
long2ice
43922d3734 update CHANGELOG.md 2020-09-27 14:18:19 +08:00
long2ice
48c5318737 remove asyncclick 2020-09-25 18:27:08 +08:00
long2ice
002221e557 update README.md 2020-09-25 17:51:31 +08:00
long2ice
141d7205bf Add Rename support 2020-09-25 17:48:32 +08:00
long2ice
af4d4be19a Fix Postgres alter table 2020-09-24 15:02:28 +08:00
long2ice
3b4d9b47ce update version 2020-09-23 18:33:31 +08:00
long2ice
4b0c4ae7d0 fix test error 2020-09-21 15:03:16 +08:00
long2ice
dc821d8a02 Merge remote-tracking branch 'origin/dev' into dev 2020-09-21 11:46:21 +08:00
long2ice
d18a6b5be0 add sqlite not support error 2020-09-21 11:44:34 +08:00
long2ice
1e56a70f21 Merge pull request #44 from saintlyzero/dev
append new line in cp_models
2020-09-16 22:21:09 +08:00
saintlyzero
ab1e1aab75 append new line in cp_models 2020-09-16 19:30:03 +05:30
long2ice
00dd04f97d update conftest.py 2020-09-10 18:42:19 +08:00
long2ice
fc914acc80 update README.md 2020-09-09 21:10:10 +08:00
long2ice
ac03ecb002 update README.md 2020-09-02 13:34:53 +08:00
long2ice
235ef3f7ea fix datetime string format 2020-08-19 09:28:36 +08:00
long2ice
e00eb7f3d9 update README.md 2020-08-17 12:40:39 +08:00
long2ice
d6c8941676 update CHANGELOG.rst 2020-08-07 18:12:40 +08:00
long2ice
cf062c9310 update CHANGELOG.rst 2020-08-07 09:29:30 +08:00
long2ice
309adec8c9 Merge pull request #34 from moubctez/postgresql_unique
PostgreSQL add/drop index/unique
2020-08-07 09:27:03 +08:00
Adam Ciarciński
8674142ba8 Fix test_migrate 2020-08-06 17:59:12 +02:00
Adam Ciarciński
cda9bd1c47 Save 1 space in tests 2020-08-06 17:50:28 +02:00
Adam Ciarciński
198e4e0032 Save 1 space 2020-08-06 17:49:26 +02:00
Adam Ciarciński
1b440477a2 PostgreSQL add/drop index/unique 2020-08-06 17:45:56 +02:00
long2ice
1263c6f735 Fix tortoise ssl config 2020-07-30 11:30:02 +08:00
long2ice
6504384879 update README.md 2020-07-29 10:30:25 +08:00
long2ice
17ab0a1421 fix version sort and add test 2020-07-29 10:21:43 +08:00
long2ice
e1ffcb609b fix version sort 2020-07-28 18:31:45 +08:00
long2ice
18cb75f555 Merge pull request #32 from psbleep/close-tortoise-connections
Close database connections
2020-07-26 16:52:08 +08:00
Patrick Schneeweis
dfe13ea250 add type hint 2020-07-25 15:22:05 -04:00
Patrick Schneeweis
b97ce0ff2f Use close_db decorator on history command 2020-07-24 09:26:29 -04:00
Patrick Schneeweis
21001c0eda Make history command into async function
The close_db decorator requires async functions
2020-07-24 09:24:43 -04:00
Patrick Schneeweis
9bd96a9487 Use close_db decorator on command functions 2020-07-24 09:22:57 -04:00
Patrick Schneeweis
5bdaa32a9e Decorator for closing db connection after func run 2020-07-24 09:17:19 -04:00
long2ice
d74e7b5630 Merge pull request #30 from psbleep/longer-migration-version-names
Allow longer migration version names
2020-07-24 10:28:52 +08:00
Patrick Schneeweis
119b15d597 style fixes 2020-07-23 21:29:26 -04:00
Patrick Schneeweis
f93ab6bbff Raise name length error when creating migration.
Currently this error gets raised when trying to apply the migration, but
it seems better to learn about it when trying to create the migration.
2020-07-23 21:22:30 -04:00
Patrick Schneeweis
bd8eb94a6e Increase max length of version column 2020-07-23 21:21:58 -04:00
long2ice
2fcb2626fd Fix postgres drop fk 2020-07-20 10:20:08 +08:00
long2ice
3728db4279 Merge remote-tracking branch 'origin/dev' into dev 2020-07-15 12:58:56 +08:00
long2ice
6ac1fb5332 add ads 2020-07-15 12:58:34 +08:00
long2ice
87a17d443c update py version 3.7 2020-07-13 20:32:14 +08:00
long2ice
55f18a69ed Merge pull request #23 from long2ice/master
Master
2020-07-09 14:34:37 +08:00
long2ice
86a9c4cedd Merge branch 'dev' 2020-07-09 14:28:09 +08:00
long2ice
86e1d3defb fix test error 2020-07-08 21:00:30 +08:00
long2ice
01fa7fbbdb Merge pull request #21 from moubctez/postgresql_fixes
Enhance PostgreSQL support
2020-07-08 20:54:40 +08:00
Adam Ciarciński
90196eb1bf Apply black 2020-07-08 14:38:42 +02:00
Adam Ciarciński
3c111792a9 Enhance PostgreSQL support 2020-07-07 19:54:55 +02:00
long2ice
77e9d7bc91 update deps 2020-07-06 00:10:07 +08:00
long2ice
fe2ddff88b update version 2020-06-24 15:17:52 +08:00
long2ice
0d23297f46 Fix bug in windows. 2020-06-24 15:17:30 +08:00
long2ice
6be6d55e5b Merge remote-tracking branch 'origin/dev' into dev 2020-06-22 12:55:46 +08:00
long2ice
25674bc73a update Makefile 2020-06-22 12:55:26 +08:00
long2ice
1715eda1a3 Merge pull request #11 from FutureSenzhong/dev
updata:File operations use utf8 encoding
2020-06-22 12:27:22 +08:00
邓森中
f5775049dd updata:File operations use utf8 encoding 2020-06-22 11:18:52 +08:00
long2ice
6fd0f8a42f update README.md licence 2020-06-20 12:49:43 +08:00
long2ice
f52dc009af update README.md 2020-06-20 12:48:32 +08:00
long2ice
9248d456f9 update license 2020-06-17 11:39:15 +08:00
long2ice
c24f2f6b09 update pyproject.toml 2020-06-12 18:11:19 +08:00
long2ice
19c9c2c30f Merge branch 'dev' 2020-06-12 17:53:30 +08:00
long2ice
73b75349ee update version 0.2.0 2020-06-12 17:53:17 +08:00
long2ice
7bc553221a raise NotImplementedError 2020-06-12 09:31:01 +08:00
long2ice
7413a05e19 set --safe bool 2020-06-08 18:07:41 +08:00
long2ice
bf194ca8ce Update model file find method 2020-06-03 18:42:35 +08:00
long2ice
b06da0223a add --build 2020-06-03 09:39:52 +08:00
long2ice
83554cdc5d Merge remote-tracking branch 'origin/dev' into dev
# Conflicts:
#	.github/workflows/pypi.yml
#	.github/workflows/test.yml
#	Makefile
2020-06-03 09:38:20 +08:00
long2ice
6c76bfccad Merge remote-tracking branch 'origin/dev' into dev 2020-06-02 22:23:26 +08:00
long2ice
a1746e457c update github actions 2020-06-02 22:23:15 +08:00
long2ice
2a0435dea9 update github actions 2020-06-02 22:14:44 +08:00
long2ice
e87f67f1e1 update github actions 2020-06-02 22:02:46 +08:00
long2ice
7b4b7ac749 update github actions 2020-06-02 18:58:59 +08:00
long2ice
5b9b51db3f update github actions 2020-06-02 18:38:39 +08:00
long2ice
ffeee3c901 update github actions 2020-06-02 18:28:50 +08:00
long2ice
b4366d2427 update github actions 2020-06-02 18:20:55 +08:00
long2ice
ec1c80f3a9 remove requirements 2020-06-01 14:57:29 +08:00
long2ice
d2083632eb add cli -V 2020-05-27 12:44:49 +08:00
long2ice
dc8b4c2263 Merge remote-tracking branch 'origin/master' 2020-05-27 11:22:01 +08:00
long2ice
cb5dffeeb8 Merge branch 'dev' 2020-05-26 20:05:35 +08:00
34 changed files with 1435 additions and 910 deletions

View File

@@ -8,12 +8,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v1
- uses: actions/setup-python@v2
with:
python-version: '3.x'
- uses: dschep/install-poetry-action@v1.3
- name: Build dists
run: |
python3 setup.py sdist
run: make build
- name: Pypi Publish
uses: pypa/gh-action-pypi-publish@master
with:

View File

@@ -19,10 +19,7 @@ jobs:
- uses: actions/setup-python@v2
with:
python-version: '3.x'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements-dev.txt
- uses: dschep/install-poetry-action@v1.3
- name: CI
env:
MYSQL_PASS: root
@@ -31,4 +28,4 @@ jobs:
POSTGRES_PASS: 123456
POSTGRES_HOST: 127.0.0.1
POSTGRES_PORT: 5432
run: make testall
run: make ci

CHANGELOG.md Normal file
View File

@@ -0,0 +1,106 @@
# ChangeLog
## 0.3
### 0.3.2
- Fix migrate to new database error. (#62)
### 0.3.1
- Fix first version error.
- Fix init error. (#61)
### 0.3.0
- Refactor migrate logic; this version is not compatible with previous versions.
- `old_models.py` is no longer needed; the old model state is now stored in the database.
- Upgrade steps:
1. Upgrade aerich version.
2. Drop aerich table in database.
3. Delete `migrations/{app}` folder and rerun `aerich init-db`.
4. Update model and `aerich migrate` normally.
## 0.2
### 0.2.5
- Fix windows support. (#46)
- Support `db_constraint` in FK; M2M relations require manually defining the through table with FK. (#52)
### 0.2.4
- Raise error with SQLite unsupported features.
- Fix Postgres alter table. (#48)
- Add `Rename` support.
### 0.2.3
- Fix tortoise ssl config.
- PostgreSQL add/drop index/unique.
### 0.2.2
- Fix postgres drop fk.
- Fix version sort.
### 0.2.1
- Fix bug in windows.
- Enhance PostgreSQL support.
### 0.2.0
- Update model file find method.
- Set `--safe` bool.
## 0.1
### 0.1.9
- Fix default_connection when upgrade
- Find default app instead of default.
- Diff MySQL ddl.
- Check tortoise config.
### 0.1.8
- Fix upgrade error when migrate.
- Fix init db sql error.
- Support change column.
### 0.1.7
- Exclude models.Aerich.
- Add init record when init-db.
- Fix version num str.
### 0.1.6
- update dependency_links
### 0.1.5
- Add sqlite and postgres support.
- Fix dependency import.
- Store versions in db.
### 0.1.4
- Fix transaction and fields import.
- Make unique index worked.
- Add cli --version.
### 0.1.3
- Support indexes and unique_together.
### 0.1.2
- Now aerich support m2m.
- Add cli cmd init-db.
- Change cli options.
### 0.1.1
- Now aerich is basic worked.

View File

@@ -1,54 +0,0 @@
=========
ChangeLog
=========
0.1
===
0.1.9
-----
- Fix default_connection when upgrade
- Find default app instead of default.
- Diff MySQL ddl.
- Check tortoise config.
0.1.8
-----
- Fix upgrade error when migrate.
- Fix init db sql error.
- Support change column.
0.1.7
-----
- Exclude models.Aerich.
- Add init record when init-db.
- Fix version num str.
0.1.6
-----
- update dependency_links
0.1.5
-----
- Add sqlite and postgres support.
- Fix dependency import.
- Store versions in db.
0.1.4
-----
- Fix transaction and fields import.
- Make unique index worked.
- Add cli --version.
0.1.3
-----
- Support indexes and unique_together.
0.1.2
-----
- Now aerich support m2m.
- Add cli cmd init-db.
- Change cli options.
0.1.1
-----
- Now aerich is basic worked.

LICENSE
View File

@@ -1,21 +1,201 @@
The MIT License (MIT)
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
Copyright (c) 2020 long2ice
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
1. Definitions.
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2020 long2ice
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@@ -1,3 +0,0 @@
include LICENSE
include README.rst
include requirements.txt

View File

@@ -18,23 +18,20 @@ help:
@echo " test Runs all tests"
@echo " style Auto-formats the code"
up:
@poetry update
deps:
@which pip-sync > /dev/null || pip install -q pip-tools
@pip install -r requirements-dev.txt
@poetry install -E dbdrivers --no-root
style: deps
isort -rc $(checkfiles)
isort -src $(checkfiles)
black $(black_opts) $(checkfiles)
check: deps
ifneq ($(shell which black),)
black --check $(black_opts) $(checkfiles) || (echo "Please run 'make style' to auto-fix style issues" && false)
endif
flake8 $(checkfiles)
mypy $(checkfiles)
pylint -d C,W,R $(checkfiles)
bandit -r $(checkfiles)
python setup.py check -mrs
bandit -x tests -r $(checkfiles)
test: deps
$(py_warn) TEST_DB=sqlite://:memory: py.test
@@ -43,17 +40,14 @@ test_sqlite:
$(py_warn) TEST_DB=sqlite://:memory: py.test
test_mysql:
$(py_warn) TEST_DB="mysql://root:$(MYSQL_PASS)@$(MYSQL_HOST):$(MYSQL_PORT)/test_\{\}" py.test
$(py_warn) TEST_DB="mysql://root:$(MYSQL_PASS)@$(MYSQL_HOST):$(MYSQL_PORT)/test_\{\}" pytest -vv -s
test_postgres:
$(py_warn) TEST_DB="postgres://postgres:$(POSTGRES_PASS)@$(POSTGRES_HOST):$(POSTGRES_PORT)/test_\{\}" py.test
$(py_warn) TEST_DB="postgres://postgres:$(POSTGRES_PASS)@$(POSTGRES_HOST):$(POSTGRES_PORT)/test_\{\}" pytest
testall: deps test_sqlite test_postgres test_mysql
publish: deps
rm -fR dist/
python setup.py sdist
twine upload dist/*
build: deps
@poetry build
ci:
@act -P ubuntu-latest=nektos/act-environments-ubuntu:18.04 -b
ci: check testall

README.md Normal file
View File

@@ -0,0 +1,176 @@
# Aerich
[![image](https://img.shields.io/pypi/v/aerich.svg?style=flat)](https://pypi.python.org/pypi/aerich)
[![image](https://img.shields.io/github/license/long2ice/aerich)](https://github.com/long2ice/aerich)
[![image](https://github.com/long2ice/aerich/workflows/pypi/badge.svg)](https://github.com/long2ice/aerich/actions?query=workflow:pypi)
[![image](https://github.com/long2ice/aerich/workflows/test/badge.svg)](https://github.com/long2ice/aerich/actions?query=workflow:test)
## Introduction
Aerich is a database migrations tool for Tortoise-ORM, comparable to Alembic for SQLAlchemy or the Django ORM's built-in migrations.
**If you upgrade aerich from <= 0.2.5 to >= 0.3.0, see [changelog](https://github.com/tortoise/aerich/blob/dev/CHANGELOG.md) for upgrade steps.**
## Install
Just install from pypi:
```shell
> pip install aerich
```
## Quick Start
```shell
> aerich -h
Usage: aerich [OPTIONS] COMMAND [ARGS]...
Options:
-c, --config TEXT Config file. [default: aerich.ini]
--app TEXT Tortoise-ORM app name. [default: models]
-n, --name TEXT Name of section in .ini file to use for aerich config.
[default: aerich]
-h, --help Show this message and exit.
Commands:
downgrade Downgrade to specified version.
heads Show current available heads in migrate location.
history List all migrate items.
init Init config file and generate root migrate location.
init-db Generate schema and generate app migrate location.
migrate Generate migrate changes file.
upgrade Upgrade to latest version.
```
## Usage
You need to add `aerich.models` to your `Tortoise-ORM` config first, for example:
```python
TORTOISE_ORM = {
"connections": {"default": "mysql://root:123456@127.0.0.1:3306/test"},
"apps": {
"models": {
"models": ["tests.models", "aerich.models"],
"default_connection": "default",
},
},
}
```
### Initialization
```shell
> aerich init -h
Usage: aerich init [OPTIONS]
Init config file and generate root migrate location.
Options:
-t, --tortoise-orm TEXT Tortoise-ORM config module dict variable, like settings.TORTOISE_ORM.
[required]
--location TEXT Migrate store location. [default: ./migrations]
-h, --help Show this message and exit.
```
Init config file and location:
```shell
> aerich init -t tests.backends.mysql.TORTOISE_ORM
Success create migrate location ./migrations
Success generate config file aerich.ini
```
### Init db
```shell
> aerich init-db
Success create app migrate location ./migrations/models
Success generate schema for app "models"
```
If your Tortoise-ORM app is not the default `models`, you must specify it with `--app`, e.g. `aerich --app other_models init-db`.
### Update models and make migrate
```shell
> aerich migrate --name drop_column
Success migrate 1_202029051520102929_drop_column.json
```
The format of the migration filename is `{version_num}_{datetime}_{name|update}.json`.
If `aerich` guesses that you are renaming a column, it will ask `Rename {old_column} to {new_column} [True]`; choose `True` to rename the column without dropping it, or `False` to drop the column and create a new one.
Note that for MySQL, only MySQL 8.0+ supports the `RENAME ... TO` syntax.
### Upgrade to latest version
```shell
> aerich upgrade
Success upgrade 1_202029051520102929_drop_column.json
```
Now your database is migrated to the latest version.
### Downgrade to specified version
```shell
> aerich downgrade -h
Usage: aerich downgrade [OPTIONS]
Downgrade to specified version.
Options:
-v, --version INTEGER Specified version, default to last. [default: -1]
-h, --help Show this message and exit.
```
```shell
> aerich downgrade
Success downgrade 1_202029051520102929_drop_column.json
```
Now your database is rolled back to the specified version.
### Show history
```shell
> aerich history
1_202029051520102929_drop_column.json
```
### Show heads to be migrated
```shell
> aerich heads
1_202029051520102929_drop_column.json
```
## Support this project
- Just give a star!
- Donation.
| AliPay | WeChatPay | PayPal |
| -------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------- | ---------------------------------------------------------------- |
| <img width="200" src="https://github.com/long2ice/aerich/raw/dev/images/alipay.jpeg"/> | <img width="200" src="https://github.com/long2ice/aerich/raw/dev/images/wechatpay.jpeg"/> | [PayPal](https://www.paypal.me/long2ice) to my account long2ice. |
## License
This project is licensed under the
[Apache-2.0](https://github.com/long2ice/aerich/blob/master/LICENSE) License.

View File

@@ -1,169 +0,0 @@
======
Aerich
======
.. image:: https://img.shields.io/pypi/v/aerich.svg?style=flat
:target: https://pypi.python.org/pypi/aerich
.. image:: https://img.shields.io/github/license/long2ice/aerich
:target: https://github.com/long2ice/aerich
.. image:: https://github.com/long2ice/aerich/workflows/pypi/badge.svg
:target: https://github.com/long2ice/aerich/actions?query=workflow:pypi
.. image:: https://github.com/long2ice/aerich/workflows/test/badge.svg
:target: https://github.com/long2ice/aerich/actions?query=workflow:test
Introduction
============
Tortoise-ORM is the best asyncio ORM now, but it lacks a database migrations tool like alembic for SQLAlchemy, or Django ORM with it's own migrations tool.
This project aim to be a best migrations tool for Tortoise-ORM and which written by one of contributors of Tortoise-ORM.
Install
=======
Just install from pypi:
.. code-block:: shell
$ pip install aerich
Quick Start
===========
.. code-block:: shell
$ aerich -h
Usage: aerich [OPTIONS] COMMAND [ARGS]...
Options:
-c, --config TEXT Config file. [default: aerich.ini]
--app TEXT Tortoise-ORM app name. [default: models]
-n, --name TEXT Name of section in .ini file to use for aerich config.
[default: aerich]
-h, --help Show this message and exit.
Commands:
downgrade Downgrade to previous version.
heads Show current available heads in migrate location.
history List all migrate items.
init Init config file and generate root migrate location.
init-db Generate schema and generate app migrate location.
migrate Generate migrate changes file.
upgrade Upgrade to latest version.
Usage
=====
You need add ``aerich.models`` to your ``Tortoise-ORM`` config first, example:
.. code-block:: python
TORTOISE_ORM = {
"connections": {"default": "mysql://root:123456@127.0.0.1:3306/test"},
"apps": {
"models": {
"models": ["tests.models", "aerich.models"],
"default_connection": "default",
},
},
}
Initialization
--------------
.. code-block:: shell
$ aerich init -h
Usage: aerich init [OPTIONS]
Init config file and generate root migrate location.
Options:
-t, --tortoise-orm TEXT Tortoise-ORM config module dict variable, like settings.TORTOISE_ORM.
[required]
--location TEXT Migrate store location. [default: ./migrations]
-h, --help Show this message and exit.
Init config file and location:
.. code-block:: shell
$ aerich init -t tests.backends.mysql.TORTOISE_ORM
Success create migrate location ./migrations
Success generate config file aerich.ini
Init db
-------
.. code-block:: shell
$ aerich init-db
Success create app migrate location ./migrations/models
Success generate schema for app "models"
.. note::
If your Tortoise-ORM app is not default ``models``, you must specify ``--app`` like ``aerich --app other_models init-db``.
Update models and make migrate
------------------------------
.. code-block:: shell
$ aerich migrate --name drop_column
Success migrate 1_202029051520102929_drop_column.json
Format of migrate filename is ``{version_num}_{datetime}_{name|update}.json``
Upgrade to latest version
-------------------------
.. code-block:: shell
$ aerich upgrade
Success upgrade 1_202029051520102929_drop_column.json
Now your db is migrated to latest.
Downgrade to previous version
-----------------------------
.. code-block:: shell
$ aerich downgrade
Success downgrade 1_202029051520102929_drop_column.json
Now your db rollback to previous version.
Show history
------------
.. code-block:: shell
$ aerich history
1_202029051520102929_drop_column.json
Show heads to be migrated
-------------------------
.. code-block:: shell
$ aerich heads
1_202029051520102929_drop_column.json
Limitations
===========
* Not support ``rename column`` now.
* ``Sqlite`` and ``Postgres`` may not work as expected because I don't use those in my work.
License
=======
This project is licensed under the `MIT <https://github.com/long2ice/aerich/blob/master/LICENSE>`_ License.

View File

@@ -1 +1 @@
__version__ = "0.1.9"
__version__ = "0.3.2"

View File

@@ -1,11 +1,12 @@
import asyncio
import json
import os
import sys
from configparser import ConfigParser
from enum import Enum
from functools import wraps
import asyncclick as click
from asyncclick import Context, UsageError
import click
from click import Context, UsageError
from tortoise import Tortoise, generate_schema_for_client
from tortoise.exceptions import OperationalError
from tortoise.transactions import in_transaction
@@ -15,20 +16,27 @@ from aerich.migrate import Migrate
from aerich.utils import get_app_connection, get_app_connection_name, get_tortoise_config
from . import __version__
from .enums import Color
from .models import Aerich
class Color(str, Enum):
green = "green"
red = "red"
yellow = "yellow"
parser = ConfigParser()
def coro(f):
@wraps(f)
def wrapper(*args, **kwargs):
loop = asyncio.get_event_loop()
ctx = args[0]
loop.run_until_complete(f(*args, **kwargs))
app = ctx.obj.get("app")
if app:
Migrate.remove_old_model_file(app, ctx.obj["location"])
return wrapper
@click.group(context_settings={"help_option_names": ["-h", "--help"]})
@click.version_option(__version__)
@click.version_option(__version__, "-V", "--version")
@click.option(
"-c", "--config", default="aerich.ini", show_default=True, help="Config file.",
)
@@ -41,6 +49,7 @@ parser = ConfigParser()
help="Name of section in .ini file to use for aerich config.",
)
@click.pass_context
@coro
async def cli(ctx: Context, config, app, name):
ctx.ensure_object(dict)
ctx.obj["config_file"] = config
@@ -62,7 +71,7 @@ async def cli(ctx: Context, config, app, name):
ctx.obj["config"] = tortoise_config
ctx.obj["location"] = location
ctx.obj["app"] = app
Migrate.app = app
if invoked_subcommand != "init-db":
await Migrate.init_with_old_models(tortoise_config, app, location)
@@ -70,68 +79,89 @@ async def cli(ctx: Context, config, app, name):
@cli.command(help="Generate migrate changes file.")
@click.option("--name", default="update", show_default=True, help="Migrate name.")
@click.pass_context
@coro
async def migrate(ctx: Context, name):
config = ctx.obj["config"]
location = ctx.obj["location"]
app = ctx.obj["app"]
ret = await Migrate.migrate(name)
if not ret:
return click.secho("No changes detected", fg=Color.yellow)
Migrate.write_old_models(config, app, location)
click.secho(f"Success migrate {ret}", fg=Color.green)
@cli.command(help="Upgrade to latest version.")
@cli.command(help="Upgrade to specified version.")
@click.pass_context
@coro
async def upgrade(ctx: Context):
config = ctx.obj["config"]
app = ctx.obj["app"]
location = ctx.obj["location"]
migrated = False
for version in Migrate.get_all_version_files():
for version_file in Migrate.get_all_version_files():
try:
exists = await Aerich.exists(version=version, app=app)
exists = await Aerich.exists(version=version_file, app=app)
except OperationalError:
exists = False
if not exists:
async with in_transaction(get_app_connection_name(config, app)) as conn:
file_path = os.path.join(Migrate.migrate_location, version)
with open(file_path, "r") as f:
file_path = os.path.join(Migrate.migrate_location, version_file)
with open(file_path, "r", encoding="utf-8") as f:
content = json.load(f)
upgrade_query_list = content.get("upgrade")
for upgrade_query in upgrade_query_list:
await conn.execute_query(upgrade_query)
await Aerich.create(version=version, app=app)
click.secho(f"Success upgrade {version}", fg=Color.green)
await conn.execute_script(upgrade_query)
await Aerich.create(
version=version_file,
app=app,
content=Migrate.get_models_content(config, app, location),
)
click.secho(f"Success upgrade {version_file}", fg=Color.green)
migrated = True
if not migrated:
click.secho("No migrate items", fg=Color.yellow)
@cli.command(help="Downgrade to previous version.")
@cli.command(help="Downgrade to specified version.")
@click.option(
"-v",
"--version",
default=-1,
type=int,
show_default=True,
help="Specified version, default to last.",
)
@click.pass_context
async def downgrade(ctx: Context):
@coro
async def downgrade(ctx: Context, version: int):
app = ctx.obj["app"]
config = ctx.obj["config"]
last_version = await Migrate.get_last_version()
if not last_version:
return click.secho("No last version found", fg=Color.yellow)
file = last_version.version
async with in_transaction(get_app_connection_name(config, app)) as conn:
file_path = os.path.join(Migrate.migrate_location, file)
with open(file_path, "r") as f:
content = json.load(f)
downgrade_query_list = content.get("downgrade")
if not downgrade_query_list:
return click.secho(f"No downgrade item dound", fg=Color.yellow)
for downgrade_query in downgrade_query_list:
await conn.execute_query(downgrade_query)
await last_version.delete()
return click.secho(f"Success downgrade {file}", fg=Color.green)
if version == -1:
specified_version = await Migrate.get_last_version()
else:
specified_version = await Aerich.filter(app=app, version__startswith=f"{version}_").first()
if not specified_version:
return click.secho("No specified version found", fg=Color.yellow)
if version == -1:
versions = [specified_version]
else:
versions = await Aerich.filter(app=app, pk__gte=specified_version.pk)
for version in versions:
file = version.version
async with in_transaction(get_app_connection_name(config, app)) as conn:
file_path = os.path.join(Migrate.migrate_location, file)
with open(file_path, "r", encoding="utf-8") as f:
content = json.load(f)
downgrade_query_list = content.get("downgrade")
if not downgrade_query_list:
return click.secho("No downgrade item found", fg=Color.yellow)
for downgrade_query in downgrade_query_list:
await conn.execute_query(downgrade_query)
await version.delete()
os.unlink(file_path)
click.secho(f"Success downgrade {file}", fg=Color.green)
@cli.command(help="Show current available heads in migrate location.")
@click.pass_context
@coro
async def heads(ctx: Context):
app = ctx.obj["app"]
versions = Migrate.get_all_version_files()
@@ -141,12 +171,13 @@ async def heads(ctx: Context):
click.secho(version, fg=Color.green)
is_heads = True
if not is_heads:
click.secho("No available heads,try migrate", fg=Color.green)
click.secho("No available heads,try migrate first", fg=Color.green)
@cli.command(help="List all migrate items.")
@click.pass_context
def history(ctx):
@coro
async def history(ctx: Context):
versions = Migrate.get_all_version_files()
for version in versions:
click.secho(version, fg=Color.green)
@@ -165,6 +196,7 @@ def history(ctx):
"--location", default="./migrations", show_default=True, help="Migrate store location."
)
@click.pass_context
@coro
async def init(
ctx: Context, tortoise_orm, location,
):
@@ -177,7 +209,7 @@ async def init(
parser.set(name, "tortoise_orm", tortoise_orm)
parser.set(name, "location", location)
with open(config_file, "w") as f:
with open(config_file, "w", encoding="utf-8") as f:
parser.write(f)
if not os.path.isdir(location):
@@ -190,12 +222,13 @@ async def init(
@cli.command(help="Generate schema and generate app migrate location.")
@click.option(
"--safe",
is_flag=True,
type=bool,
default=True,
help="When set to true, creates the table only when it does not already exist.",
show_default=True,
)
@click.pass_context
@coro
async def init_db(ctx: Context, safe):
config = ctx.obj["config"]
location = ctx.obj["location"]
@@ -208,8 +241,6 @@ async def init_db(ctx: Context, safe):
else:
return click.secho(f"Inited {app} already", fg=Color.yellow)
Migrate.write_old_models(config, app, location)
await Tortoise.init(config=config)
connection = get_app_connection(config, app)
await generate_schema_for_client(connection, safe)
@@ -217,8 +248,10 @@ async def init_db(ctx: Context, safe):
schema = get_schema_sql(connection, safe)
version = await Migrate.generate_version()
await Aerich.create(version=version, app=app)
with open(os.path.join(dirname, version), "w") as f:
await Aerich.create(
version=version, app=app, content=Migrate.get_models_content(config, app, location)
)
with open(os.path.join(dirname, version), "w", encoding="utf-8") as f:
content = {
"upgrade": [schema],
}
@@ -228,4 +261,4 @@ async def init_db(ctx: Context, safe):
def main():
sys.path.insert(0, ".")
cli(_anyio_backend="asyncio")
cli()
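For readers following the cli.py changes above: the new `coro` wrapper is what lets plain `click` drive async commands after the switch away from asyncclick. A minimal, self-contained sketch of the same pattern (the command name and body are hypothetical, not from the diff):

```python
import asyncio
from functools import wraps

import click


def coro(f):
    # Wrap an async callback so click can invoke it synchronously.
    @wraps(f)
    def wrapper(*args, **kwargs):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(f(*args, **kwargs))

    return wrapper


@click.command()
@coro
async def hello():
    # Hypothetical async work; aerich's real commands await Tortoise here.
    await asyncio.sleep(0)
    click.echo("hello")


if __name__ == "__main__":
    hello()
```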

View File

@@ -2,7 +2,7 @@ from typing import List, Type
from tortoise import BaseDBAsyncClient, ForeignKeyFieldInstance, ManyToManyFieldInstance, Model
from tortoise.backends.base.schema_generator import BaseSchemaGenerator
from tortoise.fields import Field, JSONField, TextField, UUIDField
from tortoise.fields import CASCADE, Field, JSONField, TextField, UUIDField
class BaseDDL:
@@ -11,13 +11,16 @@ class BaseDDL:
_DROP_TABLE_TEMPLATE = 'DROP TABLE IF EXISTS "{table_name}"'
_ADD_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" ADD {column}'
_DROP_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" DROP COLUMN "{column_name}"'
_RENAME_COLUMN_TEMPLATE = (
'ALTER TABLE "{table_name}" RENAME COLUMN "{old_column_name}" TO "{new_column_name}"'
)
_ADD_INDEX_TEMPLATE = (
'ALTER TABLE "{table_name}" ADD {unique} INDEX "{index_name}" ({column_names})'
)
_DROP_INDEX_TEMPLATE = 'ALTER TABLE "{table_name}" DROP INDEX "{index_name}"'
_ADD_FK_TEMPLATE = 'ALTER TABLE "{table_name}" ADD CONSTRAINT "{fk_name}" FOREIGN KEY ("{db_column}") REFERENCES "{table}" ("{field}") ON DELETE {on_delete}'
_DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP FOREIGN KEY "{fk_name}"'
_M2M_TABLE_TEMPLATE = 'CREATE TABLE "{table_name}" ("{backward_key}" {backward_type} NOT NULL REFERENCES "{backward_table}" ("{backward_field}") ON DELETE CASCADE,"{forward_key}" {forward_type} NOT NULL REFERENCES "{forward_table}" ("{forward_field}") ON DELETE CASCADE){extra}{comment};'
_M2M_TABLE_TEMPLATE = 'CREATE TABLE "{table_name}" ("{backward_key}" {backward_type} NOT NULL REFERENCES "{backward_table}" ("{backward_field}") ON DELETE CASCADE,"{forward_key}" {forward_type} NOT NULL REFERENCES "{forward_table}" ("{forward_field}") ON DELETE {on_delete}){extra}{comment};'
_MODIFY_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" MODIFY COLUMN {column}'
def __init__(self, client: "BaseDBAsyncClient"):
@@ -41,6 +44,7 @@ class BaseDDL:
backward_type=model._meta.pk.get_for_dialect(self.DIALECT, "SQL_TYPE"),
forward_key=field.forward_key,
forward_type=field.related_model._meta.pk.get_for_dialect(self.DIALECT, "SQL_TYPE"),
on_delete=CASCADE,
extra=self.schema_generator._table_generate_extra(table=field.through),
comment=self.schema_generator._table_comment_generator(
table=field.through, comment=field.description
@@ -125,6 +129,13 @@ class BaseDDL:
),
)
def rename_column(self, model: "Type[Model]", old_column_name: str, new_column_name: str):
return self._RENAME_COLUMN_TEMPLATE.format(
table_name=model._meta.db_table,
old_column_name=old_column_name,
new_column_name=new_column_name,
)
def add_index(self, model: "Type[Model]", field_names: List[str], unique=False):
return self._ADD_INDEX_TEMPLATE.format(
unique="UNIQUE" if unique else "",
@@ -179,3 +190,12 @@ class BaseDDL:
to_field=to_field_name,
),
)
def alter_column_default(self, model: "Type[Model]", field_object: Field):
pass
def alter_column_null(self, model: "Type[Model]", field_object: Field):
pass
def set_comment(self, model: "Type[Model]", field_object: Field):
pass
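To make the new rename support concrete, a small sketch of what the `_RENAME_COLUMN_TEMPLATE` added above renders; the table and column names are hypothetical, not taken from the diff:

```python
# Hypothetical rendering of the rename template introduced in BaseDDL above.
RENAME_COLUMN_TEMPLATE = (
    'ALTER TABLE "{table_name}" RENAME COLUMN "{old_column_name}" TO "{new_column_name}"'
)

sql = RENAME_COLUMN_TEMPLATE.format(
    table_name="user", old_column_name="name", new_column_name="username"
)
print(sql)  # ALTER TABLE "user" RENAME COLUMN "name" TO "username"
```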

View File

@@ -9,6 +9,9 @@ class MysqlDDL(BaseDDL):
_DROP_TABLE_TEMPLATE = "DROP TABLE IF EXISTS `{table_name}`"
_ADD_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` ADD {column}"
_DROP_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` DROP COLUMN `{column_name}`"
_RENAME_COLUMN_TEMPLATE = (
"ALTER TABLE `{table_name}` RENAME COLUMN `{old_column_name}` TO `{new_column_name}`"
)
_ADD_INDEX_TEMPLATE = (
"ALTER TABLE `{table_name}` ADD {unique} INDEX `{index_name}` ({column_names})"
)

View File

@@ -1,4 +1,8 @@
from typing import List, Type
from tortoise import Model
from tortoise.backends.asyncpg.schema_generator import AsyncpgSchemaGenerator
from tortoise.fields import Field
from aerich.ddl import BaseDDL
@@ -6,3 +10,66 @@ from aerich.ddl import BaseDDL
class PostgresDDL(BaseDDL):
schema_generator_cls = AsyncpgSchemaGenerator
DIALECT = AsyncpgSchemaGenerator.DIALECT
_ADD_INDEX_TEMPLATE = 'CREATE INDEX "{index_name}" ON "{table_name}" ({column_names})'
_ADD_UNIQUE_TEMPLATE = (
'ALTER TABLE "{table_name}" ADD CONSTRAINT "{index_name}" UNIQUE ({column_names})'
)
_DROP_INDEX_TEMPLATE = 'DROP INDEX "{index_name}"'
_DROP_UNIQUE_TEMPLATE = 'ALTER TABLE "{table_name}" DROP CONSTRAINT "{index_name}"'
_ALTER_DEFAULT_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" {default}'
_ALTER_NULL_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" {set_drop} NOT NULL'
_MODIFY_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" TYPE {datatype}'
_SET_COMMENT_TEMPLATE = 'COMMENT ON COLUMN "{table_name}"."{column}" IS {comment}'
_DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP CONSTRAINT "{fk_name}"'
def alter_column_default(self, model: "Type[Model]", field_object: Field):
db_table = model._meta.db_table
default = self._get_default(model, field_object)
return self._ALTER_DEFAULT_TEMPLATE.format(
table_name=db_table,
column=field_object.model_field_name,
default="SET" + default if default else "DROP DEFAULT",
)
def alter_column_null(self, model: "Type[Model]", field_object: Field):
db_table = model._meta.db_table
return self._ALTER_NULL_TEMPLATE.format(
table_name=db_table,
column=field_object.model_field_name,
set_drop="DROP" if field_object.null else "SET",
)
def modify_column(self, model: "Type[Model]", field_object: Field):
db_table = model._meta.db_table
return self._MODIFY_COLUMN_TEMPLATE.format(
table_name=db_table,
column=field_object.model_field_name,
datatype=field_object.get_for_dialect(self.DIALECT, "SQL_TYPE"),
)
def add_index(self, model: "Type[Model]", field_names: List[str], unique=False):
template = self._ADD_UNIQUE_TEMPLATE if unique else self._ADD_INDEX_TEMPLATE
return template.format(
index_name=self.schema_generator._generate_index_name(
"uid" if unique else "idx", model, field_names
),
table_name=model._meta.db_table,
column_names=", ".join([self.schema_generator.quote(f) for f in field_names]),
)
def drop_index(self, model: "Type[Model]", field_names: List[str], unique=False):
template = self._DROP_UNIQUE_TEMPLATE if unique else self._DROP_INDEX_TEMPLATE
return template.format(
index_name=self.schema_generator._generate_index_name(
"uid" if unique else "idx", model, field_names
),
table_name=model._meta.db_table,
)
def set_comment(self, model: "Type[Model]", field_object: Field):
db_table = model._meta.db_table
return self._SET_COMMENT_TEMPLATE.format(
table_name=db_table,
column=field_object.model_field_name,
comment="'{}'".format(field_object.description) if field_object.description else "NULL",
)
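Similarly, a short sketch of what the new Postgres `_ALTER_NULL_TEMPLATE` produces; the `user` table and `email` column are hypothetical:

```python
# Hypothetical rendering of the Postgres null-toggle template added above.
ALTER_NULL_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" {set_drop} NOT NULL'

# "DROP" is used when a field becomes nullable, "SET" when it becomes required.
print(ALTER_NULL_TEMPLATE.format(table_name="user", column="email", set_drop="DROP"))
# ALTER TABLE "user" ALTER COLUMN "email" DROP NOT NULL
```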

View File

@@ -1,8 +1,19 @@
from typing import Type
from tortoise import Model
from tortoise.backends.sqlite.schema_generator import SqliteSchemaGenerator
from tortoise.fields import Field
from aerich.ddl import BaseDDL
from aerich.exceptions import NotSupportError
class SqliteDDL(BaseDDL):
schema_generator_cls = SqliteSchemaGenerator
DIALECT = SqliteSchemaGenerator.DIALECT
def drop_column(self, model: "Type[Model]", column_name: str):
raise NotSupportError("Drop column is unsupported in SQLite.")
def modify_column(self, model: "Type[Model]", field_object: Field):
raise NotSupportError("Modify column is unsupported in SQLite.")

aerich/enums.py Normal file
View File

@@ -0,0 +1,7 @@
from enum import Enum
class Color(str, Enum):
green = "green"
red = "red"
yellow = "yellow"

View File

@@ -1,6 +1,4 @@
class ConfigurationError(Exception):
class NotSupportError(Exception):
"""
config error
raise when features not support
"""
pass

View File

@@ -1,10 +1,12 @@
import json
import os
import re
from copy import deepcopy
from datetime import datetime
from typing import Dict, List, Tuple, Type
from importlib import import_module
from io import StringIO
from typing import Dict, List, Optional, Tuple, Type
import click
from tortoise import (
BackwardFKRelation,
BackwardOneToOneRelation,
@@ -13,10 +15,11 @@ from tortoise import (
Model,
Tortoise,
)
from tortoise.exceptions import OperationalError
from tortoise.fields import Field
from aerich.ddl import BaseDDL
from aerich.models import Aerich
from aerich.models import MAX_VERSION_LENGTH, Aerich
from aerich.utils import get_app_connection
@@ -28,6 +31,8 @@ class Migrate:
_upgrade_m2m: List[str] = []
_downgrade_m2m: List[str] = []
_aerich = Aerich.__name__
_rename_old = []
_rename_new = []
ddl: BaseDDL
migrate_config: dict
@@ -35,44 +40,62 @@ class Migrate:
diff_app = "diff_models"
app: str
migrate_location: str
dialect: str
@classmethod
def get_old_model_file(cls):
return cls.old_models + ".py"
def get_old_model_file(cls, app: str, location: str):
return os.path.join(location, app, cls.old_models + ".py")
@classmethod
def get_all_version_files(cls) -> List[str]:
return sorted(filter(lambda x: x.endswith("json"), os.listdir(cls.migrate_location)))
return sorted(
filter(lambda x: x.endswith("json"), os.listdir(cls.migrate_location)),
key=lambda x: int(x.split("_")[0]),
)
@classmethod
async def get_last_version(cls) -> Aerich:
return await Aerich.filter(app=cls.app).first()
async def get_last_version(cls) -> Optional[Aerich]:
try:
return await Aerich.filter(app=cls.app).first()
except OperationalError:
pass
@classmethod
def remove_old_model_file(cls, app: str, location: str):
try:
os.unlink(cls.get_old_model_file(app, location))
except FileNotFoundError:
pass
@classmethod
async def init_with_old_models(cls, config: dict, app: str, location: str):
migrate_config = cls._get_migrate_config(config, app, location)
await Tortoise.init(config=config)
last_version = await cls.get_last_version()
cls.app = app
cls.migrate_config = migrate_config
cls.migrate_location = os.path.join(location, app)
if last_version:
content = last_version.content
with open(cls.get_old_model_file(app, location), "w") as f:
f.write(content)
await Tortoise.init(config=migrate_config)
migrate_config = cls._get_migrate_config(config, app, location)
cls.migrate_config = migrate_config
await Tortoise.init(config=migrate_config)
connection = get_app_connection(config, app)
if connection.schema_generator.DIALECT == "mysql":
cls.dialect = connection.schema_generator.DIALECT
if cls.dialect == "mysql":
from aerich.ddl.mysql import MysqlDDL
cls.ddl = MysqlDDL(connection)
elif connection.schema_generator.DIALECT == "sqlite":
elif cls.dialect == "sqlite":
from aerich.ddl.sqlite import SqliteDDL
cls.ddl = SqliteDDL(connection)
elif connection.schema_generator.DIALECT == "postgres":
elif cls.dialect == "postgres":
from aerich.ddl.postgres import PostgresDDL
cls.ddl = PostgresDDL(connection)
else:
raise NotImplementedError("Current only support MySQL")
@classmethod
async def _get_last_version_num(cls):
@@ -80,24 +103,31 @@ class Migrate:
if not last_version:
return None
version = last_version.version
return int(version.split("_")[0])
return int(version.split("_", 1)[0])
@classmethod
async def generate_version(cls, name=None):
now = datetime.now().strftime("%Y%M%D%H%M%S").replace("/", "")
now = datetime.now().strftime("%Y%m%d%H%M%S").replace("/", "")
last_version_num = await cls._get_last_version_num()
if last_version_num is None:
return f"0_{now}_init.json"
return f"{last_version_num + 1}_{now}_{name}.json"
version = f"{last_version_num + 1}_{now}_{name}.json"
if len(version) > MAX_VERSION_LENGTH:
raise ValueError(f"Version name exceeds maximum length ({MAX_VERSION_LENGTH})")
return version
@classmethod
async def _generate_diff_sql(cls, name):
version = await cls.generate_version(name)
# delete if same version exists
for version_file in cls.get_all_version_files():
if version_file.startswith(version.split("_")[0]):
os.unlink(os.path.join(cls.migrate_location, version_file))
content = {
"upgrade": cls.upgrade_operators,
"downgrade": cls.downgrade_operators,
}
with open(os.path.join(cls.migrate_location, version), "w") as f:
with open(os.path.join(cls.migrate_location, version), "w", encoding="utf-8") as f:
json.dump(content, f, indent=2, ensure_ascii=False)
return version
@@ -123,7 +153,7 @@ class Migrate:
return await cls._generate_diff_sql(name)
@classmethod
def _add_operator(cls, operator: str, upgrade=True, fk=False):
def _add_operator(cls, operator: str, upgrade=True, fk_m2m=False):
"""
add operator,differentiate fk because fk is order limit
:param operator:
@@ -132,36 +162,16 @@ class Migrate:
:return:
"""
if upgrade:
if fk:
if fk_m2m:
cls._upgrade_fk_m2m_index_operators.append(operator)
else:
cls.upgrade_operators.append(operator)
else:
if fk:
if fk_m2m:
cls._downgrade_fk_m2m_index_operators.append(operator)
else:
cls.downgrade_operators.append(operator)
@classmethod
def cp_models(
cls, app: str, model_files: List[str], old_model_file,
):
"""
cp currents models to old_model_files
:param app:
:param model_files:
:param old_model_file:
:return:
"""
pattern = rf"(\n)?('|\")({app})(.\w+)('|\")"
for i, model_file in enumerate(model_files):
with open(model_file, "r") as f:
content = f.read()
ret = re.sub(pattern, rf"\2{cls.diff_app}\4\5", content)
mode = "w" if i == 0 else "a"
with open(old_model_file, mode) as f:
f.write(ret)
@classmethod
def _get_migrate_config(cls, config: dict, app: str, location: str):
"""
@@ -171,17 +181,16 @@ class Migrate:
:param location:
:return:
"""
temp_config = deepcopy(config)
path = os.path.join(location, app, cls.old_models)
path = path.replace("/", ".").lstrip(".")
temp_config["apps"][cls.diff_app] = {
path = path.replace(os.sep, ".").lstrip(".")
config["apps"][cls.diff_app] = {
"models": [path],
"default_connection": config.get("apps").get(app).get("default_connection", "default"),
}
return temp_config
return config
@classmethod
def write_old_models(cls, config: dict, app: str, location: str):
def get_models_content(cls, config: dict, app: str, location: str):
"""
write new models to old models
:param config:
@@ -189,15 +198,18 @@ class Migrate:
:param location:
:return:
"""
cls.app = app
old_model_files = []
models = config.get("apps").get(app).get("models")
for model in models:
if model != "aerich.models":
old_model_files.append(model.replace(".", "/") + ".py")
cls.cp_models(app, old_model_files, os.path.join(location, app, cls.get_old_model_file()))
old_model_files.append(import_module(model).__file__)
pattern = rf"(\n)?('|\")({app})(.\w+)('|\")"
str_io = StringIO()
for i, model_file in enumerate(old_model_files):
with open(model_file, "r", encoding="utf-8") as f:
content = f.read()
ret = re.sub(pattern, rf"\2{cls.diff_app}\4\5", content)
str_io.write(f"{ret}\n")
return str_io.getvalue()
@classmethod
def diff_models(
@@ -260,11 +272,35 @@ class Migrate:
if cls._exclude_field(new_field, upgrade):
continue
if new_key not in old_keys:
cls._add_operator(
cls._add_field(new_model, new_field),
upgrade,
isinstance(new_field, (ForeignKeyFieldInstance, ManyToManyFieldInstance)),
)
new_field_dict = new_field.describe(serializable=True)
new_field_dict.pop("name", None)
new_field_dict.pop("db_column", None)
for diff_key in old_keys - new_keys:
old_field = old_fields_map.get(diff_key)
old_field_dict = old_field.describe(serializable=True)
old_field_dict.pop("name", None)
old_field_dict.pop("db_column", None)
if old_field_dict == new_field_dict:
if upgrade:
is_rename = click.prompt(
f"Rename {diff_key} to {new_key}?",
default=True,
type=bool,
show_choices=True,
)
cls._rename_new.append(new_key)
cls._rename_old.append(diff_key)
else:
is_rename = diff_key in cls._rename_new
if is_rename:
cls._add_operator(
cls._rename_field(new_model, old_field, new_field), upgrade,
)
break
else:
cls._add_operator(
cls._add_field(new_model, new_field), upgrade, cls._is_fk_m2m(new_field),
)
else:
old_field = old_fields_map.get(new_key)
new_field_dict = new_field.describe(serializable=True)
@@ -274,7 +310,25 @@ class Migrate:
old_field_dict.pop("unique")
old_field_dict.pop("indexed")
if not cls._is_fk_m2m(new_field) and new_field_dict != old_field_dict:
cls._add_operator(cls._modify_field(new_model, new_field), upgrade=upgrade)
if cls.dialect == "postgres":
if new_field.null != old_field.null:
cls._add_operator(
cls._alter_null(new_model, new_field), upgrade=upgrade
)
if new_field.default != old_field.default:
cls._add_operator(
cls._alter_default(new_model, new_field), upgrade=upgrade
)
if new_field.description != old_field.description:
cls._add_operator(
cls._set_comment(new_model, new_field), upgrade=upgrade
)
if new_field.field_type != old_field.field_type:
cls._add_operator(
cls._modify_field(new_model, new_field), upgrade=upgrade
)
else:
cls._add_operator(cls._modify_field(new_model, new_field), upgrade=upgrade)
if (old_field.index and not new_field.index) or (
old_field.unique and not new_field.unique
):
@@ -293,13 +347,25 @@ class Migrate:
upgrade,
cls._is_fk_m2m(new_field),
)
if isinstance(new_field, ForeignKeyFieldInstance):
if old_field.db_constraint and not new_field.db_constraint:
cls._add_operator(
cls._drop_fk(new_model, new_field), upgrade, True,
)
if new_field.db_constraint and not old_field.db_constraint:
cls._add_operator(
cls._add_fk(new_model, new_field), upgrade, True,
)
for old_key in old_keys:
field = old_fields_map.get(old_key)
if old_key not in new_keys and not cls._exclude_field(field, upgrade):
cls._add_operator(
cls._remove_field(old_model, field), upgrade, cls._is_fk_m2m(field),
)
if (upgrade and old_key not in cls._rename_old) or (
not upgrade and old_key not in cls._rename_new
):
cls._add_operator(
cls._remove_field(old_model, field), upgrade, cls._is_fk_m2m(field),
)
for new_index in new_indexes:
if new_index not in old_indexes:
@@ -367,10 +433,26 @@ class Migrate:
return cls.ddl.create_m2m_table(model, field)
return cls.ddl.add_column(model, field)
@classmethod
def _alter_default(cls, model: Type[Model], field: Field):
return cls.ddl.alter_column_default(model, field)
@classmethod
def _alter_null(cls, model: Type[Model], field: Field):
return cls.ddl.alter_column_null(model, field)
@classmethod
def _set_comment(cls, model: Type[Model], field: Field):
return cls.ddl.set_comment(model, field)
@classmethod
def _modify_field(cls, model: Type[Model], field: Field):
return cls.ddl.modify_column(model, field)
@classmethod
def _drop_fk(cls, model: Type[Model], field: ForeignKeyFieldInstance):
return cls.ddl.drop_fk(model, field)
@classmethod
def _remove_field(cls, model: Type[Model], field: Field):
if isinstance(field, ForeignKeyFieldInstance):
@@ -379,6 +461,10 @@ class Migrate:
return cls.ddl.drop_m2m(field)
return cls.ddl.drop_column(model, field.model_field_name)
@classmethod
def _rename_field(cls, model: Type[Model], old_field: Field, new_field: Field):
return cls.ddl.rename_column(model, old_field.model_field_name, new_field.model_field_name)
@classmethod
def _add_fk(cls, model: Type[Model], field: ForeignKeyFieldInstance):
"""

View File

@@ -1,9 +1,12 @@
from tortoise import Model, fields
MAX_VERSION_LENGTH = 255
class Aerich(Model):
version = fields.CharField(max_length=50)
version = fields.CharField(max_length=MAX_VERSION_LENGTH)
app = fields.CharField(max_length=20)
content = fields.TextField()
class Meta:
ordering = ["-id"]

View File

@@ -1,6 +1,6 @@
import importlib
from asyncclick import BadOptionUsage, Context
from click import BadOptionUsage, Context
from tortoise import BaseDBAsyncClient, Tortoise
@@ -11,7 +11,7 @@ def get_app_connection_name(config, app) -> str:
:param app:
:return:
"""
return config.get("apps").get(app).get("default_connection")
return config.get("apps").get(app).get("default_connection", "default")
def get_app_connection(config, app) -> BaseDBAsyncClient:

View File

@@ -16,7 +16,7 @@ db_url = os.getenv("TEST_DB", "sqlite://:memory:")
tortoise_orm = {
"connections": {"default": expand_db_url(db_url, True)},
"apps": {
"models": {"models": ["tests.models", "aerich.models"], "default_connection": "default",},
"models": {"models": ["tests.models", "aerich.models"], "default_connection": "default"},
},
}
@@ -31,24 +31,29 @@ def reset_migrate():
Migrate._downgrade_m2m = []
@pytest.fixture(scope="session")
def loop():
loop = asyncio.get_event_loop()
return loop
@pytest.yield_fixture(scope="session")
def event_loop():
policy = asyncio.get_event_loop_policy()
res = policy.new_event_loop()
asyncio.set_event_loop(res)
res._close = res.close
res.close = lambda: None
yield res
res._close()
@pytest.fixture(scope="session", autouse=True)
def initialize_tests(loop, request):
async def initialize_tests(event_loop, request):
tortoise_orm["connections"]["diff_models"] = "sqlite://:memory:"
tortoise_orm["apps"]["diff_models"] = {
"models": ["tests.diff_models"],
"default_connection": "diff_models",
}
loop.run_until_complete(Tortoise.init(config=tortoise_orm, _create_db=True))
loop.run_until_complete(
generate_schema_for_client(Tortoise.get_connection("default"), safe=True)
)
await Tortoise.init(config=tortoise_orm, _create_db=True)
await generate_schema_for_client(Tortoise.get_connection("default"), safe=True)
client = Tortoise.get_connection("default")
if client.schema_generator is MySQLSchemaGenerator:
@@ -58,4 +63,4 @@ def initialize_tests(loop, request):
elif client.schema_generator is AsyncpgSchemaGenerator:
Migrate.ddl = PostgresDDL(client)
request.addfinalizer(lambda: loop.run_until_complete(Tortoise._drop_databases()))
request.addfinalizer(lambda: event_loop.run_until_complete(Tortoise._drop_databases()))

BIN images/alipay.jpeg (new binary file, 75 KiB, not shown)

BIN images/wechatpay.jpeg (new binary file, 76 KiB, not shown)

poetry.lock (generated, 725 changed lines; diff suppressed because it is too large)

View File

@@ -1,32 +1,43 @@
[tool.poetry]
name = "aerich"
version = "0.1.9"
version = "0.3.2"
description = "A database migrations tool for Tortoise ORM."
authors = ["long2ice <long2ice@gmail.com>"]
license = "Apache-2.0"
readme = "README.md"
homepage = "https://github.com/long2ice/aerich"
repository = "https://github.com/long2ice/aerich.git"
documentation = "https://github.com/long2ice/aerich"
keywords = ["migrate", "Tortoise-ORM", "mysql"]
packages = [
{ include = "aerich" }
]
include = ["CHANGELOG.md", "LICENSE", "README.md"]
[tool.poetry.dependencies]
python = "^3.8"
python = "^3.7"
tortoise-orm = "*"
asyncclick = "*"
click = "*"
pydantic = "*"
aiomysql = {version = "*", optional = true}
asyncpg = {version = "*", optional = true}
[tool.poetry.dev-dependencies]
taskipy = "*"
flake8 = "*"
isort = "*"
black = "^19.10b0"
pytest = "*"
aiomysql = "*"
asyncpg = "*"
pytest-xdist = "*"
mypy = "*"
pytest-asyncio = "*"
bandit = "*"
pytest-mock = "*"
[tool.taskipy.tasks]
export = "poetry export -f requirements.txt --without-hashes > requirements.txt"
export-dev = "poetry export -f requirements.txt --dev --without-hashes > requirements-dev.txt"
[tool.poetry.extras]
dbdrivers = ["aiomysql", "asyncpg"]
[build-system]
requires = ["poetry>=0.12"]
build-backend = "poetry.masonry.api"
[tool.poetry.scripts]
aerich = "aerich.cli:main"

View File

@@ -1,2 +0,0 @@
[pytest]
addopts = -p no:warnings --ignore=src

View File

@@ -1,48 +0,0 @@
aiomysql==0.0.20
aiosqlite==0.13.0
anyio==1.3.0
apipkg==1.5
appdirs==1.4.4
async-generator==1.10
asyncclick==7.0.9
asyncpg==0.20.1
atomicwrites==1.4.0; sys_platform == "win32"
attrs==19.3.0
black==19.10b0
cffi==1.14.0
ciso8601==2.1.3; sys_platform != "win32" and implementation_name == "cpython"
click==7.1.2
colorama==0.4.3; sys_platform == "win32"
cryptography==2.9.2
execnet==1.7.1
flake8==3.8.2
iso8601==0.1.12; sys_platform == "win32" or implementation_name != "cpython"
isort==4.3.21
mccabe==0.6.1
more-itertools==8.3.0
mypy==0.770
mypy-extensions==0.4.3
packaging==20.4
pathspec==0.8.0
pluggy==0.13.1
py==1.8.1
pycodestyle==2.6.0
pycparser==2.20
pydantic==1.5.1
pyflakes==2.2.0
pymysql==0.9.2
pyparsing==2.4.7
pypika==0.37.6
pytest==5.4.2
pytest-asyncio==0.12.0
pytest-forked==1.1.3
pytest-xdist==1.32.0
regex==2020.5.14
six==1.15.0
sniffio==1.1.0
taskipy==1.2.1
toml==0.10.1
tortoise-orm==0.16.12
typed-ast==1.4.1
typing-extensions==3.7.4.2
wcwidth==0.1.9

View File

@@ -1,11 +0,0 @@
aiosqlite==0.13.0
anyio==1.3.0
async-generator==1.10
asyncclick==7.0.9
ciso8601==2.1.3; sys_platform != "win32" and implementation_name == "cpython"
iso8601==0.1.12; sys_platform == "win32" or implementation_name != "cpython"
pydantic==1.5.1
pypika==0.37.6
sniffio==1.1.0
tortoise-orm==0.16.12
typing-extensions==3.7.4.2

View File

@@ -1,47 +1,2 @@
[flake8]
max-line-length = 100
exclude =
ignore = E501,W503,DAR101,DAR201,DAR402
[darglint]
docstring_style=sphinx
[isort]
not_skip=__init__.py
multi_line_output=3
include_trailing_comma=True
force_grid_wrap=0
use_parentheses=True
line_length=100
[tool:pytest]
addopts = -n auto --tb=native -q
[mypy]
pretty = True
ignore_missing_imports = True
check_untyped_defs = True
disallow_subclassing_any = True
disallow_untyped_calls = True
disallow_untyped_defs = False
disallow_incomplete_defs = False
disallow_untyped_decorators = True
no_implicit_optional = True
warn_redundant_casts = True
warn_unused_ignores = True
warn_no_return = True
warn_return_any = False
warn_unused_configs = True
warn_unreachable = True
allow_redefinition = True
strict_equality = True
show_error_context = True
[mypy-tests.*]
check_untyped_defs = False
disallow_untyped_defs = False
disallow_incomplete_defs = False
warn_unreachable = False
[mypy-conftest]
disallow_untyped_defs = False
ignore = E501,W503

View File

@@ -1,44 +0,0 @@
import os
import re
from setuptools import find_packages, setup
def version():
ver_str_line = open('aerich/__init__.py', 'rt').read()
mob = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", ver_str_line, re.M)
if not mob:
raise RuntimeError("Unable to find version string")
return mob.group(1)
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as f:
long_description = f.read()
def requirements():
return open('requirements.txt', 'rt').read().splitlines()
setup(
name='aerich',
version=version(),
description='A database migrations tool for Tortoise-ORM.',
author='long2ice',
long_description_content_type='text/x-rst',
long_description=long_description,
author_email='long2ice@gmail.com',
url='https://github.com/long2ice/aerich',
license='MIT License',
packages=find_packages(include=['aerich*']),
include_package_data=True,
zip_safe=True,
entry_points={
'console_scripts': ['aerich = aerich.cli:main'],
},
platforms='any',
keywords=(
'migrate Tortoise-ORM mysql'
),
dependency_links=['https://github.com/tortoise-orm/tortoise-orm.git@develop#egg=tortoise-orm'],
install_requires=requirements(),
)

View File

@@ -22,15 +22,21 @@ class Status(IntEnum):
class User(Model):
username = fields.CharField(max_length=20,)
username = fields.CharField(max_length=20)
password = fields.CharField(max_length=200)
last_login = fields.DatetimeField(description="Last Login", default=datetime.datetime.now)
last_login_at = fields.DatetimeField(description="Last Login", default=datetime.datetime.now)
is_active = fields.BooleanField(default=True, description="Is Active")
is_superuser = fields.BooleanField(default=False, description="Is SuperUser")
avatar = fields.CharField(max_length=200, default="")
intro = fields.TextField(default="")
class Email(Model):
email = fields.CharField(max_length=200)
is_primary = fields.BooleanField(default=False)
user = fields.ForeignKeyField("diff_models.User", db_constraint=True)
class Category(Model):
slug = fields.CharField(max_length=200)
user = fields.ForeignKeyField("diff_models.User", description="User")

View File

@@ -31,6 +31,12 @@ class User(Model):
intro = fields.TextField(default="")
class Email(Model):
email = fields.CharField(max_length=200)
is_primary = fields.BooleanField(default=False)
user = fields.ForeignKeyField("models.User", db_constraint=False)
class Category(Model):
slug = fields.CharField(max_length=200)
name = fields.CharField(max_length=200)

View File

@@ -1,8 +1,11 @@
import pytest
from aerich.ddl.mysql import MysqlDDL
from aerich.ddl.postgres import PostgresDDL
from aerich.ddl.sqlite import SqliteDDL
from aerich.exceptions import NotSupportError
from aerich.migrate import Migrate
from tests.models import Category
from tests.models import Category, User
def test_create_table():
@@ -63,18 +66,81 @@ def test_add_column():
def test_modify_column():
ret = Migrate.ddl.modify_column(Category, Category._meta.fields_map.get("name"))
if isinstance(Migrate.ddl, MysqlDDL):
assert ret == "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200) NOT NULL"
if isinstance(Migrate.ddl, SqliteDDL):
with pytest.raises(NotSupportError):
ret0 = Migrate.ddl.modify_column(Category, Category._meta.fields_map.get("name"))
ret1 = Migrate.ddl.modify_column(User, User._meta.fields_map.get("is_active"))
else:
assert ret == 'ALTER TABLE "category" MODIFY COLUMN "name" VARCHAR(200) NOT NULL'
ret0 = Migrate.ddl.modify_column(Category, Category._meta.fields_map.get("name"))
ret1 = Migrate.ddl.modify_column(User, User._meta.fields_map.get("is_active"))
if isinstance(Migrate.ddl, MysqlDDL):
assert ret0 == "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200) NOT NULL"
elif isinstance(Migrate.ddl, PostgresDDL):
assert ret0 == 'ALTER TABLE "category" ALTER COLUMN "name" TYPE VARCHAR(200)'
if isinstance(Migrate.ddl, MysqlDDL):
assert (
ret1
== "ALTER TABLE `user` MODIFY COLUMN `is_active` BOOL NOT NULL COMMENT 'Is Active' DEFAULT 1"
)
elif isinstance(Migrate.ddl, PostgresDDL):
assert ret1 == 'ALTER TABLE "user" ALTER COLUMN "is_active" TYPE BOOL'
def test_alter_column_default():
ret = Migrate.ddl.alter_column_default(Category, Category._meta.fields_map.get("name"))
if isinstance(Migrate.ddl, PostgresDDL):
assert ret == 'ALTER TABLE "category" ALTER COLUMN "name" DROP DEFAULT'
else:
assert ret is None
ret = Migrate.ddl.alter_column_default(Category, Category._meta.fields_map.get("created_at"))
if isinstance(Migrate.ddl, PostgresDDL):
assert (
ret == 'ALTER TABLE "category" ALTER COLUMN "created_at" SET DEFAULT CURRENT_TIMESTAMP'
)
else:
assert ret is None
ret = Migrate.ddl.alter_column_default(User, User._meta.fields_map.get("avatar"))
if isinstance(Migrate.ddl, PostgresDDL):
assert ret == 'ALTER TABLE "user" ALTER COLUMN "avatar" SET DEFAULT \'\''
else:
assert ret is None
def test_alter_column_null():
ret = Migrate.ddl.alter_column_null(Category, Category._meta.fields_map.get("name"))
if isinstance(Migrate.ddl, PostgresDDL):
assert ret == 'ALTER TABLE "category" ALTER COLUMN "name" SET NOT NULL'
else:
assert ret is None
def test_set_comment():
ret = Migrate.ddl.set_comment(Category, Category._meta.fields_map.get("name"))
if isinstance(Migrate.ddl, PostgresDDL):
assert ret == 'COMMENT ON COLUMN "category"."name" IS NULL'
else:
assert ret is None
ret = Migrate.ddl.set_comment(Category, Category._meta.fields_map.get("user"))
if isinstance(Migrate.ddl, PostgresDDL):
assert ret == 'COMMENT ON COLUMN "category"."user" IS \'User\''
else:
assert ret is None
def test_drop_column():
ret = Migrate.ddl.drop_column(Category, "name")
if isinstance(Migrate.ddl, SqliteDDL):
with pytest.raises(NotSupportError):
ret = Migrate.ddl.drop_column(Category, "name")
else:
ret = Migrate.ddl.drop_column(Category, "name")
if isinstance(Migrate.ddl, MysqlDDL):
assert ret == "ALTER TABLE `category` DROP COLUMN `name`"
else:
elif isinstance(Migrate.ddl, PostgresDDL):
assert ret == 'ALTER TABLE "category" DROP COLUMN "name"'
@@ -86,6 +152,12 @@ def test_add_index():
assert (
index_u == "ALTER TABLE `category` ADD UNIQUE INDEX `uid_category_name_8b0cb9` (`name`)"
)
elif isinstance(Migrate.ddl, PostgresDDL):
assert index == 'CREATE INDEX "idx_category_name_8b0cb9" ON "category" ("name")'
assert (
index_u
== 'ALTER TABLE "category" ADD CONSTRAINT "uid_category_name_8b0cb9" UNIQUE ("name")'
)
else:
assert index == 'ALTER TABLE "category" ADD INDEX "idx_category_name_8b0cb9" ("name")'
assert (
@@ -95,10 +167,16 @@ def test_add_index():
def test_drop_index():
ret = Migrate.ddl.drop_index(Category, ["name"])
ret_u = Migrate.ddl.drop_index(Category, ["name"], True)
if isinstance(Migrate.ddl, MysqlDDL):
assert ret == "ALTER TABLE `category` DROP INDEX `idx_category_name_8b0cb9`"
assert ret_u == "ALTER TABLE `category` DROP INDEX `uid_category_name_8b0cb9`"
elif isinstance(Migrate.ddl, PostgresDDL):
assert ret == 'DROP INDEX "idx_category_name_8b0cb9"'
assert ret_u == 'ALTER TABLE "category" DROP CONSTRAINT "uid_category_name_8b0cb9"'
else:
assert ret == 'ALTER TABLE "category" DROP INDEX "idx_category_name_8b0cb9"'
assert ret_u == 'ALTER TABLE "category" DROP INDEX "uid_category_name_8b0cb9"'
def test_add_fk():
@@ -119,5 +197,7 @@ def test_drop_fk():
ret = Migrate.ddl.drop_fk(Category, Category._meta.fields_map.get("user"))
if isinstance(Migrate.ddl, MysqlDDL):
assert ret == "ALTER TABLE `category` DROP FOREIGN KEY `fk_category_user_e2e3874c`"
elif isinstance(Migrate.ddl, PostgresDDL):
assert ret == 'ALTER TABLE "category" DROP CONSTRAINT "fk_category_user_e2e3874c"'
else:
assert ret == 'ALTER TABLE "category" DROP FOREIGN KEY "fk_category_user_e2e3874c"'

View File

@@ -1,30 +1,79 @@
import pytest
from pytest_mock import MockerFixture
from tortoise import Tortoise
from aerich.ddl.mysql import MysqlDDL
from aerich.ddl.postgres import PostgresDDL
from aerich.ddl.sqlite import SqliteDDL
from aerich.exceptions import NotSupportError
from aerich.migrate import Migrate
def test_migrate():
def test_migrate(mocker: MockerFixture):
mocker.patch("click.prompt", return_value=True)
apps = Tortoise.apps
models = apps.get("models")
diff_models = apps.get("diff_models")
Migrate.diff_models(diff_models, models)
Migrate.diff_models(models, diff_models, False)
if isinstance(Migrate.ddl, SqliteDDL):
with pytest.raises(NotSupportError):
Migrate.diff_models(models, diff_models, False)
else:
Migrate.diff_models(models, diff_models, False)
Migrate._merge_operators()
if isinstance(Migrate.ddl, MysqlDDL):
assert Migrate.upgrade_operators == [
"ALTER TABLE `email` DROP FOREIGN KEY `fk_email_user_5b58673d`",
"ALTER TABLE `category` ADD `name` VARCHAR(200) NOT NULL",
"ALTER TABLE `user` ADD UNIQUE INDEX `uid_user_usernam_9987ab` (`username`)",
"ALTER TABLE `user` RENAME COLUMN `last_login_at` TO `last_login`",
]
assert Migrate.downgrade_operators == [
"ALTER TABLE `category` DROP COLUMN `name`",
"ALTER TABLE `user` DROP INDEX `uid_user_usernam_9987ab`",
"ALTER TABLE `user` RENAME COLUMN `last_login` TO `last_login_at`",
"ALTER TABLE `email` ADD CONSTRAINT `fk_email_user_5b58673d` FOREIGN KEY "
"(`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE",
]
else:
elif isinstance(Migrate.ddl, PostgresDDL):
assert Migrate.upgrade_operators == [
'ALTER TABLE "email" DROP CONSTRAINT "fk_email_user_5b58673d"',
'ALTER TABLE "category" ADD "name" VARCHAR(200) NOT NULL',
'ALTER TABLE "user" ADD UNIQUE INDEX "uid_user_usernam_9987ab" ("username")',
'ALTER TABLE "user" ADD CONSTRAINT "uid_user_usernam_9987ab" UNIQUE ("username")',
'ALTER TABLE "user" RENAME COLUMN "last_login_at" TO "last_login"',
]
assert Migrate.downgrade_operators == [
'ALTER TABLE "category" DROP COLUMN "name"',
'ALTER TABLE "user" DROP INDEX "uid_user_usernam_9987ab"',
'ALTER TABLE "user" DROP CONSTRAINT "uid_user_usernam_9987ab"',
'ALTER TABLE "user" RENAME COLUMN "last_login" TO "last_login_at"',
'ALTER TABLE "email" ADD CONSTRAINT "fk_email_user_5b58673d" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE',
]
elif isinstance(Migrate.ddl, SqliteDDL):
assert Migrate.upgrade_operators == [
'ALTER TABLE "email" DROP FOREIGN KEY "fk_email_user_5b58673d"',
'ALTER TABLE "category" ADD "name" VARCHAR(200) NOT NULL',
'ALTER TABLE "user" ADD UNIQUE INDEX "uid_user_usernam_9987ab" ("username")',
'ALTER TABLE "user" RENAME COLUMN "last_login_at" TO "last_login"',
]
assert Migrate.downgrade_operators == []
def test_sort_all_version_files(mocker):
mocker.patch(
"os.listdir",
return_value=[
"1_datetime_update.json",
"11_datetime_update.json",
"10_datetime_update.json",
"2_datetime_update.json",
],
)
Migrate.migrate_location = "."
assert Migrate.get_all_version_files() == [
"1_datetime_update.json",
"2_datetime_update.json",
"10_datetime_update.json",
"11_datetime_update.json",
]
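The expected ordering here is numeric on the leading version number rather than plain lexicographic sorting; a minimal sketch of such a sort key (the helper name is the editor's, not aerich's):

def version_key(filename: str) -> int:
    # "10_datetime_update.json" -> 10
    return int(filename.split("_", 1)[0])

files = [
    "1_datetime_update.json",
    "11_datetime_update.json",
    "10_datetime_update.json",
    "2_datetime_update.json",
]
assert sorted(files, key=version_key) == [
    "1_datetime_update.json",
    "2_datetime_update.json",
    "10_datetime_update.json",
    "11_datetime_update.json",
]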