Compare commits
422 Commits
Author | SHA1 | Date | |
---|---|---|---|
|
9c3ba7e273 | ||
|
074ba9b743 | ||
|
5d9adbdb54 | ||
|
8609435815 | ||
|
a624d1b43b | ||
|
e299f8e1d6 | ||
|
db0cf656fc | ||
|
49bfbf4e6b | ||
|
0364ae3f83 | ||
|
91adf9334e | ||
|
41df464e8b | ||
|
c35282c2a3 | ||
|
557271c8e1 | ||
|
7f8c5dcddc | ||
|
1793dab43d | ||
|
6bdfdfc6db | ||
|
0be5c1b545 | ||
|
d6b35ab0ac | ||
|
b46ceafb2e | ||
|
ac847ba616 | ||
|
f5d7d56fa5 | ||
|
d6a51bd20e | ||
|
c1dea4e846 | ||
|
5e8a7c7e91 | ||
|
7d22518c74 | ||
|
f93faa8afb | ||
|
1acb9ed1e7 | ||
|
69ce0cafa1 | ||
|
4fc7f324d4 | ||
|
d8addadb37 | ||
|
0780919ef3 | ||
|
5af8c9cd56 | ||
|
56da0e7e3c | ||
|
6270c4781e | ||
|
12d0a5dad1 | ||
|
56eff1b22f | ||
|
e4a3863f80 | ||
|
5572876714 | ||
|
3d840395f1 | ||
|
accceef24f | ||
|
9c81bc6036 | ||
|
c2ebe9b5e4 | ||
|
8cefe68c9b | ||
|
44025823ee | ||
|
252cb97767 | ||
|
ac3ef9e2eb | ||
|
5b04b4422d | ||
|
b2f4029a4a | ||
|
4e46d9d969 | ||
|
c0fd3ec63c | ||
|
103470f4c1 | ||
|
dc020358b6 | ||
|
095eb48196 | ||
|
fac1de6299 | ||
|
e049fcee26 | ||
|
ee144d4a9b | ||
|
dbf96a17d3 | ||
|
15d56121ef | ||
|
4851ecfe82 | ||
|
ad0e7006bc | ||
|
27b29e401b | ||
|
7bb35c10e4 | ||
|
ed113d491e | ||
|
9e46fbf55d | ||
|
fc68f99c86 | ||
|
480087df07 | ||
|
24a2087b78 | ||
|
bceeb236c2 | ||
|
c42fdab74d | ||
|
ceb1e0ffef | ||
|
13dd44bef7 | ||
|
219633a926 | ||
|
e6302a920a | ||
|
79a77d368f | ||
|
4a1fc4cfa0 | ||
|
aee706e29b | ||
|
572c13f9dd | ||
|
7b733495fb | ||
|
c0c217392c | ||
|
c7a3d164cb | ||
|
50add58981 | ||
|
f3b6f8f264 | ||
|
d33638471b | ||
|
e764bb56f7 | ||
|
84d31d63f6 | ||
|
234495d291 | ||
|
e971653851 | ||
|
58d31b3a05 | ||
|
affffbdae3 | ||
|
6466a852c8 | ||
|
a917f253c9 | ||
|
dd11bed5a0 | ||
|
8756f64e3f | ||
|
1addda8178 | ||
|
716638752b | ||
|
51117867a6 | ||
|
b25c7671bb | ||
|
b63fbcc7a4 | ||
|
4370b5ed08 | ||
|
2b2e465362 | ||
|
ede53ade86 | ||
|
ad54b5e9dd | ||
|
b1ff2418f5 | ||
|
01264f3f27 | ||
|
ea234a5799 | ||
|
b724f24f1a | ||
|
2bc23103dc | ||
|
6d83c370ad | ||
|
e729bb9b60 | ||
|
8edd834da6 | ||
|
4adc89d441 | ||
|
fd4b9fe7d3 | ||
|
467406ed20 | ||
|
484b5900ce | ||
|
b8b6df0b65 | ||
|
f0bc3126e9 | ||
|
dbc0d9e7ef | ||
|
818dd29991 | ||
|
e199e03b53 | ||
|
d79dc25ee8 | ||
|
c6d51a4dcf | ||
|
241b30a710 | ||
|
8cf50c58d7 | ||
|
1c9b65cc37 | ||
|
3fbf9febfb | ||
|
7b6545d4e1 | ||
|
52b50a2161 | ||
|
90943a473c | ||
|
d7ecd97e88 | ||
|
20aebc4413 | ||
|
f8e1f9ff44 | ||
|
ab31445fb2 | ||
|
28d19a4b7b | ||
|
9da99824fe | ||
|
75db7cea60 | ||
|
d777c9c278 | ||
|
e9b76bdd35 | ||
|
8b7864d886 | ||
|
bef45941f2 | ||
|
7b472d7a84 | ||
|
1f0a6dfb50 | ||
|
36282f123f | ||
|
3cd4e24050 | ||
|
f8c2f1b551 | ||
|
131d97a3d6 | ||
|
1a0371e977 | ||
|
e5b092fd08 | ||
|
7a109f3c79 | ||
|
8c2ecbaef1 | ||
|
b141363c51 | ||
|
9dd474d79f | ||
|
e4bb9d838e | ||
|
029d522c79 | ||
|
d6627906c7 | ||
|
3c88833154 | ||
|
8f68f08eba | ||
|
60ba6963fd | ||
|
4c35c44bd2 | ||
|
bdeaf5495e | ||
|
db33059ec9 | ||
|
44b96058f8 | ||
|
abff753b6a | ||
|
dcd8441a05 | ||
|
b4a735b814 | ||
|
83ba13e99a | ||
|
d7b1c07d13 | ||
|
1ac16188fc | ||
|
4abc464ce0 | ||
|
d4430cec0d | ||
|
0b01fa38d8 | ||
|
801dde15be | ||
|
75480e2041 | ||
|
45129cef9f | ||
|
3a0dd2355d | ||
|
0e71bc16ae | ||
|
c39462820c | ||
|
f15cbaf9e0 | ||
|
15131469df | ||
|
c60c1610f0 | ||
|
63e8d06157 | ||
|
68ef8ac676 | ||
|
8b5cf6faa0 | ||
|
40c7ef7fd6 | ||
|
7a826df43f | ||
|
b1b9cc1454 | ||
|
fac00d45cc | ||
|
6f7893d376 | ||
|
b1521c4cc7 | ||
|
24c1f4cb7d | ||
|
661f241dac | ||
|
01787558d6 | ||
|
699b0321a4 | ||
|
4a83021892 | ||
|
af63221875 | ||
|
359525716c | ||
|
7d3eb2e151 | ||
|
d8abf79449 | ||
|
aa9f40ae27 | ||
|
79b7ae343a | ||
|
6f5a9ab78c | ||
|
1e5a83c281 | ||
|
180420843d | ||
|
58f66b91cf | ||
|
064d7ff675 | ||
|
2da794d823 | ||
|
77005f3793 | ||
|
5a873b8b69 | ||
|
3989b7c674 | ||
|
694b05356f | ||
|
919d56c936 | ||
|
7bcf9b2fed | ||
|
9f663299cf | ||
|
28dbdf2663 | ||
|
e71a4b60a5 | ||
|
62840136be | ||
|
185514f711 | ||
|
8e783e031e | ||
|
10b7272ca8 | ||
|
0c763c6024 | ||
|
c6371a5c16 | ||
|
1dbf9185b6 | ||
|
9bf2de0b9a | ||
|
bf1cf21324 | ||
|
8b08329493 | ||
|
5bc7d23d95 | ||
|
a253aa96cb | ||
|
15a6e874dd | ||
|
19a5dcbf3f | ||
|
922e3eef16 | ||
|
44fd2fe6ae | ||
|
b147859960 | ||
|
793cf2532c | ||
|
fa85e05d1d | ||
|
3f52ac348b | ||
|
f8aa7a8f34 | ||
|
44d520cc82 | ||
|
364735f804 | ||
|
505d361597 | ||
|
a19edd3a35 | ||
|
84d1f78019 | ||
|
8fb07a6c9e | ||
|
54da8b22af | ||
|
4c0308ff22 | ||
|
38c4a15661 | ||
|
52151270e0 | ||
|
49897dc4fd | ||
|
d4ad0e270f | ||
|
e74fc304a5 | ||
|
14d20455e6 | ||
|
bd9ecfd6e1 | ||
|
de8500b9a1 | ||
|
90b47c5af7 | ||
|
02fe5a9d31 | ||
|
be41a1332a | ||
|
09661c1d46 | ||
|
abfa60133f | ||
|
048e428eac | ||
|
38a3df9b5a | ||
|
0d94b22b3f | ||
|
f1f0074255 | ||
|
e3a14a2f60 | ||
|
608ff8f071 | ||
|
d3a1342293 | ||
|
01e3de9522 | ||
|
c6c398fdf0 | ||
|
c60bdd290e | ||
|
f443dc68db | ||
|
36f84702b7 | ||
|
b4cc2de0e3 | ||
|
4780b90c1c | ||
|
cd176c1fd6 | ||
|
c2819fc8dc | ||
|
530e7cfce5 | ||
|
47824a100b | ||
|
78a15f9f19 | ||
|
5ae8b9e85f | ||
|
55a6d4bbc7 | ||
|
c5535f16e1 | ||
|
840cd71e44 | ||
|
e0d52b1210 | ||
|
4dc45f723a | ||
|
d2e0a68351 | ||
|
ee6cc20c7d | ||
|
4e917495a0 | ||
|
bfa66f6dd4 | ||
|
f00715d4c4 | ||
|
6e3105690a | ||
|
c707f7ecb2 | ||
|
0bbc471e00 | ||
|
fb6cc62047 | ||
|
e9ceaf471f | ||
|
85fc3b2aa2 | ||
|
a677d506a9 | ||
|
9879004fee | ||
|
5760fe2040 | ||
|
b229c30558 | ||
|
5d2f1604c3 | ||
|
499c4e1c02 | ||
|
1463ee30bc | ||
|
3b801932f5 | ||
|
c2eb4dc9e3 | ||
|
5927febd0c | ||
|
a1c10ff330 | ||
|
f2013c931a | ||
|
b21b954d32 | ||
|
f5588a35c5 | ||
|
f5dff84476 | ||
|
e399821116 | ||
|
648f25a951 | ||
|
fa73e132e2 | ||
|
1bac33cd33 | ||
|
4e76f12ccf | ||
|
724379700e | ||
|
bb929f2b55 | ||
|
6339dc86a8 | ||
|
768747140a | ||
|
1fde3cd04e | ||
|
d0ce545ff5 | ||
|
09b89ed7d0 | ||
|
86c8382593 | ||
|
48e3ff48a3 | ||
|
1bf6d45bb0 | ||
|
342f4cdd3b | ||
|
8cace21fde | ||
|
9889d9492b | ||
|
823368aea8 | ||
|
6b1ad46cf1 | ||
|
ce8c0b1f06 | ||
|
43922d3734 | ||
|
48c5318737 | ||
|
002221e557 | ||
|
141d7205bf | ||
|
af4d4be19a | ||
|
3b4d9b47ce | ||
|
4b0c4ae7d0 | ||
|
dc821d8a02 | ||
|
d18a6b5be0 | ||
|
1e56a70f21 | ||
|
ab1e1aab75 | ||
|
00dd04f97d | ||
|
fc914acc80 | ||
|
ac03ecb002 | ||
|
235ef3f7ea | ||
|
e00eb7f3d9 | ||
|
d6c8941676 | ||
|
cf062c9310 | ||
|
309adec8c9 | ||
|
8674142ba8 | ||
|
cda9bd1c47 | ||
|
198e4e0032 | ||
|
1b440477a2 | ||
|
1263c6f735 | ||
|
6504384879 | ||
|
17ab0a1421 | ||
|
e1ffcb609b | ||
|
18cb75f555 | ||
|
dfe13ea250 | ||
|
b97ce0ff2f | ||
|
21001c0eda | ||
|
9bd96a9487 | ||
|
5bdaa32a9e | ||
|
d74e7b5630 | ||
|
119b15d597 | ||
|
f93ab6bbff | ||
|
bd8eb94a6e | ||
|
2fcb2626fd | ||
|
3728db4279 | ||
|
6ac1fb5332 | ||
|
87a17d443c | ||
|
55f18a69ed | ||
|
86a9c4cedd | ||
|
86e1d3defb | ||
|
01fa7fbbdb | ||
|
90196eb1bf | ||
|
3c111792a9 | ||
|
77e9d7bc91 | ||
|
fe2ddff88b | ||
|
0d23297f46 | ||
|
6be6d55e5b | ||
|
25674bc73a | ||
|
1715eda1a3 | ||
|
f5775049dd | ||
|
6fd0f8a42f | ||
|
f52dc009af | ||
|
9248d456f9 | ||
|
c24f2f6b09 | ||
|
19c9c2c30f | ||
|
73b75349ee | ||
|
7bc553221a | ||
|
7413a05e19 | ||
|
bf194ca8ce | ||
|
b06da0223a | ||
|
83554cdc5d | ||
|
6c76bfccad | ||
|
a1746e457c | ||
|
2a0435dea9 | ||
|
e87f67f1e1 | ||
|
7b4b7ac749 | ||
|
5b9b51db3f | ||
|
ffeee3c901 | ||
|
b4366d2427 | ||
|
ec1c80f3a9 | ||
|
d2083632eb | ||
|
dc8b4c2263 | ||
|
2fc43cb0d8 | ||
|
cb5dffeeb8 | ||
|
125389461f | ||
|
c09c878eaf | ||
|
ef3e0c11d5 | ||
|
881f70f748 | ||
|
615d9747dc | ||
|
6ffca1a0c7 | ||
|
95e41720cb | ||
|
40c0008e6e | ||
|
ce75e55d60 | ||
|
4d4f951e09 | ||
|
354e861dad | ||
|
3a76486993 | ||
|
4d0a6b4de6 | ||
|
c01d2993e0 | ||
|
bab5ebf2f0 | ||
|
7e5cefd7d6 |
1
.github/FUNDING.yml
vendored
Normal file
1
.github/FUNDING.yml
vendored
Normal file
@ -0,0 +1 @@
|
||||
custom: ["https://sponsor.long2ice.io"]
|
109
.github/workflows/ci.yml
vendored
Normal file
109
.github/workflows/ci.yml
vendored
Normal file
@ -0,0 +1,109 @@
|
||||
name: ci
|
||||
on:
|
||||
push:
|
||||
branches-ignore:
|
||||
- main
|
||||
pull_request:
|
||||
branches-ignore:
|
||||
- main
|
||||
jobs:
|
||||
ci:
|
||||
runs-on: ubuntu-latest
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:latest
|
||||
ports:
|
||||
- 5432:5432
|
||||
env:
|
||||
POSTGRES_PASSWORD: 123456
|
||||
POSTGRES_USER: postgres
|
||||
options: --health-cmd=pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
|
||||
tortoise-orm:
|
||||
- tortoise021
|
||||
- tortoise022
|
||||
- tortoise023
|
||||
- tortoise024
|
||||
# TODO: add dev back when drop python3.8 support
|
||||
# - tortoisedev
|
||||
steps:
|
||||
- name: Start MySQL
|
||||
run: sudo systemctl start mysql.service
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.cache/pip
|
||||
key: ${{ runner.os }}-pip-${{ hashFiles('**/poetry.lock') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pip-
|
||||
- name: Install and configure Poetry
|
||||
run: |
|
||||
pip install -U pip
|
||||
if [[ "${{ matrix.python-version }}" == "3.8" ]]; then
|
||||
# poetry2.0+ does not support installed by python3.8, but can manage project using py38
|
||||
python3.12 -m pip install "poetry>=2.0"
|
||||
else
|
||||
pip install "poetry>=2.0"
|
||||
fi
|
||||
poetry env use python${{ matrix.python-version }}
|
||||
- name: Install dependencies and check style
|
||||
run: poetry run make check
|
||||
- name: Install TortoiseORM v0.21
|
||||
if: matrix.tortoise-orm == 'tortoise021'
|
||||
run: poetry run pip install --upgrade "tortoise-orm>=0.21,<0.22"
|
||||
- name: Install TortoiseORM v0.22
|
||||
if: matrix.tortoise-orm == 'tortoise022'
|
||||
run: poetry run pip install --upgrade "tortoise-orm>=0.22,<0.23"
|
||||
- name: Install TortoiseORM v0.23
|
||||
if: matrix.tortoise-orm == 'tortoise023'
|
||||
run: poetry run pip install --upgrade "tortoise-orm>=0.23,<0.24"
|
||||
- name: Install TortoiseORM v0.24
|
||||
if: matrix.tortoise-orm == 'tortoise024'
|
||||
run: |
|
||||
if [[ "${{ matrix.python-version }}" == "3.8" ]]; then
|
||||
echo "Skip test for tortoise v0.24 as it does not support Python3.8"
|
||||
else
|
||||
poetry run pip install --upgrade "tortoise-orm>=0.24,<0.25"
|
||||
fi
|
||||
- name: Install TortoiseORM develop branch
|
||||
if: matrix.tortoise-orm == 'tortoisedev'
|
||||
run: |
|
||||
if [[ "${{ matrix.python-version }}" == "3.8" ]]; then
|
||||
echo "Skip test for tortoise develop branch as it does not support Python3.8"
|
||||
else
|
||||
poetry run pip uninstall -y tortoise-orm
|
||||
poetry run pip install --upgrade "git+https://github.com/tortoise/tortoise-orm"
|
||||
fi
|
||||
- name: CI
|
||||
env:
|
||||
MYSQL_PASS: root
|
||||
MYSQL_HOST: 127.0.0.1
|
||||
MYSQL_PORT: 3306
|
||||
POSTGRES_PASS: 123456
|
||||
POSTGRES_HOST: 127.0.0.1
|
||||
POSTGRES_PORT: 5432
|
||||
run: poetry run make _testall
|
||||
- name: Verify aiomysql support
|
||||
# Only check the latest version of tortoise
|
||||
if: matrix.tortoise-orm == 'tortoise024'
|
||||
run: |
|
||||
poetry run pip uninstall -y asyncmy
|
||||
poetry run make test_mysql
|
||||
poetry run pip install asyncmy
|
||||
env:
|
||||
MYSQL_PASS: root
|
||||
MYSQL_HOST: 127.0.0.1
|
||||
MYSQL_PORT: 3306
|
||||
- name: Verify psycopg support
|
||||
# Only check the latest version of tortoise
|
||||
if: matrix.tortoise-orm == 'tortoise024'
|
||||
run: poetry run make test_psycopg
|
||||
env:
|
||||
POSTGRES_PASS: 123456
|
||||
POSTGRES_HOST: 127.0.0.1
|
||||
POSTGRES_PORT: 5432
|
13
.github/workflows/pypi.yml
vendored
13
.github/workflows/pypi.yml
vendored
@ -7,15 +7,18 @@ jobs:
|
||||
publish:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v1
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.x'
|
||||
- name: Build dists
|
||||
- name: Install and configure Poetry
|
||||
run: |
|
||||
python3 setup.py sdist
|
||||
pip install -U pip poetry
|
||||
poetry config virtualenvs.create false
|
||||
- name: Build dists
|
||||
run: make build
|
||||
- name: Pypi Publish
|
||||
uses: pypa/gh-action-pypi-publish@master
|
||||
with:
|
||||
user: __token__
|
||||
password: ${{ secrets.pypi_password }}
|
||||
password: ${{ secrets.pypi_password }}
|
||||
|
34
.github/workflows/test.yml
vendored
34
.github/workflows/test.yml
vendored
@ -1,34 +0,0 @@
|
||||
name: test
|
||||
on: [push, pull_request]
|
||||
jobs:
|
||||
testall:
|
||||
runs-on: ubuntu-latest
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:latest
|
||||
ports:
|
||||
- 5432:5432
|
||||
env:
|
||||
POSTGRES_PASSWORD: 123456
|
||||
POSTGRES_USER: postgres
|
||||
options: --health-cmd=pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
|
||||
steps:
|
||||
- name: Start MySQL
|
||||
run: sudo systemctl start mysql.service
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.x'
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install -r requirements-dev.txt
|
||||
- name: CI
|
||||
env:
|
||||
MYSQL_PASS: root
|
||||
MYSQL_HOST: 127.0.0.1
|
||||
MYSQL_PORT: 3306
|
||||
POSTGRES_PASS: 123456
|
||||
POSTGRES_HOST: 127.0.0.1
|
||||
POSTGRES_PORT: 5432
|
||||
run: make testall
|
5
.gitignore
vendored
5
.gitignore
vendored
@ -143,4 +143,7 @@ cython_debug/
|
||||
.idea
|
||||
migrations
|
||||
aerich.ini
|
||||
src
|
||||
src
|
||||
.vscode
|
||||
.DS_Store
|
||||
.python-version
|
338
CHANGELOG.md
Normal file
338
CHANGELOG.md
Normal file
@ -0,0 +1,338 @@
|
||||
# ChangeLog
|
||||
|
||||
## 0.8
|
||||
|
||||
### [0.8.3]**(Unreleased)**
|
||||
|
||||
#### Fixed
|
||||
- fix: `aerich init-db` process is suspended. ([#435])
|
||||
|
||||
[#435]: https://github.com/tortoise/aerich/pull/435
|
||||
|
||||
### [0.8.2](../../releases/tag/v0.8.2) - 2025-02-28
|
||||
|
||||
#### Added
|
||||
- Support changes `max_length` or int type for primary key field. ([#428])
|
||||
- feat: support psycopg. ([#425])
|
||||
- Support run `poetry add aerich` in project that inited by poetry v2. ([#424])
|
||||
- feat: support command `python -m aerich`. ([#417])
|
||||
- feat: add --fake to upgrade/downgrade. ([#398])
|
||||
- Support ignore table by settings `managed=False` in `Meta` class. ([#397])
|
||||
|
||||
#### Fixed
|
||||
- fix: aerich migrate raises tortoise.exceptions.FieldError when `index.INDEX_TYPE` is not empty. ([#415])
|
||||
- No migration occurs as expected when adding `unique=True` to indexed field. ([#404])
|
||||
- fix: inspectdb raise KeyError 'int2' for smallint. ([#401])
|
||||
- fix: inspectdb not match data type 'DOUBLE' and 'CHAR' for MySQL. ([#187])
|
||||
|
||||
### Changed
|
||||
- Refactored version management to use `importlib.metadata.version(__package__)` instead of hardcoded version string ([#412])
|
||||
|
||||
[#397]: https://github.com/tortoise/aerich/pull/397
|
||||
[#398]: https://github.com/tortoise/aerich/pull/398
|
||||
[#401]: https://github.com/tortoise/aerich/pull/401
|
||||
[#404]: https://github.com/tortoise/aerich/pull/404
|
||||
[#412]: https://github.com/tortoise/aerich/pull/412
|
||||
[#415]: https://github.com/tortoise/aerich/pull/415
|
||||
[#417]: https://github.com/tortoise/aerich/pull/417
|
||||
[#424]: https://github.com/tortoise/aerich/pull/424
|
||||
[#425]: https://github.com/tortoise/aerich/pull/425
|
||||
|
||||
### [0.8.1](../../releases/tag/v0.8.1) - 2024-12-27
|
||||
|
||||
#### Fixed
|
||||
- fix: add o2o field does not create constraint when migrating. ([#396])
|
||||
- Migration with duplicate renaming of columns in some cases. ([#395])
|
||||
- fix: intermediate table for m2m relation not created. ([#394])
|
||||
- Migrate add m2m field with custom through generate duplicated table. ([#393])
|
||||
- Migrate drop the wrong m2m field when model have multi m2m fields. ([#376])
|
||||
- KeyError raised when removing or renaming an existing model. ([#386])
|
||||
- fix: error when there is `__init__.py` in the migration folder. ([#272])
|
||||
- Setting null=false on m2m field causes migration to fail. ([#334])
|
||||
- Fix NonExistentKey when running `aerich init` without `[tool]` section in config file. ([#284])
|
||||
- Fix configuration file reading error when containing Chinese characters. ([#286])
|
||||
- sqlite: failed to create/drop index. ([#302])
|
||||
- PostgreSQL: Cannot drop constraint after deleting or rename FK on a model. ([#378])
|
||||
- Fix create/drop indexes in every migration. ([#377])
|
||||
- Sort m2m fields before comparing them with diff. ([#271])
|
||||
|
||||
#### Changed
|
||||
- Allow run `aerich init-db` with empty migration directories instead of abort with warnings. ([#286])
|
||||
- Add version constraint(>=0.21) for tortoise-orm. ([#388])
|
||||
- Move `tomlkit` to optional and support `pip install aerich[toml]`. ([#392])
|
||||
|
||||
[#396]: https://github.com/tortoise/aerich/pull/396
|
||||
[#395]: https://github.com/tortoise/aerich/pull/395
|
||||
[#394]: https://github.com/tortoise/aerich/pull/394
|
||||
[#393]: https://github.com/tortoise/aerich/pull/393
|
||||
[#392]: https://github.com/tortoise/aerich/pull/392
|
||||
[#388]: https://github.com/tortoise/aerich/pull/388
|
||||
[#386]: https://github.com/tortoise/aerich/pull/386
|
||||
[#378]: https://github.com/tortoise/aerich/pull/378
|
||||
[#377]: https://github.com/tortoise/aerich/pull/377
|
||||
[#376]: https://github.com/tortoise/aerich/pull/376
|
||||
[#334]: https://github.com/tortoise/aerich/pull/334
|
||||
[#302]: https://github.com/tortoise/aerich/pull/302
|
||||
[#286]: https://github.com/tortoise/aerich/pull/286
|
||||
[#284]: https://github.com/tortoise/aerich/pull/284
|
||||
[#272]: https://github.com/tortoise/aerich/pull/272
|
||||
[#271]: https://github.com/tortoise/aerich/pull/271
|
||||
|
||||
### [0.8.0](../../releases/tag/v0.8.0) - 2024-12-04
|
||||
|
||||
- Fix the issue of parameter concatenation when generating ORM with inspectdb (#331)
|
||||
- Fix KeyError when deleting a field with unique=True. (#364)
|
||||
- Correct the click import. (#360)
|
||||
- Improve CLI help text and output. (#355)
|
||||
- Fix mysql drop unique index raises OperationalError. (#346)
|
||||
|
||||
**Upgrade note:**
|
||||
1. Use column name as unique key name for mysql
|
||||
2. Drop support for Python3.7
|
||||
|
||||
## 0.7
|
||||
|
||||
### [0.7.2](../../releases/tag/v0.7.2) - 2023-07-20
|
||||
|
||||
- Support virtual fields.
|
||||
- Fix modify multiple times. (#279)
|
||||
- Added `-i` and `--in-transaction` options to `aerich migrate` command. (#296)
|
||||
- Fix generates two semicolons in a row. (#301)
|
||||
|
||||
### 0.7.1
|
||||
|
||||
- Fix syntax error with python3.8.10. (#265)
|
||||
- Fix sql generate error. (#263)
|
||||
- Fix initialize an empty database. (#267)
|
||||
|
||||
### 0.7.1rc1
|
||||
|
||||
- Fix postgres sql error (#263)
|
||||
|
||||
### 0.7.0
|
||||
|
||||
**Now aerich use `.py` file to record versions.**
|
||||
|
||||
Upgrade Note:
|
||||
|
||||
1. Drop `aerich` table
|
||||
2. Delete `migrations/models` folder
|
||||
3. Run `aerich init-db`
|
||||
|
||||
- Improve `inspectdb` adding support to `postgresql::numeric` data type
|
||||
- Add support for dynamically load DDL classes easing to add support to
|
||||
new databases without changing `Migrate` class logic
|
||||
- Fix decimal field change. (#246)
|
||||
- Support add/remove field with index.
|
||||
|
||||
## 0.6
|
||||
|
||||
### 0.6.3
|
||||
|
||||
- Improve `inspectdb` and support `postgres` & `sqlite`.
|
||||
|
||||
### 0.6.2
|
||||
|
||||
- Support migration for specified index. (#203)
|
||||
|
||||
### 0.6.1
|
||||
|
||||
- Fix `pyproject.toml` not existing error. (#217)
|
||||
|
||||
### 0.6.0
|
||||
|
||||
- Change default config file from `aerich.ini` to `pyproject.toml`. (#197)
|
||||
|
||||
**Upgrade note:**
|
||||
1. Run `aerich init -t config.TORTOISE_ORM`.
|
||||
2. Remove `aerich.ini`.
|
||||
- Remove `pydantic` dependency. (#198)
|
||||
- `inspectdb` support `DATE`. (#215)
|
||||
|
||||
## 0.5
|
||||
|
||||
### 0.5.8
|
||||
|
||||
- Support `indexes` change. (#193)
|
||||
|
||||
### 0.5.7
|
||||
|
||||
- Fix no module found error. (#188) (#189)
|
||||
|
||||
### 0.5.6
|
||||
|
||||
- Add `Command` class. (#148) (#141) (#123) (#106)
|
||||
- Fix: migrate doesn't use source_field in unique_together. (#181)
|
||||
|
||||
### 0.5.5
|
||||
|
||||
- Fix KeyError: 'src_folder' after upgrading aerich to 0.5.4. (#176)
|
||||
- Fix MySQL 5.X rename column.
|
||||
- Fix `db_constraint` when fk changed. (#179)
|
||||
|
||||
### 0.5.4
|
||||
|
||||
- Fix incorrect index creation order. (#151)
|
||||
- Not catch exception when import config. (#164)
|
||||
- Support `drop column` for sqlite. (#40)
|
||||
|
||||
### 0.5.3
|
||||
|
||||
- Fix postgre alter null. (#142)
|
||||
- Fix default function when migrate. (#147)
|
||||
|
||||
### 0.5.2
|
||||
|
||||
- Fix rename field on the field add. (#134)
|
||||
- Fix postgres field type change error. (#135)
|
||||
- Fix inspectdb for `FloatField`. (#138)
|
||||
- Support `rename table`. (#139)
|
||||
|
||||
### 0.5.1
|
||||
|
||||
- Fix tortoise connections not being closed properly. (#120)
|
||||
- Fix bug for field change. (#119)
|
||||
- Fix drop model in the downgrade. (#132)
|
||||
|
||||
### 0.5.0
|
||||
|
||||
- Refactor core code, now has no limitation for everything.
|
||||
|
||||
## 0.4
|
||||
|
||||
### 0.4.4
|
||||
|
||||
- Fix unnecessary import. (#113)
|
||||
|
||||
### 0.4.3
|
||||
|
||||
- Replace migrations separator to sql standard comment.
|
||||
- Add `inspectdb` command.
|
||||
|
||||
### 0.4.2
|
||||
|
||||
- Use `pathlib` for path resolving. (#89)
|
||||
- Fix upgrade in new db. (#96)
|
||||
- Fix packaging error. (#92)
|
||||
|
||||
### 0.4.1
|
||||
|
||||
- Bug fix. (#91 #93)
|
||||
|
||||
### 0.4.0
|
||||
|
||||
- Use `.sql` instead of `.json` to store version file.
|
||||
- Add `rename` column support MySQL5.
|
||||
- Remove callable detection for defaults. (#87)
|
||||
- Fix `sqlite` stuck. (#90)
|
||||
|
||||
## 0.3
|
||||
|
||||
### 0.3.3
|
||||
|
||||
- Fix encoding error. (#75)
|
||||
- Support multiple databases. (#68)
|
||||
- Compatible with models file in directory. (#70)
|
||||
|
||||
### 0.3.2
|
||||
|
||||
- Fix migrate to new database error. (#62)
|
||||
|
||||
### 0.3.1
|
||||
|
||||
- Fix first version error.
|
||||
- Fix init error. (#61)
|
||||
|
||||
### 0.3.0
|
||||
|
||||
- Refactoring migrate logic, and this version is not compatible with previous version.
|
||||
- Now there don't need `old_models.py` and it store in database.
|
||||
- Upgrade steps:
|
||||
1. Upgrade aerich version.
|
||||
2. Drop aerich table in database.
|
||||
3. Delete `migrations/{app}` folder and rerun `aerich init-db`.
|
||||
4. Update model and `aerich migrate` normally.
|
||||
|
||||
## 0.2
|
||||
|
||||
### 0.2.5
|
||||
|
||||
- Fix windows support. (#46)
|
||||
- Support `db_constraint` in fk, m2m should manual define table with fk. (#52)
|
||||
|
||||
### 0.2.4
|
||||
|
||||
- Raise error with SQLite unsupported features.
|
||||
- Fix Postgres alter table. (#48)
|
||||
- Add `Rename` support.
|
||||
|
||||
### 0.2.3
|
||||
|
||||
- Fix tortoise ssl config.
|
||||
- PostgreSQL add/drop index/unique.
|
||||
|
||||
### 0.2.2
|
||||
|
||||
- Fix postgres drop fk.
|
||||
- Fix version sort.
|
||||
|
||||
### 0.2.1
|
||||
|
||||
- Fix bug in windows.
|
||||
- Enhance PostgreSQL support.
|
||||
|
||||
### 0.2.0
|
||||
|
||||
- Update model file find method.
|
||||
- Set `--safe` bool.
|
||||
|
||||
## 0.1
|
||||
|
||||
### 0.1.9
|
||||
|
||||
- Fix default_connection when upgrade
|
||||
- Find default app instead of default.
|
||||
- Diff MySQL ddl.
|
||||
- Check tortoise config.
|
||||
|
||||
### 0.1.8
|
||||
|
||||
- Fix upgrade error when migrate.
|
||||
- Fix init db sql error.
|
||||
- Support change column.
|
||||
|
||||
### 0.1.7
|
||||
|
||||
- Exclude models.Aerich.
|
||||
- Add init record when init-db.
|
||||
- Fix version num str.
|
||||
|
||||
### 0.1.6
|
||||
|
||||
- update dependency_links
|
||||
|
||||
### 0.1.5
|
||||
|
||||
- Add sqlite and postgres support.
|
||||
- Fix dependency import.
|
||||
- Store versions in db.
|
||||
|
||||
### 0.1.4
|
||||
|
||||
- Fix transaction and fields import.
|
||||
- Make unique index worked.
|
||||
- Add cli --version.
|
||||
|
||||
### 0.1.3
|
||||
|
||||
- Support indexes and unique_together.
|
||||
|
||||
### 0.1.2
|
||||
|
||||
- Now aerich support m2m.
|
||||
- Add cli cmd init-db.
|
||||
- Change cli options.
|
||||
|
||||
### 0.1.1
|
||||
|
||||
- Now aerich basically works.
|
@ -1,31 +0,0 @@
|
||||
=========
|
||||
ChangeLog
|
||||
=========
|
||||
|
||||
0.1
|
||||
===
|
||||
0.1.5
|
||||
-----
|
||||
- Add sqlite and postgres support.
|
||||
- Fix dependency import.
|
||||
- Store versions in db.
|
||||
|
||||
0.1.4
|
||||
-----
|
||||
- Fix transaction and fields import.
|
||||
- Make unique index worked.
|
||||
- Add cli --version.
|
||||
|
||||
0.1.3
|
||||
-----
|
||||
- Support indexes and unique_together.
|
||||
|
||||
0.1.2
|
||||
-----
|
||||
- Now aerich support m2m.
|
||||
- Add cli cmd init-db.
|
||||
- Change cli options.
|
||||
|
||||
0.1.1
|
||||
-----
|
||||
- Now aerich basically works.
|
214
LICENSE
214
LICENSE
@ -1,21 +1,201 @@
|
||||
The MIT License (MIT)
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
Copyright (c) 2020 long2ice
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
1. Definitions.
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright 2020 long2ice
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
@ -1,3 +0,0 @@
|
||||
include LICENSE
|
||||
include README.rst
|
||||
include requirements.txt
|
73
Makefile
73
Makefile
@ -1,59 +1,58 @@
|
||||
checkfiles = aerich/ tests/
|
||||
black_opts = -l 100 -t py38
|
||||
checkfiles = aerich/ tests/ conftest.py
|
||||
py_warn = PYTHONDEVMODE=1
|
||||
MYSQL_HOST ?= "127.0.0.1"
|
||||
MYSQL_PORT ?= 3306
|
||||
MYSQL_PASS ?= "123456"
|
||||
POSTGRES_HOST ?= "127.0.0.1"
|
||||
POSTGRES_PORT ?= 5432
|
||||
POSTGRES_PASS ?= 123456
|
||||
|
||||
help:
|
||||
@echo "Aerich development makefile"
|
||||
@echo
|
||||
@echo "usage: make <target>"
|
||||
@echo "Targets:"
|
||||
@echo " up Updates dev/test dependencies"
|
||||
@echo " deps Ensure dev/test dependencies are installed"
|
||||
@echo " check Checks that build is sane"
|
||||
@echo " lint Reports all linter violations"
|
||||
@echo " test Runs all tests"
|
||||
@echo " style Auto-formats the code"
|
||||
up:
|
||||
@poetry update
|
||||
|
||||
deps:
|
||||
@which pip-sync > /dev/null || pip install -q pip-tools
|
||||
@pip install -r requirements-dev.txt
|
||||
@poetry install --all-extras --all-groups
|
||||
|
||||
style: deps
|
||||
isort -rc $(checkfiles)
|
||||
black $(black_opts) $(checkfiles)
|
||||
_style:
|
||||
@ruff check --fix $(checkfiles)
|
||||
@ruff format $(checkfiles)
|
||||
style: deps _style
|
||||
|
||||
check: deps
|
||||
ifneq ($(shell which black),)
|
||||
black --check $(black_opts) $(checkfiles) || (echo "Please run 'make style' to auto-fix style issues" && false)
|
||||
endif
|
||||
flake8 $(checkfiles)
|
||||
_check:
|
||||
@ruff format --check $(checkfiles) || (echo "Please run 'make style' to auto-fix style issues" && false)
|
||||
@ruff check $(checkfiles)
|
||||
@mypy $(checkfiles)
|
||||
@bandit -r aerich
|
||||
check: deps _check
|
||||
|
||||
_lint: _build
|
||||
@ruff format $(checkfiles)
|
||||
ruff check --fix $(checkfiles)
|
||||
mypy $(checkfiles)
|
||||
pylint -d C,W,R $(checkfiles)
|
||||
bandit -r $(checkfiles)
|
||||
python setup.py check -mrs
|
||||
bandit -c pyproject.toml -r $(checkfiles)
|
||||
twine check dist/*
|
||||
lint: deps _lint
|
||||
|
||||
test: deps
|
||||
$(py_warn) TEST_DB=sqlite://:memory: py.test
|
||||
$(py_warn) TEST_DB=sqlite://:memory: pytest
|
||||
|
||||
test_sqlite:
|
||||
$(py_warn) TEST_DB=sqlite://:memory: py.test
|
||||
$(py_warn) TEST_DB=sqlite://:memory: pytest
|
||||
|
||||
test_mysql:
|
||||
$(py_warn) TEST_DB="mysql://root:$(MYSQL_PASS)@$(MYSQL_HOST):$(MYSQL_PORT)/test_\{\}" py.test
|
||||
$(py_warn) TEST_DB="mysql://root:$(MYSQL_PASS)@$(MYSQL_HOST):$(MYSQL_PORT)/test_\{\}" pytest -vv -s
|
||||
|
||||
test_postgres:
|
||||
$(py_warn) TEST_DB="postgres://postgres:$(POSTGRES_PASS)@$(POSTGRES_HOST):$(POSTGRES_PORT)/test_\{\}" py.test
|
||||
$(py_warn) TEST_DB="postgres://postgres:$(POSTGRES_PASS)@$(POSTGRES_HOST):$(POSTGRES_PORT)/test_\{\}" pytest -vv -s
|
||||
|
||||
testall: deps test_sqlite test_postgres test_mysql
|
||||
test_psycopg:
|
||||
$(py_warn) TEST_DB="psycopg://postgres:$(POSTGRES_PASS)@$(POSTGRES_HOST):$(POSTGRES_PORT)/test_\{\}" pytest -vv -s
|
||||
|
||||
publish: deps
|
||||
rm -fR dist/
|
||||
python setup.py sdist
|
||||
twine upload dist/*
|
||||
_testall: test_sqlite test_postgres test_mysql
|
||||
testall: deps _testall
|
||||
|
||||
ci:
|
||||
@act -P ubuntu-latest=nektos/act-environments-ubuntu:18.04 -b
|
||||
_build:
|
||||
@poetry build
|
||||
build: deps _build
|
||||
|
||||
ci: build _check _testall
|
||||
|
311
README.md
Normal file
311
README.md
Normal file
@ -0,0 +1,311 @@
|
||||
# Aerich
|
||||
|
||||
[](https://pypi.python.org/pypi/aerich)
|
||||
[](https://github.com/tortoise/aerich)
|
||||
[](https://github.com/tortoise/aerich/actions?query=workflow:pypi)
|
||||
[](https://github.com/tortoise/aerich/actions?query=workflow:ci)
|
||||
|
||||
English | [Русский](./README_RU.md)
|
||||
|
||||
## Introduction
|
||||
|
||||
Aerich is a database migrations tool for TortoiseORM, which is like alembic for SQLAlchemy, or like Django ORM with
|
||||
it\'s own migration solution.
|
||||
|
||||
## Install
|
||||
|
||||
Just install from pypi:
|
||||
|
||||
```shell
|
||||
pip install "aerich[toml]"
|
||||
```
|
||||
|
||||
## Quick Start
|
||||
|
||||
```shell
|
||||
> aerich -h
|
||||
|
||||
Usage: aerich [OPTIONS] COMMAND [ARGS]...
|
||||
|
||||
Options:
|
||||
-V, --version Show the version and exit.
|
||||
-c, --config TEXT Config file. [default: pyproject.toml]
|
||||
--app TEXT Tortoise-ORM app name.
|
||||
-h, --help Show this message and exit.
|
||||
|
||||
Commands:
|
||||
downgrade Downgrade to specified version.
|
||||
heads Show current available heads in migrate location.
|
||||
history List all migrate items.
|
||||
init Init config file and generate root migrate location.
|
||||
init-db Generate schema and generate app migrate location.
|
||||
inspectdb Introspects the database tables to standard output as...
|
||||
migrate Generate migrate changes file.
|
||||
upgrade Upgrade to specified version.
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
You need to add `aerich.models` to your `Tortoise-ORM` config first. Example:
|
||||
|
||||
```python
|
||||
TORTOISE_ORM = {
|
||||
"connections": {"default": "mysql://root:123456@127.0.0.1:3306/test"},
|
||||
"apps": {
|
||||
"models": {
|
||||
"models": ["tests.models", "aerich.models"],
|
||||
"default_connection": "default",
|
||||
},
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
### Initialization
|
||||
|
||||
```shell
|
||||
> aerich init -h
|
||||
|
||||
Usage: aerich init [OPTIONS]
|
||||
|
||||
Init config file and generate root migrate location.
|
||||
|
||||
Options:
|
||||
-t, --tortoise-orm TEXT Tortoise-ORM config module dict variable, like
|
||||
settings.TORTOISE_ORM. [required]
|
||||
--location TEXT Migrate store location. [default: ./migrations]
|
||||
-s, --src_folder TEXT Folder of the source, relative to the project root.
|
||||
-h, --help Show this message and exit.
|
||||
```
|
||||
|
||||
Initialize the config file and migrations location:
|
||||
|
||||
```shell
|
||||
> aerich init -t tests.backends.mysql.TORTOISE_ORM
|
||||
|
||||
Success create migrate location ./migrations
|
||||
Success write config to pyproject.toml
|
||||
```
|
||||
|
||||
### Init db
|
||||
|
||||
```shell
|
||||
> aerich init-db
|
||||
|
||||
Success create app migrate location ./migrations/models
|
||||
Success generate schema for app "models"
|
||||
```
|
||||
|
||||
If your Tortoise-ORM app is not the default `models`, you must specify the correct app via `--app`,
|
||||
e.g. `aerich --app other_models init-db`.
|
||||
|
||||
### Update models and make migrate
|
||||
|
||||
```shell
|
||||
> aerich migrate --name drop_column
|
||||
|
||||
Success migrate 1_202029051520102929_drop_column.py
|
||||
```
|
||||
|
||||
Format of migrate filename is
|
||||
`{version_num}_{datetime}_{name|update}.py`.
|
||||
|
||||
If `aerich` guesses you are renaming a column, it will ask `Rename {old_column} to {new_column} [True]`. You can choose
|
||||
`True` to rename column without column drop, or choose `False` to drop the column then create. Note that the latter may
|
||||
lose data.
|
||||
|
||||
If you need to manually write migration, you could generate empty file:
|
||||
|
||||
```shell
|
||||
> aerich migrate --name add_index --empty
|
||||
|
||||
Success migrate 1_202326122220101229_add_index.py
|
||||
```
|
||||
|
||||
### Upgrade to latest version
|
||||
|
||||
```shell
|
||||
> aerich upgrade
|
||||
|
||||
Success upgrade 1_202029051520102929_drop_column.py
|
||||
```
|
||||
|
||||
Now your db is migrated to latest.
|
||||
|
||||
### Downgrade to specified version
|
||||
|
||||
```shell
|
||||
> aerich downgrade -h
|
||||
|
||||
Usage: aerich downgrade [OPTIONS]
|
||||
|
||||
Downgrade to specified version.
|
||||
|
||||
Options:
|
||||
-v, --version INTEGER Specified version, default to last. [default: -1]
|
||||
-d, --delete Delete version files at the same time. [default:
|
||||
False]
|
||||
|
||||
--yes Confirm the action without prompting.
|
||||
-h, --help Show this message and exit.
|
||||
```
|
||||
|
||||
```shell
|
||||
> aerich downgrade
|
||||
|
||||
Success downgrade 1_202029051520102929_drop_column.py
|
||||
```
|
||||
|
||||
Now your db is rolled back to the specified version.
|
||||
|
||||
### Show history
|
||||
|
||||
```shell
|
||||
> aerich history
|
||||
|
||||
1_202029051520102929_drop_column.py
|
||||
```
|
||||
|
||||
### Show heads to be migrated
|
||||
|
||||
```shell
|
||||
> aerich heads
|
||||
|
||||
1_202029051520102929_drop_column.py
|
||||
```
|
||||
|
||||
### Inspect db tables to TortoiseORM model
|
||||
|
||||
Currently `inspectdb` support MySQL & Postgres & SQLite.
|
||||
|
||||
```shell
|
||||
Usage: aerich inspectdb [OPTIONS]
|
||||
|
||||
Introspects the database tables to standard output as TortoiseORM model.
|
||||
|
||||
Options:
|
||||
-t, --table TEXT Which tables to inspect.
|
||||
-h, --help Show this message and exit.
|
||||
```
|
||||
|
||||
Inspect all tables and print to console:
|
||||
|
||||
```shell
|
||||
aerich --app models inspectdb
|
||||
```
|
||||
|
||||
Inspect a specified table in the default app and redirect to `models.py`:
|
||||
|
||||
```shell
|
||||
aerich inspectdb -t user > models.py
|
||||
```
|
||||
|
||||
For example, you table is:
|
||||
|
||||
```sql
|
||||
CREATE TABLE `test`
|
||||
(
|
||||
`id` int NOT NULL AUTO_INCREMENT,
|
||||
`decimal` decimal(10, 2) NOT NULL,
|
||||
`date` date DEFAULT NULL,
|
||||
`datetime` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||
`time` time DEFAULT NULL,
|
||||
`float` float DEFAULT NULL,
|
||||
`string` varchar(200) COLLATE utf8mb4_general_ci DEFAULT NULL,
|
||||
`tinyint` tinyint DEFAULT NULL,
|
||||
PRIMARY KEY (`id`),
|
||||
KEY `asyncmy_string_index` (`string`)
|
||||
) ENGINE = InnoDB
|
||||
DEFAULT CHARSET = utf8mb4
|
||||
COLLATE = utf8mb4_general_ci
|
||||
```
|
||||
|
||||
Now run `aerich inspectdb -t test` to see the generated model:
|
||||
|
||||
```python
|
||||
from tortoise import Model, fields
|
||||
|
||||
|
||||
class Test(Model):
|
||||
date = fields.DateField(null=True)
|
||||
datetime = fields.DatetimeField(auto_now=True)
|
||||
decimal = fields.DecimalField(max_digits=10, decimal_places=2)
|
||||
float = fields.FloatField(null=True)
|
||||
id = fields.IntField(primary_key=True)
|
||||
string = fields.CharField(max_length=200, null=True)
|
||||
time = fields.TimeField(null=True)
|
||||
tinyint = fields.BooleanField(null=True)
|
||||
```
|
||||
|
||||
Note that this command is limited and can't infer some fields, such as `IntEnumField`, `ForeignKeyField`, and others.
|
||||
|
||||
### Multiple databases
|
||||
|
||||
```python
|
||||
tortoise_orm = {
|
||||
"connections": {
|
||||
"default": "postgres://postgres_user:postgres_pass@127.0.0.1:5432/db1",
|
||||
"second": "postgres://postgres_user:postgres_pass@127.0.0.1:5432/db2",
|
||||
},
|
||||
"apps": {
|
||||
"models": {"models": ["tests.models", "aerich.models"], "default_connection": "default"},
|
||||
"models_second": {"models": ["tests.models_second"], "default_connection": "second", },
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
You only need to specify `aerich.models` in one app, and must specify `--app` when running `aerich migrate` and so on, e.g. `aerich --app models_second migrate`.
|
||||
|
||||
## Restore `aerich` workflow
|
||||
|
||||
In some cases, such as broken changes from upgrade of `aerich`, you can't run `aerich migrate` or `aerich upgrade`, you
|
||||
can make the following steps:
|
||||
|
||||
1. drop `aerich` table.
|
||||
2. delete `migrations/{app}` directory.
|
||||
3. rerun `aerich init-db`.
|
||||
|
||||
Note that these actions is safe, also you can do that to reset your migrations if your migration files is too many.
|
||||
|
||||
## Use `aerich` in application
|
||||
|
||||
You can use `aerich` out of cli by use `Command` class.
|
||||
|
||||
```python
|
||||
from aerich import Command
|
||||
|
||||
async with Command(tortoise_config=config, app='models') as command:
|
||||
await command.migrate('test')
|
||||
await command.upgrade()
|
||||
```
|
||||
|
||||
## Upgrade/Downgrade with `--fake` option
|
||||
|
||||
Marks the migrations up to the latest one(or back to the target one) as applied, but without actually running the SQL to change your database schema.
|
||||
|
||||
- Upgrade
|
||||
|
||||
```bash
|
||||
aerich upgrade --fake
|
||||
aerich --app models upgrade --fake
|
||||
```
|
||||
- Downgrade
|
||||
|
||||
```bash
|
||||
aerich downgrade --fake -v 2
|
||||
aerich --app models downgrade --fake -v 2
|
||||
```
|
||||
|
||||
### Ignore tables
|
||||
|
||||
You can tell aerich to ignore table by setting `managed=False` in the `Meta` class, e.g.:
|
||||
```py
|
||||
class MyModel(Model):
|
||||
class Meta:
|
||||
managed = False
|
||||
```
|
||||
**Note** `managed=False` does not recognized by `tortoise-orm` and `aerich init-db`, it is only for `aerich migrate`.
|
||||
|
||||
## License
|
||||
|
||||
This project is licensed under the
|
||||
[Apache-2.0](https://github.com/long2ice/aerich/blob/master/LICENSE) License.
|
160
README.rst
160
README.rst
@ -1,160 +0,0 @@
|
||||
======
|
||||
Aerich
|
||||
======
|
||||
|
||||
.. image:: https://img.shields.io/pypi/v/aerich.svg?style=flat
|
||||
:target: https://pypi.python.org/pypi/aerich
|
||||
.. image:: https://img.shields.io/github/license/long2ice/aerich
|
||||
:target: https://github.com/long2ice/aerich
|
||||
.. image:: https://github.com/long2ice/aerich/workflows/pypi/badge.svg
|
||||
:target: https://github.com/long2ice/aerich/actions?query=workflow:pypi
|
||||
.. image:: https://github.com/long2ice/aerich/workflows/test/badge.svg
|
||||
:target: https://github.com/long2ice/aerich/actions?query=workflow:test
|
||||
|
||||
Introduction
|
||||
============
|
||||
|
||||
Tortoise-ORM is the best asyncio ORM now, but it lacks a database migrations tool like alembic for SQLAlchemy, or Django ORM with it's own migrations tool.
|
||||
|
||||
This project aim to be a best migrations tool for Tortoise-ORM and which written by one of contributors of Tortoise-ORM.
|
||||
|
||||
Install
|
||||
=======
|
||||
|
||||
Just install from pypi:
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
$ pip install aerich
|
||||
|
||||
Quick Start
|
||||
===========
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
$ aerich -h
|
||||
|
||||
Usage: aerich [OPTIONS] COMMAND [ARGS]...
|
||||
|
||||
Options:
|
||||
-c, --config TEXT Config file. [default: aerich.ini]
|
||||
--app TEXT Tortoise-ORM app name. [default: models]
|
||||
-n, --name TEXT Name of section in .ini file to use for aerich config.
|
||||
[default: aerich]
|
||||
-h, --help Show this message and exit.
|
||||
|
||||
Commands:
|
||||
downgrade Downgrade to previous version.
|
||||
heads Show current available heads in migrate location.
|
||||
history List all migrate items.
|
||||
init Init config file and generate root migrate location.
|
||||
init-db Generate schema and generate app migrate location.
|
||||
migrate Generate migrate changes file.
|
||||
upgrade Upgrade to latest version.
|
||||
|
||||
Usage
|
||||
=====
|
||||
You need add ``aerich.models`` to your ``Tortoise-ORM`` config first, example:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
TORTOISE_ORM = {
|
||||
"connections": {"default": "mysql://root:123456@127.0.0.1:3306/test"},
|
||||
"apps": {
|
||||
"models": {
|
||||
"models": ["tests.models", "aerich.models"],
|
||||
"default_connection": "default",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
Initialization
|
||||
--------------
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
$ aerich init -h
|
||||
|
||||
Usage: aerich init [OPTIONS]
|
||||
|
||||
Init config file and generate root migrate location.
|
||||
|
||||
Options:
|
||||
-t, --tortoise-orm TEXT Tortoise-ORM config module dict variable, like settings.TORTOISE_ORM.
|
||||
[required]
|
||||
--location TEXT Migrate store location. [default: ./migrations]
|
||||
-h, --help Show this message and exit.
|
||||
|
||||
Init config file and location:
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
$ aerich init -t tests.backends.mysql.TORTOISE_ORM
|
||||
|
||||
Success create migrate location ./migrations
|
||||
Success generate config file aerich.ini
|
||||
|
||||
Init db
|
||||
-------
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
$ aerich init-db
|
||||
|
||||
Success create app migrate location ./migrations/models
|
||||
Success generate schema for app "models"
|
||||
|
||||
Update models and make migrate
|
||||
------------------------------
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
$ aerich migrate --name drop_column
|
||||
|
||||
Success migrate 1_202029051520102929_drop_column.json
|
||||
|
||||
Format of migrate filename is ``{version_num}_{datetime}_{name|update}.json``
|
||||
|
||||
Upgrade to latest version
|
||||
-------------------------
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
$ aerich upgrade
|
||||
|
||||
Success upgrade 1_202029051520102929_drop_column.json
|
||||
|
||||
Now your db is migrated to latest.
|
||||
|
||||
Downgrade to previous version
|
||||
-----------------------------
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
$ aerich downgrade
|
||||
|
||||
Success downgrade 1_202029051520102929_drop_column.json
|
||||
|
||||
Now your db rollback to previous version.
|
||||
|
||||
Show history
|
||||
------------
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
$ aerich history
|
||||
|
||||
1_202029051520102929_drop_column.json
|
||||
|
||||
Show heads to be migrated
|
||||
-------------------------
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
$ aerich heads
|
||||
|
||||
1_202029051520102929_drop_column.json
|
||||
|
||||
License
|
||||
=======
|
||||
This project is licensed under the `MIT <https://github.com/long2ice/aerich/blob/master/LICENSE>`_ License.
|
274
README_RU.md
Normal file
274
README_RU.md
Normal file
@ -0,0 +1,274 @@
|
||||
# Aerich
|
||||
|
||||
[](https://pypi.python.org/pypi/aerich)
|
||||
[](https://github.com/tortoise/aerich)
|
||||
[](https://github.com/tortoise/aerich/actions?query=workflow:pypi)
|
||||
[](https://github.com/tortoise/aerich/actions?query=workflow:ci)
|
||||
|
||||
[English](./README.md) | Русский
|
||||
|
||||
## Введение
|
||||
|
||||
Aerich - это инструмент для миграции базы данных для TortoiseORM, который аналогичен Alembic для SQLAlchemy или встроенному решению миграций в Django ORM.
|
||||
|
||||
## Установка
|
||||
|
||||
Просто установите из pypi:
|
||||
|
||||
```shell
|
||||
pip install aerich
|
||||
```
|
||||
|
||||
## Быстрый старт
|
||||
|
||||
```shell
|
||||
> aerich -h
|
||||
|
||||
Usage: aerich [OPTIONS] COMMAND [ARGS]...
|
||||
|
||||
Options:
|
||||
-V, --version Show the version and exit.
|
||||
-c, --config TEXT Config file. [default: pyproject.toml]
|
||||
--app TEXT Tortoise-ORM app name.
|
||||
-h, --help Show this message and exit.
|
||||
|
||||
Commands:
|
||||
downgrade Downgrade to specified version.
|
||||
heads Show current available heads in migrate location.
|
||||
history List all migrate items.
|
||||
init Init config file and generate root migrate location.
|
||||
init-db Generate schema and generate app migrate location.
|
||||
inspectdb Introspects the database tables to standard output as...
|
||||
migrate Generate migrate changes file.
|
||||
upgrade Upgrade to specified version.
|
||||
```
|
||||
|
||||
## Использование
|
||||
|
||||
Сначала вам нужно добавить aerich.models в конфигурацию вашего Tortoise-ORM. Пример:
|
||||
|
||||
```python
|
||||
TORTOISE_ORM = {
|
||||
"connections": {"default": "mysql://root:123456@127.0.0.1:3306/test"},
|
||||
"apps": {
|
||||
"models": {
|
||||
"models": ["tests.models", "aerich.models"],
|
||||
"default_connection": "default",
|
||||
},
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
### Инициализация
|
||||
|
||||
```shell
|
||||
> aerich init -h
|
||||
|
||||
Usage: aerich init [OPTIONS]
|
||||
|
||||
Init config file and generate root migrate location.
|
||||
|
||||
Options:
|
||||
-t, --tortoise-orm TEXT Tortoise-ORM config module dict variable, like
|
||||
settings.TORTOISE_ORM. [required]
|
||||
--location TEXT Migrate store location. [default: ./migrations]
|
||||
-s, --src_folder TEXT Folder of the source, relative to the project root.
|
||||
-h, --help Show this message and exit.
|
||||
```
|
||||
|
||||
Инициализируйте файл конфигурации и задайте местоположение миграций:
|
||||
|
||||
```shell
|
||||
> aerich init -t tests.backends.mysql.TORTOISE_ORM
|
||||
|
||||
Success create migrate location ./migrations
|
||||
Success write config to pyproject.toml
|
||||
```
|
||||
|
||||
### Инициализация базы данных
|
||||
|
||||
```shell
|
||||
> aerich init-db
|
||||
|
||||
Success create app migrate location ./migrations/models
|
||||
Success generate schema for app "models"
|
||||
```
|
||||
|
||||
Если ваше приложение Tortoise-ORM не является приложением по умолчанию с именем models, вы должны указать правильное имя приложения с помощью параметра --app, например: aerich --app other_models init-db.
|
||||
|
||||
### Обновление моделей и создание миграции
|
||||
|
||||
```shell
|
||||
> aerich migrate --name drop_column
|
||||
|
||||
Success migrate 1_202029051520102929_drop_column.py
|
||||
```
|
||||
|
||||
Формат имени файла миграции следующий: `{версия}_{дата_и_время}_{имя|обновление}.py`.
|
||||
|
||||
Если aerich предполагает, что вы переименовываете столбец, он спросит:
|
||||
Переименовать `{старый_столбец} в {новый_столбец} [True]`. Вы можете выбрать `True`,
|
||||
чтобы переименовать столбец без удаления столбца, или выбрать `False`, чтобы удалить столбец,
|
||||
а затем создать новый. Обратите внимание, что последний вариант может привести к потере данных.
|
||||
|
||||
|
||||
### Обновление до последней версии
|
||||
|
||||
```shell
|
||||
> aerich upgrade
|
||||
|
||||
Success upgrade 1_202029051520102929_drop_column.py
|
||||
```
|
||||
|
||||
Теперь ваша база данных обновлена до последней версии.
|
||||
|
||||
### Откат до указанной версии
|
||||
|
||||
```shell
|
||||
> aerich downgrade -h
|
||||
|
||||
Usage: aerich downgrade [OPTIONS]
|
||||
|
||||
Downgrade to specified version.
|
||||
|
||||
Options:
|
||||
-v, --version INTEGER Specified version, default to last. [default: -1]
|
||||
-d, --delete Delete version files at the same time. [default:
|
||||
False]
|
||||
|
||||
--yes Confirm the action without prompting.
|
||||
-h, --help Show this message and exit.
|
||||
```
|
||||
|
||||
```shell
|
||||
> aerich downgrade
|
||||
|
||||
Success downgrade 1_202029051520102929_drop_column.py
|
||||
```
|
||||
|
||||
Теперь ваша база данных откатилась до указанной версии.
|
||||
|
||||
### Показать историю
|
||||
|
||||
```shell
|
||||
> aerich history
|
||||
|
||||
1_202029051520102929_drop_column.py
|
||||
```
|
||||
|
||||
### Чтобы узнать, какие миграции должны быть применены, можно использовать команду:
|
||||
|
||||
```shell
|
||||
> aerich heads
|
||||
|
||||
1_202029051520102929_drop_column.py
|
||||
```
|
||||
|
||||
### Осмотр таблиц базы данных для модели TortoiseORM
|
||||
|
||||
В настоящее время inspectdb поддерживает MySQL, Postgres и SQLite.
|
||||
|
||||
```shell
|
||||
Usage: aerich inspectdb [OPTIONS]
|
||||
|
||||
Introspects the database tables to standard output as TortoiseORM model.
|
||||
|
||||
Options:
|
||||
-t, --table TEXT Which tables to inspect.
|
||||
-h, --help Show this message and exit.
|
||||
```
|
||||
|
||||
Посмотреть все таблицы и вывести их на консоль:
|
||||
|
||||
```shell
|
||||
aerich --app models inspectdb
|
||||
```
|
||||
|
||||
Осмотреть указанную таблицу в приложении по умолчанию и перенаправить в models.py:
|
||||
|
||||
```shell
|
||||
aerich inspectdb -t user > models.py
|
||||
```
|
||||
|
||||
Например, ваша таблица выглядит следующим образом:
|
||||
|
||||
```sql
|
||||
CREATE TABLE `test`
|
||||
(
|
||||
`id` int NOT NULL AUTO_INCREMENT,
|
||||
`decimal` decimal(10, 2) NOT NULL,
|
||||
`date` date DEFAULT NULL,
|
||||
`datetime` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||
`time` time DEFAULT NULL,
|
||||
`float` float DEFAULT NULL,
|
||||
`string` varchar(200) COLLATE utf8mb4_general_ci DEFAULT NULL,
|
||||
`tinyint` tinyint DEFAULT NULL,
|
||||
PRIMARY KEY (`id`),
|
||||
KEY `asyncmy_string_index` (`string`)
|
||||
) ENGINE = InnoDB
|
||||
DEFAULT CHARSET = utf8mb4
|
||||
COLLATE = utf8mb4_general_ci
|
||||
```
|
||||
|
||||
Теперь выполните команду aerich inspectdb -t test, чтобы увидеть сгенерированную модель:
|
||||
|
||||
```python
|
||||
from tortoise import Model, fields
|
||||
|
||||
|
||||
class Test(Model):
|
||||
date = fields.DateField(null=True, )
|
||||
datetime = fields.DatetimeField(auto_now=True, )
|
||||
decimal = fields.DecimalField(max_digits=10, decimal_places=2, )
|
||||
float = fields.FloatField(null=True, )
|
||||
id = fields.IntField(pk=True, )
|
||||
string = fields.CharField(max_length=200, null=True, )
|
||||
time = fields.TimeField(null=True, )
|
||||
tinyint = fields.BooleanField(null=True, )
|
||||
```
|
||||
|
||||
Обратите внимание, что эта команда имеет ограничения и не может автоматически определить некоторые поля, такие как `IntEnumField`, `ForeignKeyField` и другие.
|
||||
|
||||
### Несколько баз данных
|
||||
|
||||
```python
|
||||
tortoise_orm = {
|
||||
"connections": {
|
||||
"default": expand_db_url(db_url, True),
|
||||
"second": expand_db_url(db_url_second, True),
|
||||
},
|
||||
"apps": {
|
||||
"models": {"models": ["tests.models", "aerich.models"], "default_connection": "default"},
|
||||
"models_second": {"models": ["tests.models_second"], "default_connection": "second", },
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
Вам нужно указать `aerich.models` только в одном приложении и должны указывать `--app` при запуске команды `aerich migrate` и т.д.
|
||||
|
||||
## Восстановление рабочего процесса aerich
|
||||
|
||||
В некоторых случаях, например, при возникновении проблем после обновления `aerich`, вы не можете запустить `aerich migrate` или `aerich upgrade`. В таком случае вы можете выполнить следующие шаги:
|
||||
|
||||
1. удалите таблицы `aerich`.
|
||||
2. удалите директорию `migrations/{app}`.
|
||||
3. rerun `aerich init-db`.
|
||||
|
||||
Обратите внимание, что эти действия безопасны, и вы можете использовать их для сброса миграций, если у вас слишком много файлов миграции.
|
||||
|
||||
## Использование aerich в приложении
|
||||
|
||||
Вы можете использовать `aerich` вне командной строки, используя класс `Command`.
|
||||
|
||||
```python
|
||||
from aerich import Command
|
||||
|
||||
command = Command(tortoise_config=config, app='models')
|
||||
await command.init()
|
||||
await command.migrate('test')
|
||||
```
|
||||
|
||||
## Лицензия
|
||||
|
||||
Этот проект лицензирован в соответствии с лицензией
|
||||
[Apache-2.0](https://github.com/long2ice/aerich/blob/master/LICENSE) Лицензия.
|
@ -1 +1,282 @@
|
||||
__version__ = "0.1.6"
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import platform
|
||||
from contextlib import AbstractAsyncContextManager
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import tortoise
|
||||
from tortoise import Tortoise, connections, generate_schema_for_client
|
||||
from tortoise.exceptions import OperationalError
|
||||
from tortoise.transactions import in_transaction
|
||||
from tortoise.utils import get_schema_sql
|
||||
|
||||
from aerich.exceptions import DowngradeError
|
||||
from aerich.inspectdb.mysql import InspectMySQL
|
||||
from aerich.inspectdb.postgres import InspectPostgres
|
||||
from aerich.inspectdb.sqlite import InspectSQLite
|
||||
from aerich.migrate import MIGRATE_TEMPLATE, Migrate
|
||||
from aerich.models import Aerich
|
||||
from aerich.utils import (
|
||||
get_app_connection,
|
||||
get_app_connection_name,
|
||||
get_models_describe,
|
||||
import_py_file,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from tortoise import Model
|
||||
from tortoise.fields.relational import ManyToManyFieldInstance # NOQA:F401
|
||||
|
||||
from aerich.inspectdb import Inspect
|
||||
|
||||
|
||||
def _init_asyncio_patch():
|
||||
"""
|
||||
Select compatible event loop for psycopg3.
|
||||
|
||||
As of Python 3.8+, the default event loop on Windows is `proactor`,
|
||||
however psycopg3 requires the old default "selector" event loop.
|
||||
See https://www.psycopg.org/psycopg3/docs/advanced/async.html
|
||||
"""
|
||||
if platform.system() == "Windows":
|
||||
try:
|
||||
from asyncio import WindowsSelectorEventLoopPolicy # type:ignore
|
||||
except ImportError:
|
||||
pass # Can't assign a policy which doesn't exist.
|
||||
else:
|
||||
from asyncio import get_event_loop_policy, set_event_loop_policy
|
||||
|
||||
if not isinstance(get_event_loop_policy(), WindowsSelectorEventLoopPolicy):
|
||||
set_event_loop_policy(WindowsSelectorEventLoopPolicy())
|
||||
|
||||
|
||||
def _init_tortoise_0_24_1_patch():
|
||||
# this patch is for "tortoise-orm==0.24.1" to fix:
|
||||
# https://github.com/tortoise/tortoise-orm/issues/1893
|
||||
if tortoise.__version__ != "0.24.1":
|
||||
return
|
||||
from tortoise.backends.base.schema_generator import BaseSchemaGenerator, cast, re
|
||||
|
||||
def _get_m2m_tables(
|
||||
self, model: type[Model], db_table: str, safe: bool, models_tables: list[str]
|
||||
) -> list[str]: # Copied from tortoise-orm
|
||||
m2m_tables_for_create = []
|
||||
for m2m_field in model._meta.m2m_fields:
|
||||
field_object = cast("ManyToManyFieldInstance", model._meta.fields_map[m2m_field])
|
||||
if field_object._generated or field_object.through in models_tables:
|
||||
continue
|
||||
backward_key, forward_key = field_object.backward_key, field_object.forward_key
|
||||
if field_object.db_constraint:
|
||||
backward_fk = self._create_fk_string(
|
||||
"",
|
||||
backward_key,
|
||||
db_table,
|
||||
model._meta.db_pk_column,
|
||||
field_object.on_delete,
|
||||
"",
|
||||
)
|
||||
forward_fk = self._create_fk_string(
|
||||
"",
|
||||
forward_key,
|
||||
field_object.related_model._meta.db_table,
|
||||
field_object.related_model._meta.db_pk_column,
|
||||
field_object.on_delete,
|
||||
"",
|
||||
)
|
||||
else:
|
||||
backward_fk = forward_fk = ""
|
||||
exists = "IF NOT EXISTS " if safe else ""
|
||||
through_table_name = field_object.through
|
||||
backward_type = self._get_pk_field_sql_type(model._meta.pk)
|
||||
forward_type = self._get_pk_field_sql_type(field_object.related_model._meta.pk)
|
||||
comment = ""
|
||||
if desc := field_object.description:
|
||||
comment = self._table_comment_generator(table=through_table_name, comment=desc)
|
||||
m2m_create_string = self.M2M_TABLE_TEMPLATE.format(
|
||||
exists=exists,
|
||||
table_name=through_table_name,
|
||||
backward_fk=backward_fk,
|
||||
forward_fk=forward_fk,
|
||||
backward_key=backward_key,
|
||||
backward_type=backward_type,
|
||||
forward_key=forward_key,
|
||||
forward_type=forward_type,
|
||||
extra=self._table_generate_extra(table=field_object.through),
|
||||
comment=comment,
|
||||
)
|
||||
if not field_object.db_constraint:
|
||||
m2m_create_string = m2m_create_string.replace(
|
||||
""",
|
||||
,
|
||||
""",
|
||||
"",
|
||||
) # may have better way
|
||||
m2m_create_string += self._post_table_hook()
|
||||
if getattr(field_object, "create_unique_index", field_object.unique):
|
||||
unique_index_create_sql = self._get_unique_index_sql(
|
||||
exists, through_table_name, [backward_key, forward_key]
|
||||
)
|
||||
if unique_index_create_sql.endswith(";"):
|
||||
m2m_create_string += "\n" + unique_index_create_sql
|
||||
else:
|
||||
lines = m2m_create_string.splitlines()
|
||||
lines[-2] += ","
|
||||
indent = m.group() if (m := re.match(r"\s+", lines[-2])) else ""
|
||||
lines.insert(-1, indent + unique_index_create_sql)
|
||||
m2m_create_string = "\n".join(lines)
|
||||
m2m_tables_for_create.append(m2m_create_string)
|
||||
return m2m_tables_for_create
|
||||
|
||||
setattr(BaseSchemaGenerator, "_get_m2m_tables", _get_m2m_tables)
|
||||
|
||||
|
||||
_init_asyncio_patch()
|
||||
_init_tortoise_0_24_1_patch()
|
||||
|
||||
|
||||
class Command(AbstractAsyncContextManager):
|
||||
def __init__(
|
||||
self,
|
||||
tortoise_config: dict,
|
||||
app: str = "models",
|
||||
location: str = "./migrations",
|
||||
) -> None:
|
||||
self.tortoise_config = tortoise_config
|
||||
self.app = app
|
||||
self.location = location
|
||||
Migrate.app = app
|
||||
|
||||
async def init(self) -> None:
|
||||
await Migrate.init(self.tortoise_config, self.app, self.location)
|
||||
|
||||
async def __aenter__(self) -> Command:
|
||||
await self.init()
|
||||
return self
|
||||
|
||||
async def close(self) -> None:
|
||||
await connections.close_all()
|
||||
|
||||
async def __aexit__(self, *args, **kw) -> None:
|
||||
await self.close()
|
||||
|
||||
async def _upgrade(self, conn, version_file, fake: bool = False) -> None:
|
||||
file_path = Path(Migrate.migrate_location, version_file)
|
||||
m = import_py_file(file_path)
|
||||
upgrade = m.upgrade
|
||||
if not fake:
|
||||
await conn.execute_script(await upgrade(conn))
|
||||
await Aerich.create(
|
||||
version=version_file,
|
||||
app=self.app,
|
||||
content=get_models_describe(self.app),
|
||||
)
|
||||
|
||||
async def upgrade(self, run_in_transaction: bool = True, fake: bool = False) -> list[str]:
|
||||
migrated = []
|
||||
for version_file in Migrate.get_all_version_files():
|
||||
try:
|
||||
exists = await Aerich.exists(version=version_file, app=self.app)
|
||||
except OperationalError:
|
||||
exists = False
|
||||
if not exists:
|
||||
app_conn_name = get_app_connection_name(self.tortoise_config, self.app)
|
||||
if run_in_transaction:
|
||||
async with in_transaction(app_conn_name) as conn:
|
||||
await self._upgrade(conn, version_file, fake=fake)
|
||||
else:
|
||||
app_conn = get_app_connection(self.tortoise_config, self.app)
|
||||
await self._upgrade(app_conn, version_file, fake=fake)
|
||||
migrated.append(version_file)
|
||||
return migrated
|
||||
|
||||
async def downgrade(self, version: int, delete: bool, fake: bool = False) -> list[str]:
|
||||
ret: list[str] = []
|
||||
if version == -1:
|
||||
specified_version = await Migrate.get_last_version()
|
||||
else:
|
||||
specified_version = await Aerich.filter(
|
||||
app=self.app, version__startswith=f"{version}_"
|
||||
).first()
|
||||
if not specified_version:
|
||||
raise DowngradeError("No specified version found")
|
||||
if version == -1:
|
||||
versions = [specified_version]
|
||||
else:
|
||||
versions = await Aerich.filter(app=self.app, pk__gte=specified_version.pk)
|
||||
for version_obj in versions:
|
||||
file = version_obj.version
|
||||
async with in_transaction(
|
||||
get_app_connection_name(self.tortoise_config, self.app)
|
||||
) as conn:
|
||||
file_path = Path(Migrate.migrate_location, file)
|
||||
m = import_py_file(file_path)
|
||||
downgrade = m.downgrade
|
||||
downgrade_sql = await downgrade(conn)
|
||||
if not downgrade_sql.strip():
|
||||
raise DowngradeError("No downgrade items found")
|
||||
if not fake:
|
||||
await conn.execute_script(downgrade_sql)
|
||||
await version_obj.delete()
|
||||
if delete:
|
||||
os.unlink(file_path)
|
||||
ret.append(file)
|
||||
return ret
|
||||
|
||||
async def heads(self) -> list[str]:
|
||||
ret = []
|
||||
versions = Migrate.get_all_version_files()
|
||||
for version in versions:
|
||||
if not await Aerich.exists(version=version, app=self.app):
|
||||
ret.append(version)
|
||||
return ret
|
||||
|
||||
async def history(self) -> list[str]:
|
||||
versions = Migrate.get_all_version_files()
|
||||
return [version for version in versions]
|
||||
|
||||
async def inspectdb(self, tables: list[str] | None = None) -> str:
|
||||
connection = get_app_connection(self.tortoise_config, self.app)
|
||||
dialect = connection.schema_generator.DIALECT
|
||||
if dialect == "mysql":
|
||||
cls: type[Inspect] = InspectMySQL
|
||||
elif dialect == "postgres":
|
||||
cls = InspectPostgres
|
||||
elif dialect == "sqlite":
|
||||
cls = InspectSQLite
|
||||
else:
|
||||
raise NotImplementedError(f"{dialect} is not supported")
|
||||
inspect = cls(connection, tables)
|
||||
return await inspect.inspect()
|
||||
|
||||
async def migrate(self, name: str = "update", empty: bool = False) -> str:
|
||||
return await Migrate.migrate(name, empty)
|
||||
|
||||
async def init_db(self, safe: bool) -> None:
|
||||
location = self.location
|
||||
app = self.app
|
||||
dirname = Path(location, app)
|
||||
if not dirname.exists():
|
||||
dirname.mkdir(parents=True)
|
||||
else:
|
||||
# If directory is empty, go ahead, otherwise raise FileExistsError
|
||||
for unexpected_file in dirname.glob("*"):
|
||||
raise FileExistsError(str(unexpected_file))
|
||||
|
||||
await Tortoise.init(config=self.tortoise_config)
|
||||
connection = get_app_connection(self.tortoise_config, app)
|
||||
await generate_schema_for_client(connection, safe)
|
||||
|
||||
schema = get_schema_sql(connection, safe)
|
||||
|
||||
version = await Migrate.generate_version()
|
||||
await Aerich.create(
|
||||
version=version,
|
||||
app=app,
|
||||
content=get_models_describe(app),
|
||||
)
|
||||
version_file = Path(dirname, version)
|
||||
content = MIGRATE_TEMPLATE.format(upgrade_sql=schema, downgrade_sql="")
|
||||
with open(version_file, "w", encoding="utf-8") as f:
|
||||
f.write(content)
|
||||
|
3
aerich/__main__.py
Normal file
3
aerich/__main__.py
Normal file
@ -0,0 +1,3 @@
|
||||
from .cli import main
|
||||
|
||||
main()
|
28
aerich/_compat.py
Normal file
28
aerich/_compat.py
Normal file
@ -0,0 +1,28 @@
|
||||
# mypy: disable-error-code="no-redef"
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
from types import ModuleType
|
||||
|
||||
import tortoise
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
import tomllib
|
||||
else:
|
||||
try:
|
||||
import tomli as tomllib
|
||||
except ImportError:
|
||||
import tomlkit as tomllib
|
||||
|
||||
|
||||
def imports_tomlkit() -> ModuleType:
|
||||
try:
|
||||
import tomli_w as tomlkit
|
||||
except ImportError:
|
||||
import tomlkit
|
||||
return tomlkit
|
||||
|
||||
|
||||
def tortoise_version_less_than(version: str) -> bool:
|
||||
# The min version of tortoise is '0.11.0', so we can compare it by a `<`,
|
||||
return tortoise.__version__ < version
|
371
aerich/cli.py
371
aerich/cli.py
@ -1,213 +1,298 @@
|
||||
import json
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
from configparser import ConfigParser
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import cast
|
||||
|
||||
import asyncclick as click
|
||||
from asyncclick import Context, UsageError
|
||||
from tortoise import ConfigurationError, Tortoise, generate_schema_for_client
|
||||
from tortoise.transactions import in_transaction
|
||||
|
||||
from aerich.migrate import Migrate
|
||||
from aerich.utils import get_app_connection, get_app_connection_name, get_tortoise_config
|
||||
from aerich import Command
|
||||
from aerich._compat import imports_tomlkit, tomllib
|
||||
from aerich.enums import Color
|
||||
from aerich.exceptions import DowngradeError
|
||||
from aerich.utils import add_src_path, get_tortoise_config
|
||||
from aerich.version import __version__
|
||||
|
||||
from . import __version__
|
||||
from .models import Aerich
|
||||
CONFIG_DEFAULT_VALUES = {
|
||||
"src_folder": ".",
|
||||
}
|
||||
|
||||
|
||||
class Color(str, Enum):
|
||||
green = "green"
|
||||
red = "red"
|
||||
yellow = "yellow"
|
||||
def _patch_context_to_close_tortoise_connections_when_exit() -> None:
|
||||
from tortoise import Tortoise, connections
|
||||
|
||||
origin_aexit = Context.__aexit__
|
||||
|
||||
async def aexit(*args, **kw) -> None:
|
||||
await origin_aexit(*args, **kw)
|
||||
if Tortoise._inited:
|
||||
await connections.close_all()
|
||||
|
||||
Context.__aexit__ = aexit # type:ignore[method-assign]
|
||||
|
||||
|
||||
parser = ConfigParser()
|
||||
_patch_context_to_close_tortoise_connections_when_exit()
|
||||
|
||||
|
||||
@click.group(context_settings={"help_option_names": ["-h", "--help"]})
|
||||
@click.version_option(__version__)
|
||||
@click.version_option(__version__, "-V", "--version")
|
||||
@click.option(
|
||||
"-c", "--config", default="aerich.ini", show_default=True, help="Config file.",
|
||||
)
|
||||
@click.option("--app", default="models", show_default=True, help="Tortoise-ORM app name.")
|
||||
@click.option(
|
||||
"-n",
|
||||
"--name",
|
||||
default="aerich",
|
||||
"-c",
|
||||
"--config",
|
||||
default="pyproject.toml",
|
||||
show_default=True,
|
||||
help="Name of section in .ini file to use for aerich config.",
|
||||
help="Config file.",
|
||||
)
|
||||
@click.option("--app", required=False, help="Tortoise-ORM app name.")
|
||||
@click.pass_context
|
||||
async def cli(ctx: Context, config, app, name):
|
||||
async def cli(ctx: Context, config, app) -> None:
|
||||
ctx.ensure_object(dict)
|
||||
ctx.obj["config_file"] = config
|
||||
ctx.obj["name"] = name
|
||||
ctx.obj["app"] = app
|
||||
|
||||
invoked_subcommand = ctx.invoked_subcommand
|
||||
if invoked_subcommand != "init":
|
||||
if not os.path.exists(config):
|
||||
raise UsageError("You must exec init first", ctx=ctx)
|
||||
parser.read(config)
|
||||
|
||||
location = parser[name]["location"]
|
||||
tortoise_orm = parser[name]["tortoise_orm"]
|
||||
|
||||
config_path = Path(config)
|
||||
if not config_path.exists():
|
||||
raise UsageError(
|
||||
"You need to run `aerich init` first to create the config file.", ctx=ctx
|
||||
)
|
||||
content = config_path.read_text("utf-8")
|
||||
doc: dict = tomllib.loads(content)
|
||||
try:
|
||||
tool = cast("dict[str, str]", doc["tool"]["aerich"])
|
||||
location = tool["location"]
|
||||
tortoise_orm = tool["tortoise_orm"]
|
||||
src_folder = tool.get("src_folder", CONFIG_DEFAULT_VALUES["src_folder"])
|
||||
except KeyError as e:
|
||||
raise UsageError(
|
||||
"You need run `aerich init` again when upgrading to aerich 0.6.0+."
|
||||
) from e
|
||||
add_src_path(src_folder)
|
||||
tortoise_config = get_tortoise_config(ctx, tortoise_orm)
|
||||
|
||||
ctx.obj["config"] = tortoise_config
|
||||
ctx.obj["location"] = location
|
||||
|
||||
if invoked_subcommand != "init-db":
|
||||
if not app:
|
||||
try:
|
||||
await Migrate.init_with_old_models(tortoise_config, app, location)
|
||||
except ConfigurationError:
|
||||
raise UsageError(ctx=ctx, message="You must exec init-db first")
|
||||
apps_config = cast(dict, tortoise_config["apps"])
|
||||
except KeyError:
|
||||
raise UsageError('Config must define "apps" section')
|
||||
app = list(apps_config.keys())[0]
|
||||
command = Command(tortoise_config=tortoise_config, app=app, location=location)
|
||||
ctx.obj["command"] = command
|
||||
if invoked_subcommand != "init-db":
|
||||
if not Path(location, app).exists():
|
||||
raise UsageError(
|
||||
"You need to run `aerich init-db` first to initialize the database.", ctx=ctx
|
||||
)
|
||||
await command.init()
|
||||
|
||||
|
||||
@cli.command(help="Generate migrate changes file.")
|
||||
@click.option("--name", default="update", show_default=True, help="Migrate name.")
|
||||
@cli.command(help="Generate a migration file for the current state of the models.")
|
||||
@click.option("--name", default="update", show_default=True, help="Migration name.")
|
||||
@click.option("--empty", default=False, is_flag=True, help="Generate an empty migration file.")
|
||||
@click.pass_context
|
||||
async def migrate(ctx: Context, name):
|
||||
config = ctx.obj["config"]
|
||||
location = ctx.obj["location"]
|
||||
app = ctx.obj["app"]
|
||||
|
||||
ret = await Migrate.migrate(name)
|
||||
async def migrate(ctx: Context, name, empty) -> None:
|
||||
command = ctx.obj["command"]
|
||||
ret = await command.migrate(name, empty)
|
||||
if not ret:
|
||||
return click.secho("No changes detected", fg=Color.yellow)
|
||||
Migrate.write_old_models(config, app, location)
|
||||
click.secho(f"Success migrate {ret}", fg=Color.green)
|
||||
click.secho(f"Success creating migration file {ret}", fg=Color.green)
|
||||
|
||||
|
||||
@cli.command(help="Upgrade to latest version.")
|
||||
@cli.command(help="Upgrade to specified migration version.")
|
||||
@click.option(
|
||||
"--in-transaction",
|
||||
"-i",
|
||||
default=True,
|
||||
type=bool,
|
||||
help="Make migrations in a single transaction or not. Can be helpful for large migrations or creating concurrent indexes.",
|
||||
)
|
||||
@click.option(
|
||||
"--fake",
|
||||
default=False,
|
||||
is_flag=True,
|
||||
help="Mark migrations as run without actually running them.",
|
||||
)
|
||||
@click.pass_context
|
||||
async def upgrade(ctx: Context):
|
||||
config = ctx.obj["config"]
|
||||
app = ctx.obj["app"]
|
||||
migrated = False
|
||||
for version in Migrate.get_all_version_files():
|
||||
if not await Aerich.exists(version=version, app=app):
|
||||
async with in_transaction(get_app_connection_name(config, app)) as conn:
|
||||
file_path = os.path.join(Migrate.migrate_location, version)
|
||||
with open(file_path, "r") as f:
|
||||
content = json.load(f)
|
||||
upgrade_query_list = content.get("upgrade")
|
||||
for upgrade_query in upgrade_query_list:
|
||||
await conn.execute_query(upgrade_query)
|
||||
await Aerich.create(version=version, app=app)
|
||||
click.secho(f"Success upgrade {version}", fg=Color.green)
|
||||
migrated = True
|
||||
async def upgrade(ctx: Context, in_transaction: bool, fake: bool) -> None:
|
||||
command = ctx.obj["command"]
|
||||
migrated = await command.upgrade(run_in_transaction=in_transaction, fake=fake)
|
||||
if not migrated:
|
||||
click.secho("No migrate items", fg=Color.yellow)
|
||||
click.secho("No upgrade items found", fg=Color.yellow)
|
||||
else:
|
||||
for version_file in migrated:
|
||||
if fake:
|
||||
click.echo(
|
||||
f"Upgrading to {version_file}... " + click.style("FAKED", fg=Color.green)
|
||||
)
|
||||
else:
|
||||
click.secho(f"Success upgrading to {version_file}", fg=Color.green)
|
||||
|
||||
|
||||
@cli.command(help="Downgrade to previous version.")
|
||||
@cli.command(help="Downgrade to specified version.")
|
||||
@click.option(
|
||||
"-v",
|
||||
"--version",
|
||||
default=-1,
|
||||
type=int,
|
||||
show_default=False,
|
||||
help="Specified version, default to last migration.",
|
||||
)
|
||||
@click.option(
|
||||
"-d",
|
||||
"--delete",
|
||||
is_flag=True,
|
||||
default=False,
|
||||
show_default=True,
|
||||
help="Also delete the migration files.",
|
||||
)
|
||||
@click.option(
|
||||
"--fake",
|
||||
default=False,
|
||||
is_flag=True,
|
||||
help="Mark migrations as run without actually running them.",
|
||||
)
|
||||
@click.pass_context
|
||||
async def downgrade(ctx: Context):
|
||||
app = ctx.obj["app"]
|
||||
config = ctx.obj["config"]
|
||||
last_version = await Migrate.get_last_version()
|
||||
if not last_version:
|
||||
return click.secho("No last version found", fg=Color.yellow)
|
||||
file = last_version.version
|
||||
async with in_transaction(get_app_connection_name(config, app)) as conn:
|
||||
file_path = os.path.join(Migrate.migrate_location, file)
|
||||
with open(file_path, "r") as f:
|
||||
content = json.load(f)
|
||||
downgrade_query_list = content.get("downgrade")
|
||||
for downgrade_query in downgrade_query_list:
|
||||
await conn.execute_query(downgrade_query)
|
||||
await last_version.delete()
|
||||
return click.secho(f"Success downgrade {file}", fg=Color.green)
|
||||
@click.confirmation_option(
|
||||
prompt="Downgrade is dangerous: you might lose your data! Are you sure?",
|
||||
)
|
||||
async def downgrade(ctx: Context, version: int, delete: bool, fake: bool) -> None:
|
||||
command = ctx.obj["command"]
|
||||
try:
|
||||
files = await command.downgrade(version, delete, fake=fake)
|
||||
except DowngradeError as e:
|
||||
return click.secho(str(e), fg=Color.yellow)
|
||||
for file in files:
|
||||
if fake:
|
||||
click.echo(f"Downgrading to {file}... " + click.style("FAKED", fg=Color.green))
|
||||
else:
|
||||
click.secho(f"Success downgrading to {file}", fg=Color.green)
|
||||
|
||||
|
||||
@cli.command(help="Show current available heads in migrate location.")
|
||||
@cli.command(help="Show currently available heads (unapplied migrations).")
|
||||
@click.pass_context
|
||||
async def heads(ctx: Context):
|
||||
app = ctx.obj["app"]
|
||||
versions = Migrate.get_all_version_files()
|
||||
is_heads = False
|
||||
for version in versions:
|
||||
if not await Aerich.exists(version=version, app=app):
|
||||
click.secho(version, fg=Color.green)
|
||||
is_heads = True
|
||||
if not is_heads:
|
||||
click.secho("No available heads,try migrate", fg=Color.green)
|
||||
async def heads(ctx: Context) -> None:
|
||||
command = ctx.obj["command"]
|
||||
head_list = await command.heads()
|
||||
if not head_list:
|
||||
return click.secho("No available heads.", fg=Color.green)
|
||||
for version in head_list:
|
||||
click.secho(version, fg=Color.green)
|
||||
|
||||
|
||||
@cli.command(help="List all migrate items.")
|
||||
@cli.command(help="List all migrations.")
|
||||
@click.pass_context
|
||||
def history(ctx):
|
||||
versions = Migrate.get_all_version_files()
|
||||
async def history(ctx: Context) -> None:
|
||||
command = ctx.obj["command"]
|
||||
versions = await command.history()
|
||||
if not versions:
|
||||
return click.secho("No migrations created yet.", fg=Color.green)
|
||||
for version in versions:
|
||||
click.secho(version, fg=Color.green)
|
||||
if not versions:
|
||||
click.secho("No history,try migrate", fg=Color.green)
|
||||
|
||||
|
||||
@cli.command(help="Init config file and generate root migrate location.")
|
||||
def _write_config(config_path, doc, table) -> None:
|
||||
tomlkit = imports_tomlkit()
|
||||
|
||||
try:
|
||||
doc["tool"]["aerich"] = table
|
||||
except KeyError:
|
||||
doc["tool"] = {"aerich": table}
|
||||
config_path.write_text(tomlkit.dumps(doc))
|
||||
|
||||
|
||||
@cli.command(help="Initialize aerich config and create migrations folder.")
|
||||
@click.option(
|
||||
"-t",
|
||||
"--tortoise-orm",
|
||||
required=True,
|
||||
help="Tortoise-ORM config module dict variable, like settings.TORTOISE_ORM.",
|
||||
help="Tortoise-ORM config dict location, like `settings.TORTOISE_ORM`.",
|
||||
)
|
||||
@click.option(
|
||||
"--location", default="./migrations", show_default=True, help="Migrate store location."
|
||||
"--location",
|
||||
default="./migrations",
|
||||
show_default=True,
|
||||
help="Migrations folder.",
|
||||
)
|
||||
@click.option(
|
||||
"-s",
|
||||
"--src_folder",
|
||||
default=CONFIG_DEFAULT_VALUES["src_folder"],
|
||||
show_default=False,
|
||||
help="Folder of the source, relative to the project root.",
|
||||
)
|
||||
@click.pass_context
|
||||
async def init(
|
||||
ctx: Context, tortoise_orm, location,
|
||||
):
|
||||
async def init(ctx: Context, tortoise_orm, location, src_folder) -> None:
|
||||
config_file = ctx.obj["config_file"]
|
||||
name = ctx.obj["name"]
|
||||
if os.path.exists(config_file):
|
||||
return click.secho("You have inited", fg=Color.yellow)
|
||||
|
||||
parser.add_section(name)
|
||||
parser.set(name, "tortoise_orm", tortoise_orm)
|
||||
parser.set(name, "location", location)
|
||||
if os.path.isabs(src_folder):
|
||||
src_folder = os.path.relpath(os.getcwd(), src_folder)
|
||||
# Add ./ so it's clear that this is relative path
|
||||
if not src_folder.startswith("./"):
|
||||
src_folder = "./" + src_folder
|
||||
|
||||
with open(config_file, "w") as f:
|
||||
parser.write(f)
|
||||
# check that we can find the configuration, if not we can fail before the config file gets created
|
||||
add_src_path(src_folder)
|
||||
get_tortoise_config(ctx, tortoise_orm)
|
||||
config_path = Path(config_file)
|
||||
content = config_path.read_text("utf-8") if config_path.exists() else "[tool.aerich]"
|
||||
doc: dict = tomllib.loads(content)
|
||||
|
||||
if not os.path.isdir(location):
|
||||
os.mkdir(location)
|
||||
table = {"tortoise_orm": tortoise_orm, "location": location, "src_folder": src_folder}
|
||||
if (aerich_config := doc.get("tool", {}).get("aerich")) and all(
|
||||
aerich_config.get(k) == v for k, v in table.items()
|
||||
):
|
||||
click.echo(f"Aerich config {config_file} already inited.")
|
||||
else:
|
||||
_write_config(config_path, doc, table)
|
||||
click.secho(f"Success writing aerich config to {config_file}", fg=Color.green)
|
||||
|
||||
click.secho(f"Success create migrate location {location}", fg=Color.green)
|
||||
click.secho(f"Success generate config file {config_file}", fg=Color.green)
|
||||
Path(location).mkdir(parents=True, exist_ok=True)
|
||||
click.secho(f"Success creating migrations folder {location}", fg=Color.green)
|
||||
|
||||
|
||||
@cli.command(help="Generate schema and generate app migrate location.")
|
||||
@cli.command(help="Generate schema and generate app migration folder.")
|
||||
@click.option(
|
||||
"-s",
|
||||
"--safe",
|
||||
type=bool,
|
||||
is_flag=True,
|
||||
default=True,
|
||||
help="When set to true, creates the table only when it does not already exist.",
|
||||
help="Create tables only when they do not already exist.",
|
||||
show_default=True,
|
||||
)
|
||||
@click.pass_context
|
||||
async def init_db(ctx: Context, safe):
|
||||
config = ctx.obj["config"]
|
||||
location = ctx.obj["location"]
|
||||
app = ctx.obj["app"]
|
||||
|
||||
dirname = os.path.join(location, app)
|
||||
if not os.path.isdir(dirname):
|
||||
os.mkdir(dirname)
|
||||
click.secho(f"Success create app migrate location {dirname}", fg=Color.green)
|
||||
|
||||
Migrate.write_old_models(config, app, location)
|
||||
|
||||
await Tortoise.init(config=config)
|
||||
connection = get_app_connection(config, app)
|
||||
await generate_schema_for_client(connection, safe)
|
||||
|
||||
return click.secho(f'Success generate schema for app "{app}"', fg=Color.green)
|
||||
async def init_db(ctx: Context, safe: bool) -> None:
|
||||
command = ctx.obj["command"]
|
||||
app = command.app
|
||||
dirname = Path(command.location, app)
|
||||
try:
|
||||
await command.init_db(safe)
|
||||
click.secho(f"Success creating app migration folder {dirname}", fg=Color.green)
|
||||
click.secho(f'Success generating initial migration file for app "{app}"', fg=Color.green)
|
||||
except FileExistsError:
|
||||
return click.secho(
|
||||
f"App {app} is already initialized. Delete {dirname} and try again.", fg=Color.yellow
|
||||
)
|
||||
|
||||
|
||||
def main():
|
||||
sys.path.insert(0, ".")
|
||||
cli(_anyio_backend="asyncio")
|
||||
@cli.command(help="Prints the current database tables to stdout as Tortoise-ORM models.")
|
||||
@click.option(
|
||||
"-t",
|
||||
"--table",
|
||||
help="Which tables to inspect.",
|
||||
multiple=True,
|
||||
required=False,
|
||||
)
|
||||
@click.pass_context
|
||||
async def inspectdb(ctx: Context, table: list[str]) -> None:
|
||||
command = ctx.obj["command"]
|
||||
ret = await command.inspectdb(table)
|
||||
click.secho(ret)
|
||||
|
||||
|
||||
def main() -> None:
|
||||
cli()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
49
aerich/coder.py
Normal file
49
aerich/coder.py
Normal file
@ -0,0 +1,49 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import json
|
||||
import pickle # nosec: B301,B403
|
||||
from typing import Any
|
||||
|
||||
from tortoise.indexes import Index
|
||||
|
||||
|
||||
class JsonEncoder(json.JSONEncoder):
|
||||
def default(self, obj) -> Any:
|
||||
if isinstance(obj, Index):
|
||||
if hasattr(obj, "describe"):
|
||||
# For tortoise>=0.24
|
||||
return obj.describe()
|
||||
return {
|
||||
"type": "index",
|
||||
"val": base64.b64encode(pickle.dumps(obj)).decode(), # nosec: B301
|
||||
}
|
||||
else:
|
||||
return super().default(obj)
|
||||
|
||||
|
||||
def object_hook(obj) -> Any:
|
||||
if (type_ := obj.get("type")) and type_ == "index" and (val := obj.get("val")):
|
||||
return pickle.loads(base64.b64decode(val)) # nosec: B301
|
||||
return obj
|
||||
|
||||
|
||||
def load_index(obj: dict) -> Index:
|
||||
"""Convert a dict that generated by `Index.decribe()` to a Index instance"""
|
||||
try:
|
||||
index = Index(fields=obj["fields"] or obj["expressions"], name=obj.get("name"))
|
||||
except KeyError:
|
||||
return object_hook(obj)
|
||||
if extra := obj.get("extra"):
|
||||
index.extra = extra
|
||||
if idx_type := obj.get("type"):
|
||||
index.INDEX_TYPE = idx_type
|
||||
return index
|
||||
|
||||
|
||||
def encoder(obj: dict) -> str:
|
||||
return json.dumps(obj, cls=JsonEncoder)
|
||||
|
||||
|
||||
def decoder(obj: str | bytes) -> Any:
|
||||
return json.loads(obj, object_hook=object_hook)
|
@ -1,67 +1,108 @@
|
||||
from typing import List, Type
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from enum import Enum
|
||||
from typing import TYPE_CHECKING, Any, cast
|
||||
|
||||
from tortoise import BaseDBAsyncClient, ForeignKeyFieldInstance, ManyToManyFieldInstance, Model
|
||||
from tortoise.backends.base.schema_generator import BaseSchemaGenerator
|
||||
from tortoise.fields import Field, JSONField, TextField, UUIDField
|
||||
|
||||
from aerich._compat import tortoise_version_less_than
|
||||
from aerich.utils import is_default_function
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from tortoise import BaseDBAsyncClient, Model
|
||||
|
||||
|
||||
class BaseDDL:
|
||||
schema_generator_cls: Type[BaseSchemaGenerator] = BaseSchemaGenerator
|
||||
schema_generator_cls: type[BaseSchemaGenerator] = BaseSchemaGenerator
|
||||
DIALECT = "sql"
|
||||
_DROP_TABLE_TEMPLATE = "DROP TABLE IF EXISTS {table_name}"
|
||||
_ADD_COLUMN_TEMPLATE = "ALTER TABLE {table_name} ADD {column}"
|
||||
_DROP_COLUMN_TEMPLATE = "ALTER TABLE {table_name} DROP COLUMN {column_name}"
|
||||
_ADD_INDEX_TEMPLATE = (
|
||||
"ALTER TABLE {table_name} ADD {unique} INDEX {index_name} ({column_names})"
|
||||
_DROP_TABLE_TEMPLATE = 'DROP TABLE IF EXISTS "{table_name}"'
|
||||
_ADD_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" ADD {column}'
|
||||
_DROP_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" DROP COLUMN "{column_name}"'
|
||||
_ALTER_DEFAULT_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" {default}'
|
||||
_RENAME_COLUMN_TEMPLATE = (
|
||||
'ALTER TABLE "{table_name}" RENAME COLUMN "{old_column_name}" TO "{new_column_name}"'
|
||||
)
|
||||
_DROP_INDEX_TEMPLATE = "ALTER TABLE {table_name} DROP INDEX {index_name}"
|
||||
_ADD_FK_TEMPLATE = "ALTER TABLE {table_name} ADD CONSTRAINT `{fk_name}` FOREIGN KEY (`{db_column}`) REFERENCES `{table}` (`{field}`) ON DELETE {on_delete}"
|
||||
_DROP_FK_TEMPLATE = "ALTER TABLE {table_name} DROP FOREIGN KEY {fk_name}"
|
||||
_M2M_TABLE_TEMPLATE = "CREATE TABLE {table_name} ({backward_key} {backward_type} NOT NULL REFERENCES {backward_table} ({backward_field}) ON DELETE CASCADE,{forward_key} {forward_type} NOT NULL REFERENCES {forward_table} ({forward_field}) ON DELETE CASCADE){extra}{comment};"
|
||||
_ADD_INDEX_TEMPLATE = 'ALTER TABLE "{table_name}" ADD {index_type}{unique}INDEX "{index_name}" ({column_names}){extra}'
|
||||
_DROP_INDEX_TEMPLATE = 'ALTER TABLE "{table_name}" DROP INDEX IF EXISTS "{index_name}"'
|
||||
_ADD_FK_TEMPLATE = 'ALTER TABLE "{table_name}" ADD CONSTRAINT "{fk_name}" FOREIGN KEY ("{db_column}") REFERENCES "{table}" ("{field}") ON DELETE {on_delete}'
|
||||
_DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP FOREIGN KEY "{fk_name}"'
|
||||
_M2M_TABLE_TEMPLATE = (
|
||||
'CREATE TABLE "{table_name}" (\n'
|
||||
' "{backward_key}" {backward_type} NOT NULL REFERENCES "{backward_table}" ("{backward_field}") ON DELETE CASCADE,\n'
|
||||
' "{forward_key}" {forward_type} NOT NULL REFERENCES "{forward_table}" ("{forward_field}") ON DELETE {on_delete}\n'
|
||||
"){extra}{comment}"
|
||||
)
|
||||
_MODIFY_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" MODIFY COLUMN {column}'
|
||||
_CHANGE_COLUMN_TEMPLATE = (
|
||||
'ALTER TABLE "{table_name}" CHANGE {old_column_name} {new_column_name} {new_column_type}'
|
||||
)
|
||||
_RENAME_TABLE_TEMPLATE = 'ALTER TABLE "{old_table_name}" RENAME TO "{new_table_name}"'
|
||||
|
||||
def __init__(self, client: "BaseDBAsyncClient"):
|
||||
def __init__(self, client: BaseDBAsyncClient) -> None:
|
||||
self.client = client
|
||||
self.schema_generator = self.schema_generator_cls(client)
|
||||
|
||||
def create_table(self, model: "Type[Model]"):
|
||||
return self.schema_generator._get_table_sql(model, True)["table_creation_string"]
|
||||
@staticmethod
|
||||
def get_table_name(model: type[Model]) -> str:
|
||||
return model._meta.db_table
|
||||
|
||||
def drop_table(self, model: "Type[Model]"):
|
||||
return self._DROP_TABLE_TEMPLATE.format(table_name=model._meta.db_table)
|
||||
def create_table(self, model: type[Model]) -> str:
|
||||
schema = self.schema_generator._get_table_sql(model, True)["table_creation_string"]
|
||||
if tortoise_version_less_than("0.23.1"):
|
||||
# Remove extra space
|
||||
schema = re.sub(r'(["()A-Za-z]) (["()A-Za-z])', r"\1 \2", schema)
|
||||
return schema.rstrip(";")
|
||||
|
||||
def create_m2m_table(self, model: "Type[Model]", field: ManyToManyFieldInstance):
|
||||
def drop_table(self, table_name: str) -> str:
|
||||
return self._DROP_TABLE_TEMPLATE.format(table_name=table_name)
|
||||
|
||||
def create_m2m(
|
||||
self, model: type[Model], field_describe: dict, reference_table_describe: dict
|
||||
) -> str:
|
||||
through = cast(str, field_describe.get("through"))
|
||||
description = field_describe.get("description")
|
||||
pk_field = cast(dict, reference_table_describe.get("pk_field"))
|
||||
reference_id = pk_field.get("db_column")
|
||||
db_field_types = cast(dict, pk_field.get("db_field_types"))
|
||||
return self._M2M_TABLE_TEMPLATE.format(
|
||||
table_name=field.through,
|
||||
table_name=through,
|
||||
backward_table=model._meta.db_table,
|
||||
forward_table=field.related_model._meta.db_table,
|
||||
forward_table=reference_table_describe.get("table"),
|
||||
backward_field=model._meta.db_pk_column,
|
||||
forward_field=field.related_model._meta.db_pk_column,
|
||||
backward_key=field.backward_key,
|
||||
forward_field=reference_id,
|
||||
backward_key=field_describe.get("backward_key"),
|
||||
backward_type=model._meta.pk.get_for_dialect(self.DIALECT, "SQL_TYPE"),
|
||||
forward_key=field.forward_key,
|
||||
forward_type=field.related_model._meta.pk.get_for_dialect(self.DIALECT, "SQL_TYPE"),
|
||||
extra=self.schema_generator._table_generate_extra(table=field.through),
|
||||
comment=self.schema_generator._table_comment_generator(
|
||||
table=field.through, comment=field.description
|
||||
)
|
||||
if field.description
|
||||
else "",
|
||||
forward_key=field_describe.get("forward_key"),
|
||||
forward_type=db_field_types.get(self.DIALECT) or db_field_types.get(""),
|
||||
on_delete=field_describe.get("on_delete"),
|
||||
extra=self.schema_generator._table_generate_extra(table=through),
|
||||
comment=(
|
||||
self.schema_generator._table_comment_generator(table=through, comment=description)
|
||||
if description
|
||||
else ""
|
||||
),
|
||||
)
|
||||
|
||||
def drop_m2m(self, field: ManyToManyFieldInstance):
|
||||
return self._DROP_TABLE_TEMPLATE.format(table_name=field.through)
|
||||
def drop_m2m(self, table_name: str) -> str:
|
||||
return self._DROP_TABLE_TEMPLATE.format(table_name=table_name)
|
||||
|
||||
def add_column(self, model: "Type[Model]", field_object: Field):
|
||||
def _get_default(self, model: type[Model], field_describe: dict) -> Any:
|
||||
db_table = model._meta.db_table
|
||||
default = field_object.default
|
||||
db_column = field_object.model_field_name
|
||||
auto_now_add = getattr(field_object, "auto_now_add", False)
|
||||
auto_now = getattr(field_object, "auto_now", False)
|
||||
default = field_describe.get("default")
|
||||
if isinstance(default, Enum):
|
||||
default = default.value
|
||||
db_column = cast(str, field_describe.get("db_column"))
|
||||
auto_now_add = field_describe.get("auto_now_add", False)
|
||||
auto_now = field_describe.get("auto_now", False)
|
||||
if default is not None or auto_now_add:
|
||||
if callable(default) or isinstance(field_object, (UUIDField, TextField, JSONField)):
|
||||
if field_describe.get("field_type") in [
|
||||
"UUIDField",
|
||||
"TextField",
|
||||
"JSONField",
|
||||
] or is_default_function(default):
|
||||
default = ""
|
||||
else:
|
||||
default = field_object.to_db_value(default, model)
|
||||
try:
|
||||
default = self.schema_generator._column_default_generator(
|
||||
db_table,
|
||||
@ -72,83 +113,190 @@ class BaseDDL:
|
||||
)
|
||||
except NotImplementedError:
|
||||
default = ""
|
||||
else:
|
||||
default = ""
|
||||
return self._ADD_COLUMN_TEMPLATE.format(
|
||||
table_name=db_table,
|
||||
column=self.schema_generator._create_string(
|
||||
db_column=field_object.model_field_name,
|
||||
field_type=field_object.get_for_dialect(self.DIALECT, "SQL_TYPE"),
|
||||
nullable="NOT NULL" if not field_object.null else "",
|
||||
unique="UNIQUE" if field_object.unique else "",
|
||||
comment=self.schema_generator._column_comment_generator(
|
||||
table=db_table,
|
||||
column=field_object.model_field_name,
|
||||
comment=field_object.description,
|
||||
)
|
||||
if field_object.description
|
||||
else "",
|
||||
is_primary_key=field_object.pk,
|
||||
default=default,
|
||||
),
|
||||
)
|
||||
return default
|
||||
|
||||
def drop_column(self, model: "Type[Model]", column_name: str):
|
||||
def add_column(self, model: type[Model], field_describe: dict, is_pk: bool = False) -> str:
|
||||
return self._add_or_modify_column(model, field_describe, is_pk)
|
||||
|
||||
def _add_or_modify_column(
|
||||
self, model: type[Model], field_describe: dict, is_pk: bool, modify: bool = False
|
||||
) -> str:
|
||||
db_table = model._meta.db_table
|
||||
description = field_describe.get("description")
|
||||
db_column = cast(str, field_describe.get("db_column"))
|
||||
db_field_types = cast(dict, field_describe.get("db_field_types"))
|
||||
default = self._get_default(model, field_describe)
|
||||
if default is None:
|
||||
default = ""
|
||||
if modify:
|
||||
unique = ""
|
||||
template = self._MODIFY_COLUMN_TEMPLATE
|
||||
else:
|
||||
# sqlite does not support alter table to add unique column
|
||||
unique = " UNIQUE" if field_describe.get("unique") and self.DIALECT != "sqlite" else ""
|
||||
template = self._ADD_COLUMN_TEMPLATE
|
||||
column = self.schema_generator._create_string(
|
||||
db_column=db_column,
|
||||
field_type=db_field_types.get(self.DIALECT, db_field_types.get("")),
|
||||
nullable=" NOT NULL" if not field_describe.get("nullable") else "",
|
||||
unique=unique,
|
||||
comment=(
|
||||
self.schema_generator._column_comment_generator(
|
||||
table=db_table,
|
||||
column=db_column,
|
||||
comment=description,
|
||||
)
|
||||
if description
|
||||
else ""
|
||||
),
|
||||
is_primary_key=is_pk,
|
||||
default=default,
|
||||
)
|
||||
if tortoise_version_less_than("0.23.1"):
|
||||
column = column.replace(" ", " ")
|
||||
return template.format(table_name=db_table, column=column)
|
||||
|
||||
def drop_column(self, model: type[Model], column_name: str) -> str:
|
||||
return self._DROP_COLUMN_TEMPLATE.format(
|
||||
table_name=model._meta.db_table, column_name=column_name
|
||||
)
|
||||
|
||||
def add_index(self, model: "Type[Model]", field_names: List[str], unique=False):
|
||||
def modify_column(self, model: type[Model], field_describe: dict, is_pk: bool = False) -> str:
|
||||
return self._add_or_modify_column(model, field_describe, is_pk, modify=True)
|
||||
|
||||
def rename_column(self, model: type[Model], old_column_name: str, new_column_name: str) -> str:
|
||||
return self._RENAME_COLUMN_TEMPLATE.format(
|
||||
table_name=model._meta.db_table,
|
||||
old_column_name=old_column_name,
|
||||
new_column_name=new_column_name,
|
||||
)
|
||||
|
||||
def change_column(
|
||||
self, model: type[Model], old_column_name: str, new_column_name: str, new_column_type: str
|
||||
) -> str:
|
||||
return self._CHANGE_COLUMN_TEMPLATE.format(
|
||||
table_name=model._meta.db_table,
|
||||
old_column_name=old_column_name,
|
||||
new_column_name=new_column_name,
|
||||
new_column_type=new_column_type,
|
||||
)
|
||||
|
||||
def _index_name(self, unique: bool | None, model: type[Model], field_names: list[str]) -> str:
|
||||
func_name = "_get_index_name"
|
||||
if not hasattr(self.schema_generator, func_name):
|
||||
# For tortoise-orm<0.24.1
|
||||
func_name = "_generate_index_name"
|
||||
return getattr(self.schema_generator, func_name)(
|
||||
"idx" if not unique else "uid", model, field_names
|
||||
)
|
||||
|
||||
def add_index(
|
||||
self,
|
||||
model: type[Model],
|
||||
field_names: list[str],
|
||||
unique: bool | None = False,
|
||||
name: str | None = None,
|
||||
index_type: str = "",
|
||||
extra: str | None = "",
|
||||
) -> str:
|
||||
return self._ADD_INDEX_TEMPLATE.format(
|
||||
unique="UNIQUE" if unique else "",
|
||||
index_name=self.schema_generator._generate_index_name(
|
||||
"idx" if not unique else "uid", model, field_names
|
||||
),
|
||||
unique="UNIQUE " if unique else "",
|
||||
index_name=name or self._index_name(unique, model, field_names),
|
||||
table_name=model._meta.db_table,
|
||||
column_names=", ".join([self.schema_generator.quote(f) for f in field_names]),
|
||||
column_names=", ".join(self.schema_generator.quote(f) for f in field_names),
|
||||
index_type=f"{index_type} " if index_type else "",
|
||||
extra=f"{extra}" if extra else "",
|
||||
)
|
||||
|
||||
def drop_index(self, model: "Type[Model]", field_names: List[str], unique=False):
|
||||
def drop_index(
|
||||
self,
|
||||
model: type[Model],
|
||||
field_names: list[str],
|
||||
unique: bool | None = False,
|
||||
name: str | None = None,
|
||||
) -> str:
|
||||
return self._DROP_INDEX_TEMPLATE.format(
|
||||
index_name=self.schema_generator._generate_index_name(
|
||||
"idx" if not unique else "uid", model, field_names
|
||||
),
|
||||
index_name=name or self._index_name(unique, model, field_names),
|
||||
table_name=model._meta.db_table,
|
||||
)
|
||||
|
||||
def add_fk(self, model: "Type[Model]", field: ForeignKeyFieldInstance):
|
||||
db_table = model._meta.db_table
|
||||
to_field_name = field.to_field_instance.source_field
|
||||
if not to_field_name:
|
||||
to_field_name = field.to_field_instance.model_field_name
|
||||
def drop_index_by_name(self, model: type[Model], index_name: str) -> str:
|
||||
return self.drop_index(model, [], name=index_name)
|
||||
|
||||
db_column = field.source_field or field.model_field_name + "_id"
|
||||
fk_name = self.schema_generator._generate_fk_name(
|
||||
def _generate_fk_name(
|
||||
self, db_table: str, field_describe: dict, reference_table_describe: dict
|
||||
) -> str:
|
||||
"""Generate fk name"""
|
||||
db_column = cast(str, field_describe.get("raw_field"))
|
||||
pk_field = cast(dict, reference_table_describe.get("pk_field"))
|
||||
to_field = cast(str, pk_field.get("db_column"))
|
||||
to_table = cast(str, reference_table_describe.get("table"))
|
||||
func_name = "_get_fk_name"
|
||||
if not hasattr(self.schema_generator, func_name):
|
||||
# For tortoise-orm<0.24.1
|
||||
func_name = "_generate_fk_name"
|
||||
return getattr(self.schema_generator, func_name)(
|
||||
from_table=db_table,
|
||||
from_field=db_column,
|
||||
to_table=field.related_model._meta.db_table,
|
||||
to_field=to_field_name,
|
||||
)
|
||||
return self._ADD_FK_TEMPLATE.format(
|
||||
table_name=db_table,
|
||||
fk_name=fk_name,
|
||||
db_column=db_column,
|
||||
table=field.related_model._meta.db_table,
|
||||
field=to_field_name,
|
||||
on_delete=field.on_delete,
|
||||
to_table=to_table,
|
||||
to_field=to_field,
|
||||
)
|
||||
|
||||
def drop_fk(self, model: "Type[Model]", field: ForeignKeyFieldInstance):
|
||||
to_field_name = field.to_field_instance.source_field
|
||||
if not to_field_name:
|
||||
to_field_name = field.to_field_instance.model_field_name
|
||||
def add_fk(
|
||||
self, model: type[Model], field_describe: dict, reference_table_describe: dict
|
||||
) -> str:
|
||||
db_table = model._meta.db_table
|
||||
return self._DROP_FK_TEMPLATE.format(
|
||||
|
||||
db_column = field_describe.get("raw_field")
|
||||
pk_field = cast(dict, reference_table_describe.get("pk_field"))
|
||||
reference_id = pk_field.get("db_column")
|
||||
return self._ADD_FK_TEMPLATE.format(
|
||||
table_name=db_table,
|
||||
fk_name=self.schema_generator._generate_fk_name(
|
||||
from_table=db_table,
|
||||
from_field=field.source_field or field.model_field_name + "_id",
|
||||
to_table=field.related_model._meta.db_table,
|
||||
to_field=to_field_name,
|
||||
),
|
||||
fk_name=self._generate_fk_name(db_table, field_describe, reference_table_describe),
|
||||
db_column=db_column,
|
||||
table=reference_table_describe.get("table"),
|
||||
field=reference_id,
|
||||
on_delete=field_describe.get("on_delete"),
|
||||
)
|
||||
|
||||
def drop_fk(
|
||||
self, model: type[Model], field_describe: dict, reference_table_describe: dict
|
||||
) -> str:
|
||||
db_table = model._meta.db_table
|
||||
fk_name = self._generate_fk_name(db_table, field_describe, reference_table_describe)
|
||||
return self._DROP_FK_TEMPLATE.format(table_name=db_table, fk_name=fk_name)
|
||||
|
||||
def alter_column_default(self, model: type[Model], field_describe: dict) -> str:
|
||||
db_table = model._meta.db_table
|
||||
default = self._get_default(model, field_describe)
|
||||
return self._ALTER_DEFAULT_TEMPLATE.format(
|
||||
table_name=db_table,
|
||||
column=field_describe.get("db_column"),
|
||||
default="SET" + default if default is not None else "DROP DEFAULT",
|
||||
)
|
||||
|
||||
def alter_column_null(self, model: type[Model], field_describe: dict) -> str:
|
||||
return self.modify_column(model, field_describe)
|
||||
|
||||
def set_comment(self, model: type[Model], field_describe: dict) -> str:
|
||||
return self.modify_column(model, field_describe)
|
||||
|
||||
def rename_table(self, model: type[Model], old_table_name: str, new_table_name: str) -> str:
|
||||
db_table = model._meta.db_table
|
||||
return self._RENAME_TABLE_TEMPLATE.format(
|
||||
table_name=db_table, old_table_name=old_table_name, new_table_name=new_table_name
|
||||
)
|
||||
|
||||
def alter_indexed_column_unique(
|
||||
self, model: type[Model], field_name: str, drop: bool = False
|
||||
) -> list[str]:
|
||||
"""Change unique constraint for indexed field, e.g.: Field(db_index=True) --> Field(unique=True)"""
|
||||
fields = [field_name]
|
||||
if drop:
|
||||
drop_unique = self.drop_index(model, fields, unique=True)
|
||||
add_normal_index = self.add_index(model, fields, unique=False)
|
||||
return [drop_unique, add_normal_index]
|
||||
else:
|
||||
drop_index = self.drop_index(model, fields, unique=False)
|
||||
add_unique_index = self.add_index(model, fields, unique=True)
|
||||
return [drop_index, add_unique_index]
|
||||
|
@ -1,8 +1,61 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from tortoise.backends.mysql.schema_generator import MySQLSchemaGenerator
|
||||
|
||||
from aerich.ddl import BaseDDL
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from tortoise import Model
|
||||
|
||||
|
||||
class MysqlDDL(BaseDDL):
|
||||
schema_generator_cls = MySQLSchemaGenerator
|
||||
DIALECT = MySQLSchemaGenerator.DIALECT
|
||||
_DROP_TABLE_TEMPLATE = "DROP TABLE IF EXISTS `{table_name}`"
|
||||
_ADD_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` ADD {column}"
|
||||
_ALTER_DEFAULT_TEMPLATE = "ALTER TABLE `{table_name}` ALTER COLUMN `{column}` {default}"
|
||||
_CHANGE_COLUMN_TEMPLATE = (
|
||||
"ALTER TABLE `{table_name}` CHANGE {old_column_name} {new_column_name} {new_column_type}"
|
||||
)
|
||||
_DROP_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` DROP COLUMN `{column_name}`"
|
||||
_RENAME_COLUMN_TEMPLATE = (
|
||||
"ALTER TABLE `{table_name}` RENAME COLUMN `{old_column_name}` TO `{new_column_name}`"
|
||||
)
|
||||
_ADD_INDEX_TEMPLATE = "ALTER TABLE `{table_name}` ADD {index_type}{unique}INDEX `{index_name}` ({column_names}){extra}"
|
||||
_DROP_INDEX_TEMPLATE = "ALTER TABLE `{table_name}` DROP INDEX `{index_name}`"
|
||||
_ADD_INDEXED_UNIQUE_TEMPLATE = (
|
||||
"ALTER TABLE `{table_name}` DROP INDEX `{index_name}`, ADD UNIQUE (`{column_name}`)"
|
||||
)
|
||||
_DROP_INDEXED_UNIQUE_TEMPLATE = (
|
||||
"ALTER TABLE `{table_name}` DROP INDEX `{column_name}`, ADD INDEX (`{index_name}`)"
|
||||
)
|
||||
_ADD_FK_TEMPLATE = "ALTER TABLE `{table_name}` ADD CONSTRAINT `{fk_name}` FOREIGN KEY (`{db_column}`) REFERENCES `{table}` (`{field}`) ON DELETE {on_delete}"
|
||||
_DROP_FK_TEMPLATE = "ALTER TABLE `{table_name}` DROP FOREIGN KEY `{fk_name}`"
|
||||
_M2M_TABLE_TEMPLATE = (
|
||||
"CREATE TABLE `{table_name}` (\n"
|
||||
" `{backward_key}` {backward_type} NOT NULL REFERENCES `{backward_table}` (`{backward_field}`) ON DELETE CASCADE,\n"
|
||||
" `{forward_key}` {forward_type} NOT NULL REFERENCES `{forward_table}` (`{forward_field}`) ON DELETE CASCADE\n"
|
||||
"){extra}{comment}"
|
||||
)
|
||||
_MODIFY_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` MODIFY COLUMN {column}"
|
||||
_RENAME_TABLE_TEMPLATE = "ALTER TABLE `{old_table_name}` RENAME TO `{new_table_name}`"
|
||||
|
||||
def _index_name(self, unique: bool | None, model: type[Model], field_names: list[str]) -> str:
|
||||
if unique and len(field_names) == 1:
|
||||
# Example: `email = CharField(max_length=50, unique=True)`
|
||||
# Generate schema: `"email" VARCHAR(10) NOT NULL UNIQUE`
|
||||
# Unique index key is the same as field name: `email`
|
||||
return field_names[0]
|
||||
return super()._index_name(unique, model, field_names)
|
||||
|
||||
def alter_indexed_column_unique(
|
||||
self, model: type[Model], field_name: str, drop: bool = False
|
||||
) -> list[str]:
|
||||
# if drop is false: Drop index and add unique
|
||||
# else: Drop unique index and add normal index
|
||||
template = self._DROP_INDEXED_UNIQUE_TEMPLATE if drop else self._ADD_INDEXED_UNIQUE_TEMPLATE
|
||||
table = self.get_table_name(model)
|
||||
index = self._index_name(unique=False, model=model, field_names=[field_name])
|
||||
return [template.format(table_name=table, index_name=index, column_name=field_name)]
|
||||
|
@ -1,8 +1,53 @@
|
||||
from tortoise.backends.asyncpg.schema_generator import AsyncpgSchemaGenerator
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import cast
|
||||
|
||||
from tortoise import Model
|
||||
from tortoise.backends.base_postgres.schema_generator import BasePostgresSchemaGenerator
|
||||
|
||||
from aerich.ddl import BaseDDL
|
||||
|
||||
|
||||
class PostgresDDL(BaseDDL):
|
||||
schema_generator_cls = AsyncpgSchemaGenerator
|
||||
DIALECT = AsyncpgSchemaGenerator.DIALECT
|
||||
schema_generator_cls = BasePostgresSchemaGenerator
|
||||
DIALECT = BasePostgresSchemaGenerator.DIALECT
|
||||
_ADD_INDEX_TEMPLATE = 'CREATE {unique}INDEX IF NOT EXISTS "{index_name}" ON "{table_name}" {index_type}({column_names}){extra}'
|
||||
_DROP_INDEX_TEMPLATE = 'DROP INDEX IF EXISTS "{index_name}"'
|
||||
_ALTER_NULL_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" {set_drop} NOT NULL'
|
||||
_MODIFY_COLUMN_TEMPLATE = (
|
||||
'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" TYPE {datatype}{using}'
|
||||
)
|
||||
_SET_COMMENT_TEMPLATE = 'COMMENT ON COLUMN "{table_name}"."{column}" IS {comment}'
|
||||
_DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP CONSTRAINT IF EXISTS "{fk_name}"'
|
||||
|
||||
def alter_column_null(self, model: type[Model], field_describe: dict) -> str:
|
||||
db_table = model._meta.db_table
|
||||
return self._ALTER_NULL_TEMPLATE.format(
|
||||
table_name=db_table,
|
||||
column=field_describe.get("db_column"),
|
||||
set_drop="DROP" if field_describe.get("nullable") else "SET",
|
||||
)
|
||||
|
||||
def modify_column(self, model: type[Model], field_describe: dict, is_pk: bool = False) -> str:
|
||||
db_table = model._meta.db_table
|
||||
db_field_types = cast(dict, field_describe.get("db_field_types"))
|
||||
db_column = field_describe.get("db_column")
|
||||
datatype = db_field_types.get(self.DIALECT) or db_field_types.get("")
|
||||
return self._MODIFY_COLUMN_TEMPLATE.format(
|
||||
table_name=db_table,
|
||||
column=db_column,
|
||||
datatype=datatype,
|
||||
using=f' USING "{db_column}"::{datatype}',
|
||||
)
|
||||
|
||||
def set_comment(self, model: type[Model], field_describe: dict) -> str:
|
||||
db_table = model._meta.db_table
|
||||
return self._SET_COMMENT_TEMPLATE.format(
|
||||
table_name=db_table,
|
||||
column=field_describe.get("db_column") or field_describe.get("raw_field"),
|
||||
comment=(
|
||||
"'{}'".format(field_describe.get("description"))
|
||||
if field_describe.get("description")
|
||||
else "NULL"
|
||||
),
|
||||
)
|
||||
|
@ -1,8 +1,26 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from tortoise import Model
|
||||
from tortoise.backends.sqlite.schema_generator import SqliteSchemaGenerator
|
||||
|
||||
from aerich.ddl import BaseDDL
|
||||
from aerich.exceptions import NotSupportError
|
||||
|
||||
|
||||
class SqliteDDL(BaseDDL):
|
||||
schema_generator_cls = SqliteSchemaGenerator
|
||||
DIALECT = SqliteSchemaGenerator.DIALECT
|
||||
_ADD_INDEX_TEMPLATE = 'CREATE {unique}INDEX "{index_name}" ON "{table_name}" ({column_names})'
|
||||
_DROP_INDEX_TEMPLATE = 'DROP INDEX IF EXISTS "{index_name}"'
|
||||
|
||||
def modify_column(self, model: type[Model], field_object: dict, is_pk: bool = True):
|
||||
raise NotSupportError("Modify column is unsupported in SQLite.")
|
||||
|
||||
def alter_column_default(self, model: type[Model], field_describe: dict):
|
||||
raise NotSupportError("Alter column default is unsupported in SQLite.")
|
||||
|
||||
def alter_column_null(self, model: type[Model], field_describe: dict):
|
||||
raise NotSupportError("Alter column null is unsupported in SQLite.")
|
||||
|
||||
def set_comment(self, model: type[Model], field_describe: dict):
|
||||
raise NotSupportError("Alter column comment is unsupported in SQLite.")
|
||||
|
7
aerich/enums.py
Normal file
7
aerich/enums.py
Normal file
@ -0,0 +1,7 @@
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class Color(str, Enum):
|
||||
green = "green"
|
||||
red = "red"
|
||||
yellow = "yellow"
|
@ -1,6 +1,10 @@
|
||||
class ConfigurationError(Exception):
|
||||
class NotSupportError(Exception):
|
||||
"""
|
||||
config error
|
||||
raise when features not support
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
class DowngradeError(Exception):
|
||||
"""
|
||||
raise when downgrade error
|
||||
"""
|
||||
|
192
aerich/inspectdb/__init__.py
Normal file
192
aerich/inspectdb/__init__.py
Normal file
@ -0,0 +1,192 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import contextlib
|
||||
from typing import Any, Callable, Dict, TypedDict
|
||||
|
||||
from pydantic import BaseModel
|
||||
from tortoise import BaseDBAsyncClient
|
||||
|
||||
|
||||
class ColumnInfoDict(TypedDict):
|
||||
name: str
|
||||
pk: str
|
||||
index: str
|
||||
null: str
|
||||
default: str
|
||||
length: str
|
||||
comment: str
|
||||
|
||||
|
||||
# TODO: use dict to replace typing.Dict when dropping support for Python3.8
|
||||
FieldMapDict = Dict[str, Callable[..., str]]
|
||||
|
||||
|
||||
class Column(BaseModel):
|
||||
name: str
|
||||
data_type: str
|
||||
null: bool
|
||||
default: Any
|
||||
comment: str | None = None
|
||||
pk: bool
|
||||
unique: bool
|
||||
index: bool
|
||||
length: int | None = None
|
||||
extra: str | None = None
|
||||
decimal_places: int | None = None
|
||||
max_digits: int | None = None
|
||||
|
||||
def translate(self) -> ColumnInfoDict:
|
||||
comment = default = length = index = null = pk = ""
|
||||
if self.pk:
|
||||
pk = "primary_key=True, "
|
||||
else:
|
||||
if self.unique:
|
||||
index = "unique=True, "
|
||||
elif self.index:
|
||||
index = "db_index=True, "
|
||||
if self.data_type in ("varchar", "VARCHAR"):
|
||||
length = f"max_length={self.length}, "
|
||||
elif self.data_type in ("decimal", "numeric"):
|
||||
length_parts = []
|
||||
if self.max_digits:
|
||||
length_parts.append(f"max_digits={self.max_digits}")
|
||||
if self.decimal_places:
|
||||
length_parts.append(f"decimal_places={self.decimal_places}")
|
||||
if length_parts:
|
||||
length = ", ".join(length_parts) + ", "
|
||||
if self.null:
|
||||
null = "null=True, "
|
||||
if self.default is not None and not self.pk:
|
||||
if self.data_type in ("tinyint", "INT"):
|
||||
default = f"default={'True' if self.default == '1' else 'False'}, "
|
||||
elif self.data_type == "bool":
|
||||
default = f"default={'True' if self.default == 'true' else 'False'}, "
|
||||
elif self.data_type in ("datetime", "timestamptz", "TIMESTAMP"):
|
||||
if self.default == "CURRENT_TIMESTAMP":
|
||||
if self.extra == "DEFAULT_GENERATED on update CURRENT_TIMESTAMP":
|
||||
default = "auto_now=True, "
|
||||
else:
|
||||
default = "auto_now_add=True, "
|
||||
else:
|
||||
if "::" in self.default:
|
||||
default = f"default={self.default.split('::')[0]}, "
|
||||
elif self.default.endswith("()"):
|
||||
default = ""
|
||||
elif self.default == "":
|
||||
default = 'default=""'
|
||||
else:
|
||||
default = f"default={self.default}, "
|
||||
|
||||
if self.comment:
|
||||
comment = f"description='{self.comment}', "
|
||||
return {
|
||||
"name": self.name,
|
||||
"pk": pk,
|
||||
"index": index,
|
||||
"null": null,
|
||||
"default": default,
|
||||
"length": length,
|
||||
"comment": comment,
|
||||
}
|
||||
|
||||
|
||||
class Inspect:
|
||||
_table_template = "class {table}(Model):\n"
|
||||
|
||||
def __init__(self, conn: BaseDBAsyncClient, tables: list[str] | None = None) -> None:
|
||||
self.conn = conn
|
||||
with contextlib.suppress(AttributeError):
|
||||
self.database = conn.database # type:ignore[attr-defined]
|
||||
self.tables = tables
|
||||
|
||||
@property
|
||||
def field_map(self) -> FieldMapDict:
|
||||
raise NotImplementedError
|
||||
|
||||
async def inspect(self) -> str:
|
||||
if not self.tables:
|
||||
self.tables = await self.get_all_tables()
|
||||
result = "from tortoise import Model, fields\n\n\n"
|
||||
tables = []
|
||||
for table in self.tables:
|
||||
columns = await self.get_columns(table)
|
||||
fields = []
|
||||
model = self._table_template.format(table=table.title().replace("_", ""))
|
||||
for column in columns:
|
||||
field = self.field_map[column.data_type](**column.translate())
|
||||
fields.append(" " + field)
|
||||
tables.append(model + "\n".join(fields))
|
||||
return result + "\n\n\n".join(tables)
|
||||
|
||||
async def get_columns(self, table: str) -> list[Column]:
|
||||
raise NotImplementedError
|
||||
|
||||
async def get_all_tables(self) -> list[str]:
|
||||
raise NotImplementedError
|
||||
|
||||
@staticmethod
|
||||
def get_field_string(
|
||||
field_class: str, arguments: str = "{null}{default}{comment}", **kwargs
|
||||
) -> str:
|
||||
name = kwargs["name"]
|
||||
field_params = arguments.format(**kwargs).strip().rstrip(",")
|
||||
return f"{name} = fields.{field_class}({field_params})"
|
||||
|
||||
@classmethod
|
||||
def decimal_field(cls, **kwargs) -> str:
|
||||
return cls.get_field_string("DecimalField", **kwargs)
|
||||
|
||||
@classmethod
|
||||
def time_field(cls, **kwargs) -> str:
|
||||
return cls.get_field_string("TimeField", **kwargs)
|
||||
|
||||
@classmethod
|
||||
def date_field(cls, **kwargs) -> str:
|
||||
return cls.get_field_string("DateField", **kwargs)
|
||||
|
||||
@classmethod
|
||||
def float_field(cls, **kwargs) -> str:
|
||||
return cls.get_field_string("FloatField", **kwargs)
|
||||
|
||||
@classmethod
|
||||
def datetime_field(cls, **kwargs) -> str:
|
||||
return cls.get_field_string("DatetimeField", **kwargs)
|
||||
|
||||
@classmethod
|
||||
def text_field(cls, **kwargs) -> str:
|
||||
return cls.get_field_string("TextField", **kwargs)
|
||||
|
||||
@classmethod
|
||||
def char_field(cls, **kwargs) -> str:
|
||||
arguments = "{pk}{index}{length}{null}{default}{comment}"
|
||||
return cls.get_field_string("CharField", arguments, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def int_field(cls, field_class="IntField", **kwargs) -> str:
|
||||
arguments = "{pk}{index}{default}{comment}"
|
||||
return cls.get_field_string(field_class, arguments, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def smallint_field(cls, **kwargs) -> str:
|
||||
return cls.int_field("SmallIntField", **kwargs)
|
||||
|
||||
@classmethod
|
||||
def bigint_field(cls, **kwargs) -> str:
|
||||
return cls.int_field("BigIntField", **kwargs)
|
||||
|
||||
@classmethod
|
||||
def bool_field(cls, **kwargs) -> str:
|
||||
return cls.get_field_string("BooleanField", **kwargs)
|
||||
|
||||
@classmethod
|
||||
def uuid_field(cls, **kwargs) -> str:
|
||||
arguments = "{pk}{index}{default}{comment}"
|
||||
return cls.get_field_string("UUIDField", arguments, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def json_field(cls, **kwargs) -> str:
|
||||
return cls.get_field_string("JSONField", **kwargs)
|
||||
|
||||
@classmethod
|
||||
def binary_field(cls, **kwargs) -> str:
|
||||
return cls.get_field_string("BinaryField", **kwargs)
|
67
aerich/inspectdb/mysql.py
Normal file
67
aerich/inspectdb/mysql.py
Normal file
@ -0,0 +1,67 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from aerich.inspectdb import Column, FieldMapDict, Inspect
|
||||
|
||||
|
||||
class InspectMySQL(Inspect):
|
||||
@property
|
||||
def field_map(self) -> FieldMapDict:
|
||||
return {
|
||||
"int": self.int_field,
|
||||
"smallint": self.smallint_field,
|
||||
"tinyint": self.bool_field,
|
||||
"bigint": self.bigint_field,
|
||||
"varchar": self.char_field,
|
||||
"char": self.uuid_field,
|
||||
"longtext": self.text_field,
|
||||
"text": self.text_field,
|
||||
"datetime": self.datetime_field,
|
||||
"float": self.float_field,
|
||||
"double": self.float_field,
|
||||
"date": self.date_field,
|
||||
"time": self.time_field,
|
||||
"decimal": self.decimal_field,
|
||||
"json": self.json_field,
|
||||
"longblob": self.binary_field,
|
||||
}
|
||||
|
||||
async def get_all_tables(self) -> list[str]:
|
||||
sql = "select TABLE_NAME from information_schema.TABLES where TABLE_SCHEMA=%s"
|
||||
ret = await self.conn.execute_query_dict(sql, [self.database])
|
||||
return list(map(lambda x: x["TABLE_NAME"], ret))
|
||||
|
||||
async def get_columns(self, table: str) -> list[Column]:
|
||||
columns = []
|
||||
sql = """select c.*, s.NON_UNIQUE, s.INDEX_NAME
|
||||
from information_schema.COLUMNS c
|
||||
left join information_schema.STATISTICS s on c.TABLE_NAME = s.TABLE_NAME
|
||||
and c.TABLE_SCHEMA = s.TABLE_SCHEMA
|
||||
and c.COLUMN_NAME = s.COLUMN_NAME
|
||||
where c.TABLE_SCHEMA = %s
|
||||
and c.TABLE_NAME = %s"""
|
||||
ret = await self.conn.execute_query_dict(sql, [self.database, table])
|
||||
for row in ret:
|
||||
unique = index = False
|
||||
if (non_unique := row["NON_UNIQUE"]) is not None:
|
||||
unique = not non_unique
|
||||
elif row["COLUMN_KEY"] == "UNI":
|
||||
unique = True
|
||||
if (index_name := row["INDEX_NAME"]) is not None:
|
||||
index = index_name != "PRIMARY"
|
||||
columns.append(
|
||||
Column(
|
||||
name=row["COLUMN_NAME"],
|
||||
data_type=row["DATA_TYPE"],
|
||||
null=row["IS_NULLABLE"] == "YES",
|
||||
default=row["COLUMN_DEFAULT"],
|
||||
pk=row["COLUMN_KEY"] == "PRI",
|
||||
comment=row["COLUMN_COMMENT"],
|
||||
unique=unique,
|
||||
extra=row["EXTRA"],
|
||||
index=index,
|
||||
length=row["CHARACTER_MAXIMUM_LENGTH"],
|
||||
max_digits=row["NUMERIC_PRECISION"],
|
||||
decimal_places=row["NUMERIC_SCALE"],
|
||||
)
|
||||
)
|
||||
return columns
|
83
aerich/inspectdb/postgres.py
Normal file
83
aerich/inspectdb/postgres.py
Normal file
@ -0,0 +1,83 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from aerich.inspectdb import Column, FieldMapDict, Inspect
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from tortoise.backends.base_postgres.client import BasePostgresClient
|
||||
|
||||
|
||||
class InspectPostgres(Inspect):
|
||||
def __init__(self, conn: BasePostgresClient, tables: list[str] | None = None) -> None:
|
||||
super().__init__(conn, tables)
|
||||
self.schema = conn.server_settings.get("schema") or "public"
|
||||
|
||||
@property
|
||||
def field_map(self) -> FieldMapDict:
|
||||
return {
|
||||
"int2": self.smallint_field,
|
||||
"int4": self.int_field,
|
||||
"int8": self.bigint_field,
|
||||
"smallint": self.smallint_field,
|
||||
"bigint": self.bigint_field,
|
||||
"varchar": self.char_field,
|
||||
"text": self.text_field,
|
||||
"timestamptz": self.datetime_field,
|
||||
"float4": self.float_field,
|
||||
"float8": self.float_field,
|
||||
"date": self.date_field,
|
||||
"time": self.time_field,
|
||||
"decimal": self.decimal_field,
|
||||
"numeric": self.decimal_field,
|
||||
"uuid": self.uuid_field,
|
||||
"jsonb": self.json_field,
|
||||
"bytea": self.binary_field,
|
||||
"bool": self.bool_field,
|
||||
"timestamp": self.datetime_field,
|
||||
}
|
||||
|
||||
async def get_all_tables(self) -> list[str]:
|
||||
sql = "select TABLE_NAME from information_schema.TABLES where table_catalog=$1 and table_schema=$2"
|
||||
ret = await self.conn.execute_query_dict(sql, [self.database, self.schema])
|
||||
return list(map(lambda x: x["table_name"], ret))
|
||||
|
||||
async def get_columns(self, table: str) -> list[Column]:
|
||||
columns = []
|
||||
sql = f"""select c.column_name,
|
||||
col_description('public.{table}'::regclass, ordinal_position) as column_comment,
|
||||
t.constraint_type as column_key,
|
||||
udt_name as data_type,
|
||||
is_nullable,
|
||||
column_default,
|
||||
character_maximum_length,
|
||||
numeric_precision,
|
||||
numeric_scale
|
||||
from information_schema.constraint_column_usage const
|
||||
join information_schema.table_constraints t
|
||||
using (table_catalog, table_schema, table_name, constraint_catalog, constraint_schema, constraint_name)
|
||||
right join information_schema.columns c using (column_name, table_catalog, table_schema, table_name)
|
||||
where c.table_catalog = $1
|
||||
and c.table_name = $2
|
||||
and c.table_schema = $3""" # nosec:B608
|
||||
if "psycopg" in str(type(self.conn)).lower():
|
||||
sql = re.sub(r"\$[123]", "%s", sql)
|
||||
ret = await self.conn.execute_query_dict(sql, [self.database, table, self.schema])
|
||||
for row in ret:
|
||||
columns.append(
|
||||
Column(
|
||||
name=row["column_name"],
|
||||
data_type=row["data_type"],
|
||||
null=row["is_nullable"] == "YES",
|
||||
default=row["column_default"],
|
||||
length=row["character_maximum_length"],
|
||||
max_digits=row["numeric_precision"],
|
||||
decimal_places=row["numeric_scale"],
|
||||
comment=row["column_comment"],
|
||||
pk=row["column_key"] == "PRIMARY KEY",
|
||||
unique=False, # can't get this simply
|
||||
index=False, # can't get this simply
|
||||
)
|
||||
)
|
||||
return columns
|
61
aerich/inspectdb/sqlite.py
Normal file
61
aerich/inspectdb/sqlite.py
Normal file
@ -0,0 +1,61 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from aerich.inspectdb import Column, FieldMapDict, Inspect
|
||||
|
||||
|
||||
class InspectSQLite(Inspect):
|
||||
@property
|
||||
def field_map(self) -> FieldMapDict:
|
||||
return {
|
||||
"INTEGER": self.int_field,
|
||||
"INT": self.bool_field,
|
||||
"SMALLINT": self.smallint_field,
|
||||
"VARCHAR": self.char_field,
|
||||
"TEXT": self.text_field,
|
||||
"TIMESTAMP": self.datetime_field,
|
||||
"REAL": self.float_field,
|
||||
"BIGINT": self.bigint_field,
|
||||
"DATE": self.date_field,
|
||||
"TIME": self.time_field,
|
||||
"JSON": self.json_field,
|
||||
"BLOB": self.binary_field,
|
||||
}
|
||||
|
||||
async def get_columns(self, table: str) -> list[Column]:
|
||||
columns = []
|
||||
sql = f"PRAGMA table_info({table})"
|
||||
ret = await self.conn.execute_query_dict(sql)
|
||||
columns_index = await self._get_columns_index(table)
|
||||
for row in ret:
|
||||
try:
|
||||
length = row["type"].split("(")[1].split(")")[0]
|
||||
except IndexError:
|
||||
length = None
|
||||
columns.append(
|
||||
Column(
|
||||
name=row["name"],
|
||||
data_type=row["type"].split("(")[0],
|
||||
null=row["notnull"] == 0,
|
||||
default=row["dflt_value"],
|
||||
length=length,
|
||||
pk=row["pk"] == 1,
|
||||
unique=columns_index.get(row["name"]) == "unique",
|
||||
index=columns_index.get(row["name"]) == "index",
|
||||
)
|
||||
)
|
||||
return columns
|
||||
|
||||
async def _get_columns_index(self, table: str) -> dict[str, str]:
|
||||
sql = f"PRAGMA index_list ({table})"
|
||||
indexes = await self.conn.execute_query_dict(sql)
|
||||
ret = {}
|
||||
for index in indexes:
|
||||
sql = f"PRAGMA index_info({index['name']})"
|
||||
index_info = (await self.conn.execute_query_dict(sql))[0]
|
||||
ret[index_info["name"]] = "unique" if index["unique"] else "index"
|
||||
return ret
|
||||
|
||||
async def get_all_tables(self) -> list[str]:
|
||||
sql = "select tbl_name from sqlite_master where type='table' and name!='sqlite_sequence'"
|
||||
ret = await self.conn.execute_query_dict(sql)
|
||||
return list(map(lambda x: x["tbl_name"], ret))
|
1020
aerich/migrate.py
1020
aerich/migrate.py
File diff suppressed because it is too large
Load Diff
@ -1,9 +1,15 @@
|
||||
from tortoise import Model, fields
|
||||
|
||||
from aerich.coder import decoder, encoder
|
||||
|
||||
MAX_VERSION_LENGTH = 255
|
||||
MAX_APP_LENGTH = 100
|
||||
|
||||
|
||||
class Aerich(Model):
|
||||
version = fields.CharField(max_length=50)
|
||||
app = fields.CharField(max_length=20)
|
||||
version = fields.CharField(max_length=MAX_VERSION_LENGTH)
|
||||
app = fields.CharField(max_length=MAX_APP_LENGTH)
|
||||
content: dict = fields.JSONField(encoder=encoder, decoder=decoder)
|
||||
|
||||
class Meta:
|
||||
ordering = ["-id"]
|
||||
|
116
aerich/utils.py
116
aerich/utils.py
@ -1,25 +1,52 @@
|
||||
import importlib
|
||||
from __future__ import annotations
|
||||
|
||||
from asyncclick import BadOptionUsage, Context
|
||||
import importlib.util
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from collections.abc import Generator
|
||||
from pathlib import Path
|
||||
from types import ModuleType
|
||||
|
||||
from asyncclick import BadOptionUsage, ClickException, Context
|
||||
from dictdiffer import diff
|
||||
from tortoise import BaseDBAsyncClient, Tortoise
|
||||
|
||||
|
||||
def get_app_connection_name(config, app) -> str:
|
||||
def add_src_path(path: str) -> str:
|
||||
"""
|
||||
add a folder to the paths, so we can import from there
|
||||
:param path: path to add
|
||||
:return: absolute path
|
||||
"""
|
||||
if not os.path.isabs(path):
|
||||
# use the absolute path, otherwise some other things (e.g. __file__) won't work properly
|
||||
path = os.path.abspath(path)
|
||||
if not os.path.isdir(path):
|
||||
raise ClickException(f"Specified source folder does not exist: {path}")
|
||||
if path not in sys.path:
|
||||
sys.path.insert(0, path)
|
||||
return path
|
||||
|
||||
|
||||
def get_app_connection_name(config, app_name: str) -> str:
|
||||
"""
|
||||
get connection name
|
||||
:param config:
|
||||
:param app:
|
||||
:return:
|
||||
:param app_name:
|
||||
:return: the default connection name (Usally it is 'default')
|
||||
"""
|
||||
return config.get("apps").get(app).get("default_connection")
|
||||
if app := config.get("apps").get(app_name):
|
||||
return app.get("default_connection", "default")
|
||||
raise BadOptionUsage(option_name="--app", message=f"Can't get app named {app_name!r}")
|
||||
|
||||
|
||||
def get_app_connection(config, app) -> BaseDBAsyncClient:
|
||||
"""
|
||||
get connection name
|
||||
get connection client
|
||||
:param config:
|
||||
:param app:
|
||||
:return:
|
||||
:return: client instance
|
||||
"""
|
||||
return Tortoise.get_connection(get_app_connection_name(config, app))
|
||||
|
||||
@ -34,12 +61,11 @@ def get_tortoise_config(ctx: Context, tortoise_orm: str) -> dict:
|
||||
splits = tortoise_orm.split(".")
|
||||
config_path = ".".join(splits[:-1])
|
||||
tortoise_config = splits[-1]
|
||||
|
||||
try:
|
||||
config_module = importlib.import_module(config_path)
|
||||
except (ModuleNotFoundError, AttributeError):
|
||||
raise BadOptionUsage(
|
||||
ctx=ctx, message=f'No config named "{config_path}"', option_name="--config"
|
||||
)
|
||||
except ModuleNotFoundError as e:
|
||||
raise ClickException(f"Error while importing configuration module: {e}") from None
|
||||
|
||||
config = getattr(config_module, tortoise_config, None)
|
||||
if not config:
|
||||
@ -49,3 +75,69 @@ def get_tortoise_config(ctx: Context, tortoise_orm: str) -> dict:
|
||||
ctx=ctx,
|
||||
)
|
||||
return config
|
||||
|
||||
|
||||
def get_models_describe(app: str) -> dict:
|
||||
"""
|
||||
get app models describe
|
||||
:param app:
|
||||
:return:
|
||||
"""
|
||||
ret = {}
|
||||
for model in Tortoise.apps[app].values():
|
||||
managed = getattr(model.Meta, "managed", None)
|
||||
describe = model.describe()
|
||||
ret[describe.get("name")] = dict(describe, managed=managed)
|
||||
return ret
|
||||
|
||||
|
||||
def is_default_function(string: str) -> re.Match | None:
|
||||
return re.match(r"^<function.+>$", str(string or ""))
|
||||
|
||||
|
||||
def import_py_file(file: str | Path) -> ModuleType:
|
||||
module_name, file_ext = os.path.splitext(os.path.split(file)[-1])
|
||||
spec = importlib.util.spec_from_file_location(module_name, file)
|
||||
module = importlib.util.module_from_spec(spec) # type:ignore[arg-type]
|
||||
spec.loader.exec_module(module) # type:ignore[union-attr]
|
||||
return module
|
||||
|
||||
|
||||
def get_dict_diff_by_key(
|
||||
old_fields: list[dict], new_fields: list[dict], key="through"
|
||||
) -> Generator[tuple]:
|
||||
"""
|
||||
Compare two list by key instead of by index
|
||||
|
||||
:param old_fields: previous field info list
|
||||
:param new_fields: current field info list
|
||||
:param key: if two dicts have the same value of this key, action is change; otherwise, is remove/add
|
||||
:return: similar to dictdiffer.diff
|
||||
|
||||
Example::
|
||||
|
||||
>>> old = [{'through': 'a'}, {'through': 'b'}, {'through': 'c'}]
|
||||
>>> new = [{'through': 'a'}, {'through': 'c'}] # remove the second element
|
||||
>>> list(diff(old, new))
|
||||
[('change', [1, 'through'], ('b', 'c')),
|
||||
('remove', '', [(2, {'through': 'c'})])]
|
||||
>>> list(get_dict_diff_by_key(old, new))
|
||||
[('remove', '', [(0, {'through': 'b'})])]
|
||||
|
||||
"""
|
||||
length_old, length_new = len(old_fields), len(new_fields)
|
||||
if length_old == 0 or length_new == 0 or length_old == length_new == 1:
|
||||
yield from diff(old_fields, new_fields)
|
||||
else:
|
||||
value_index: dict[str, int] = {f[key]: i for i, f in enumerate(new_fields)}
|
||||
additions = set(range(length_new))
|
||||
for field in old_fields:
|
||||
value = field[key]
|
||||
if (index := value_index.get(value)) is not None:
|
||||
additions.remove(index)
|
||||
yield from diff([field], [new_fields[index]]) # change
|
||||
else:
|
||||
yield from diff([field], []) # remove
|
||||
if additions:
|
||||
for index in sorted(additions):
|
||||
yield from diff([], [new_fields[index]]) # add
|
||||
|
3
aerich/version.py
Normal file
3
aerich/version.py
Normal file
@ -0,0 +1,3 @@
|
||||
from importlib.metadata import version
|
||||
|
||||
__version__ = version(__package__)
|
98
conftest.py
98
conftest.py
@ -1,11 +1,97 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import os
|
||||
import sys
|
||||
from collections.abc import Generator
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
from tortoise.contrib.test import finalizer, initializer
|
||||
from tortoise import Tortoise, expand_db_url
|
||||
from tortoise.backends.base_postgres.schema_generator import BasePostgresSchemaGenerator
|
||||
from tortoise.backends.mysql.schema_generator import MySQLSchemaGenerator
|
||||
from tortoise.backends.sqlite.schema_generator import SqliteSchemaGenerator
|
||||
from tortoise.contrib.test import MEMORY_SQLITE
|
||||
|
||||
from aerich.ddl.mysql import MysqlDDL
|
||||
from aerich.ddl.postgres import PostgresDDL
|
||||
from aerich.ddl.sqlite import SqliteDDL
|
||||
from aerich.migrate import Migrate
|
||||
from tests._utils import chdir, copy_files, init_db, run_shell
|
||||
|
||||
db_url = os.getenv("TEST_DB", MEMORY_SQLITE)
|
||||
db_url_second = os.getenv("TEST_DB_SECOND", MEMORY_SQLITE)
|
||||
tortoise_orm = {
|
||||
"connections": {
|
||||
"default": expand_db_url(db_url, testing=True),
|
||||
"second": expand_db_url(db_url_second, testing=True),
|
||||
},
|
||||
"apps": {
|
||||
"models": {"models": ["tests.models", "aerich.models"], "default_connection": "default"},
|
||||
"models_second": {"models": ["tests.models_second"], "default_connection": "second"},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture(scope="module", autouse=True)
|
||||
def initialize_tests(request):
|
||||
db_url = os.getenv("TEST_DB", "sqlite://:memory:")
|
||||
initializer(["tests.models"], db_url=db_url)
|
||||
request.addfinalizer(finalizer)
|
||||
@pytest.fixture(scope="function", autouse=True)
|
||||
def reset_migrate() -> None:
|
||||
Migrate.upgrade_operators = []
|
||||
Migrate.downgrade_operators = []
|
||||
Migrate._upgrade_fk_m2m_index_operators = []
|
||||
Migrate._downgrade_fk_m2m_index_operators = []
|
||||
Migrate._upgrade_m2m = []
|
||||
Migrate._downgrade_m2m = []
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def event_loop() -> Generator:
|
||||
policy = asyncio.get_event_loop_policy()
|
||||
res = policy.new_event_loop()
|
||||
asyncio.set_event_loop(res)
|
||||
res._close = res.close # type:ignore[attr-defined]
|
||||
res.close = lambda: None # type:ignore[method-assign]
|
||||
|
||||
yield res
|
||||
|
||||
res._close() # type:ignore[attr-defined]
|
||||
|
||||
|
||||
@pytest.fixture(scope="session", autouse=True)
|
||||
async def initialize_tests(event_loop, request) -> None:
|
||||
await init_db(tortoise_orm)
|
||||
client = Tortoise.get_connection("default")
|
||||
if client.schema_generator is MySQLSchemaGenerator:
|
||||
Migrate.ddl = MysqlDDL(client)
|
||||
elif client.schema_generator is SqliteSchemaGenerator:
|
||||
Migrate.ddl = SqliteDDL(client)
|
||||
elif issubclass(client.schema_generator, BasePostgresSchemaGenerator):
|
||||
Migrate.ddl = PostgresDDL(client)
|
||||
Migrate.dialect = Migrate.ddl.DIALECT
|
||||
request.addfinalizer(lambda: event_loop.run_until_complete(Tortoise._drop_databases()))
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def new_aerich_project(tmp_path: Path):
|
||||
test_dir = Path(__file__).parent / "tests"
|
||||
asset_dir = test_dir / "assets" / "fake"
|
||||
settings_py = asset_dir / "settings.py"
|
||||
_tests_py = asset_dir / "_tests.py"
|
||||
db_py = asset_dir / "db.py"
|
||||
models_py = test_dir / "models.py"
|
||||
models_second_py = test_dir / "models_second.py"
|
||||
copy_files(settings_py, _tests_py, models_py, models_second_py, db_py, target_dir=tmp_path)
|
||||
dst_dir = tmp_path / "tests"
|
||||
dst_dir.mkdir()
|
||||
dst_dir.joinpath("__init__.py").touch()
|
||||
copy_files(test_dir / "_utils.py", test_dir / "indexes.py", target_dir=dst_dir)
|
||||
if should_remove := str(tmp_path) not in sys.path:
|
||||
sys.path.append(str(tmp_path))
|
||||
with chdir(tmp_path):
|
||||
run_shell("python db.py create", capture_output=False)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
if not os.getenv("AERICH_DONT_DROP_FAKE_DB"):
|
||||
run_shell("python db.py drop", capture_output=False)
|
||||
if should_remove:
|
||||
sys.path.remove(str(tmp_path))
|
||||
|
2434
poetry.lock
generated
2434
poetry.lock
generated
File diff suppressed because it is too large
Load Diff
132
pyproject.toml
132
pyproject.toml
@ -1,32 +1,116 @@
|
||||
[tool.poetry]
|
||||
[project]
|
||||
name = "aerich"
|
||||
version = "0.1.6"
|
||||
version = "0.8.2"
|
||||
description = "A database migrations tool for Tortoise ORM."
|
||||
authors = ["long2ice <long2ice@gmail.com>"]
|
||||
authors = [{name="long2ice", email="long2ice@gmail.com>"}]
|
||||
license = { text = "Apache-2.0" }
|
||||
readme = "README.md"
|
||||
keywords = ["migrate", "Tortoise-ORM", "mysql"]
|
||||
packages = [{ include = "aerich" }]
|
||||
include = ["CHANGELOG.md", "LICENSE", "README.md"]
|
||||
requires-python = ">=3.8"
|
||||
dependencies = [
|
||||
"tortoise-orm (>=0.21.0,<1.0.0); python_version < '4.0'",
|
||||
"pydantic (>=2.0.2,!=2.1.0,!=2.7.0,<3.0.0)",
|
||||
"dictdiffer (>=0.9.0,<1.0.0)",
|
||||
"asyncclick (>=8.1.7,<9.0.0)",
|
||||
"eval-type-backport (>=0.2.2,<1.0.0); python_version < '3.10'",
|
||||
]
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = "^3.8"
|
||||
tortoise-orm = {git = "https://github.com/tortoise-orm/tortoise-orm.git", branch = "develop"}
|
||||
asyncclick = "*"
|
||||
pydantic = "*"
|
||||
[project.optional-dependencies]
|
||||
toml = [
|
||||
"tomli-w (>=1.1.0,<2.0.0); python_version >= '3.11'",
|
||||
"tomlkit (>=0.11.4,<1.0.0); python_version < '3.11'",
|
||||
]
|
||||
# Need asyncpg or psyncopg for PostgreSQL
|
||||
asyncpg = ["asyncpg"]
|
||||
psycopg = ["psycopg[pool,binary] (>=3.0.12,<4.0.0)"]
|
||||
# Need asyncmy or aiomysql for MySQL
|
||||
asyncmy = ["asyncmy>=0.2.9; python_version < '4.0'"]
|
||||
mysql = ["aiomysql>=0.2.0"]
|
||||
|
||||
[tool.poetry.dev-dependencies]
|
||||
taskipy = "*"
|
||||
asynctest = "*"
|
||||
flake8 = "*"
|
||||
isort = "*"
|
||||
black = "^19.10b0"
|
||||
pytest = "*"
|
||||
aiomysql = "*"
|
||||
asyncpg = "*"
|
||||
pytest-xdist = "*"
|
||||
mypy = "*"
|
||||
[project.urls]
|
||||
homepage = "https://github.com/tortoise/aerich"
|
||||
repository = "https://github.com/tortoise/aerich.git"
|
||||
documentation = "https://github.com/tortoise/aerich"
|
||||
|
||||
[tool.taskipy.tasks]
|
||||
export = "poetry export -f requirements.txt --without-hashes > requirements.txt"
|
||||
export-dev = "poetry export -f requirements.txt --dev --without-hashes > requirements-dev.txt"
|
||||
[project.scripts]
|
||||
aerich = "aerich.cli:main"
|
||||
|
||||
[tool.poetry]
|
||||
requires-poetry = ">=2.0"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
ruff = "^0.9.0"
|
||||
bandit = "^1.7.0"
|
||||
mypy = "^1.10.0"
|
||||
twine = "^6.1.0"
|
||||
|
||||
[tool.poetry.group.test.dependencies]
|
||||
pytest = "^8.3.0"
|
||||
pytest-mock = "^3.14.0"
|
||||
pytest-xdist = "^3.6.0"
|
||||
# Breaking change in 0.23.*
|
||||
# https://github.com/pytest-dev/pytest-asyncio/issues/706
|
||||
pytest-asyncio = "^0.21.2"
|
||||
# required for sha256_password by asyncmy
|
||||
cryptography = {version="^44.0.1", python="!=3.9.0,!=3.9.1"}
|
||||
|
||||
[tool.aerich]
|
||||
tortoise_orm = "conftest.tortoise_orm"
|
||||
location = "./migrations"
|
||||
src_folder = "./."
|
||||
|
||||
[build-system]
|
||||
requires = ["poetry>=0.12"]
|
||||
build-backend = "poetry.masonry.api"
|
||||
requires = ["poetry-core>=2.0.0"]
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
asyncio_mode = 'auto'
|
||||
|
||||
[tool.coverage.run]
|
||||
branch = true
|
||||
source = ["aerich"]
|
||||
|
||||
[tool.coverage.report]
|
||||
exclude_also = [
|
||||
"if TYPE_CHECKING:"
|
||||
]
|
||||
|
||||
[tool.mypy]
|
||||
pretty = true
|
||||
python_version = "3.8"
|
||||
check_untyped_defs = true
|
||||
warn_unused_ignores = true
|
||||
disallow_incomplete_defs = false
|
||||
exclude = ["tests/assets", "migrations"]
|
||||
|
||||
[[tool.mypy.overrides]]
|
||||
module = [
|
||||
'dictdiffer.*',
|
||||
'tomlkit',
|
||||
'tomli_w',
|
||||
'tomli',
|
||||
]
|
||||
ignore_missing_imports = true
|
||||
|
||||
[tool.ruff]
|
||||
line-length = 100
|
||||
|
||||
[tool.ruff.lint]
|
||||
extend-select = [
|
||||
"I", # https://docs.astral.sh/ruff/rules/#isort-i
|
||||
"SIM", # https://docs.astral.sh/ruff/rules/#flake8-simplify-sim
|
||||
"FA", # https://docs.astral.sh/ruff/rules/#flake8-future-annotations-fa
|
||||
"UP", # https://docs.astral.sh/ruff/rules/#pyupgrade-up
|
||||
"RUF100", # https://docs.astral.sh/ruff/rules/#ruff-specific-rules-ruf
|
||||
]
|
||||
ignore = ["UP031"] # https://docs.astral.sh/ruff/rules/printf-string-formatting/
|
||||
|
||||
[tool.ruff.lint.per-file-ignores]
|
||||
# TODO: Remove this line when dropping support for Python3.8
|
||||
"aerich/inspectdb/__init__.py" = ["UP006", "UP035"]
|
||||
"aerich/_compat.py" = ["F401"]
|
||||
|
||||
[tool.bandit]
|
||||
exclude_dirs = ["tests", "conftest.py"]
|
||||
|
@ -1,2 +0,0 @@
|
||||
[pytest]
|
||||
addopts = -p no:warnings --ignore=src
|
@ -1,48 +0,0 @@
|
||||
aiomysql==0.0.20
|
||||
aiosqlite==0.13.0
|
||||
anyio==1.3.0
|
||||
apipkg==1.5
|
||||
appdirs==1.4.4
|
||||
async-generator==1.10
|
||||
asyncclick==7.0.9
|
||||
asyncpg==0.20.1
|
||||
asynctest==0.13.0
|
||||
atomicwrites==1.4.0; sys_platform == "win32"
|
||||
attrs==19.3.0
|
||||
black==19.10b0
|
||||
cffi==1.14.0
|
||||
ciso8601==2.1.3; sys_platform != "win32" and implementation_name == "cpython"
|
||||
click==7.1.2
|
||||
colorama==0.4.3; sys_platform == "win32"
|
||||
cryptography==2.9.2
|
||||
execnet==1.7.1
|
||||
flake8==3.8.1
|
||||
iso8601==0.1.12; sys_platform == "win32" or implementation_name != "cpython"
|
||||
isort==4.3.21
|
||||
mccabe==0.6.1
|
||||
more-itertools==8.3.0
|
||||
mypy==0.770
|
||||
mypy-extensions==0.4.3
|
||||
packaging==20.4
|
||||
pathspec==0.8.0
|
||||
pluggy==0.13.1
|
||||
py==1.8.1
|
||||
pycodestyle==2.6.0
|
||||
pycparser==2.20
|
||||
pydantic==1.5.1
|
||||
pyflakes==2.2.0
|
||||
pymysql==0.9.2
|
||||
pyparsing==2.4.7
|
||||
pypika==0.37.6
|
||||
pytest==5.4.2
|
||||
pytest-forked==1.1.3
|
||||
pytest-xdist==1.32.0
|
||||
regex==2020.5.14
|
||||
six==1.14.0
|
||||
sniffio==1.1.0
|
||||
taskipy==1.2.1
|
||||
toml==0.10.1
|
||||
-e git+https://github.com/long2ice/tortoise-orm.git@1f67b7a0ca1384365d6ff89d9e245e733166d1a6#egg=tortoise-orm
|
||||
typed-ast==1.4.1
|
||||
typing-extensions==3.7.4.2
|
||||
wcwidth==0.1.9
|
@ -1,10 +0,0 @@
|
||||
aiosqlite==0.13.0
|
||||
anyio==1.3.0
|
||||
async-generator==1.10
|
||||
asyncclick==7.0.9
|
||||
ciso8601==2.1.3; sys_platform != "win32" and implementation_name == "cpython"
|
||||
iso8601==0.1.12; sys_platform == "win32" or implementation_name != "cpython"
|
||||
pydantic==1.5.1
|
||||
pypika==0.37.6
|
||||
sniffio==1.1.0
|
||||
typing-extensions==3.7.4.2
|
47
setup.cfg
47
setup.cfg
@ -1,47 +0,0 @@
|
||||
[flake8]
|
||||
max-line-length = 100
|
||||
exclude =
|
||||
ignore = E501,W503,DAR101,DAR201,DAR402
|
||||
|
||||
[darglint]
|
||||
docstring_style=sphinx
|
||||
|
||||
[isort]
|
||||
not_skip=__init__.py
|
||||
multi_line_output=3
|
||||
include_trailing_comma=True
|
||||
force_grid_wrap=0
|
||||
use_parentheses=True
|
||||
line_length=100
|
||||
|
||||
[tool:pytest]
|
||||
addopts = -n auto --tb=native -q
|
||||
|
||||
[mypy]
|
||||
pretty = True
|
||||
ignore_missing_imports = True
|
||||
check_untyped_defs = True
|
||||
disallow_subclassing_any = True
|
||||
disallow_untyped_calls = True
|
||||
disallow_untyped_defs = False
|
||||
disallow_incomplete_defs = False
|
||||
disallow_untyped_decorators = True
|
||||
no_implicit_optional = True
|
||||
warn_redundant_casts = True
|
||||
warn_unused_ignores = True
|
||||
warn_no_return = True
|
||||
warn_return_any = False
|
||||
warn_unused_configs = True
|
||||
warn_unreachable = True
|
||||
allow_redefinition = True
|
||||
strict_equality = True
|
||||
show_error_context = True
|
||||
|
||||
[mypy-tests.*]
|
||||
check_untyped_defs = False
|
||||
disallow_untyped_defs = False
|
||||
disallow_incomplete_defs = False
|
||||
warn_unreachable = False
|
||||
|
||||
[mypy-conftest]
|
||||
disallow_untyped_defs = False
|
44
setup.py
44
setup.py
@ -1,44 +0,0 @@
|
||||
import os
|
||||
import re
|
||||
from setuptools import find_packages, setup
|
||||
|
||||
|
||||
def version():
|
||||
ver_str_line = open('aerich/__init__.py', 'rt').read()
|
||||
mob = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", ver_str_line, re.M)
|
||||
if not mob:
|
||||
raise RuntimeError("Unable to find version string")
|
||||
return mob.group(1)
|
||||
|
||||
|
||||
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as f:
|
||||
long_description = f.read()
|
||||
|
||||
|
||||
def requirements():
|
||||
return open('requirements.txt', 'rt').read().splitlines()
|
||||
|
||||
|
||||
setup(
|
||||
name='aerich',
|
||||
version=version(),
|
||||
description='A database migrations tool for Tortoise-ORM.',
|
||||
author='long2ice',
|
||||
long_description_content_type='text/x-rst',
|
||||
long_description=long_description,
|
||||
author_email='long2ice@gmail.com',
|
||||
url='https://github.com/long2ice/aerich',
|
||||
license='MIT License',
|
||||
packages=find_packages(include=['aerich*']),
|
||||
include_package_data=True,
|
||||
zip_safe=True,
|
||||
entry_points={
|
||||
'console_scripts': ['aerich = aerich.cli:main'],
|
||||
},
|
||||
platforms='any',
|
||||
keywords=(
|
||||
'migrate Tortoise-ORM mysql'
|
||||
),
|
||||
dependency_links=['https://github.com/tortoise-orm/tortoise-orm.git@develop#egg=tortoise-orm'],
|
||||
install_requires=requirements(),
|
||||
)
|
@ -1,6 +0,0 @@
|
||||
TORTOISE_ORM = {
|
||||
"connections": {"default": "mysql://root:123456@127.0.0.1:3306/test"},
|
||||
"apps": {
|
||||
"models": {"models": ["tests.models", "aerich.models"], "default_connection": "default"}
|
||||
},
|
||||
}
|
87
tests/_utils.py
Normal file
87
tests/_utils.py
Normal file
@ -0,0 +1,87 @@
|
||||
import contextlib
|
||||
import os
|
||||
import platform
|
||||
import shlex
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from tortoise import Tortoise, generate_schema_for_client
|
||||
from tortoise.exceptions import DBConnectionError, OperationalError
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
from contextlib import chdir
|
||||
else:
|
||||
|
||||
class chdir(contextlib.AbstractContextManager): # Copied from source code of Python3.13
|
||||
"""Non thread-safe context manager to change the current working directory."""
|
||||
|
||||
def __init__(self, path):
|
||||
self.path = path
|
||||
self._old_cwd = []
|
||||
|
||||
def __enter__(self):
|
||||
self._old_cwd.append(os.getcwd())
|
||||
os.chdir(self.path)
|
||||
|
||||
def __exit__(self, *excinfo):
|
||||
os.chdir(self._old_cwd.pop())
|
||||
|
||||
|
||||
async def drop_db(tortoise_orm) -> None:
|
||||
# Placing init outside the try-block(suppress) since it doesn't
|
||||
# establish connections to the DB eagerly.
|
||||
await Tortoise.init(config=tortoise_orm)
|
||||
with contextlib.suppress(DBConnectionError, OperationalError):
|
||||
await Tortoise._drop_databases()
|
||||
|
||||
|
||||
async def init_db(tortoise_orm, generate_schemas=True) -> None:
|
||||
await drop_db(tortoise_orm)
|
||||
await Tortoise.init(config=tortoise_orm, _create_db=True)
|
||||
if generate_schemas:
|
||||
await generate_schema_for_client(Tortoise.get_connection("default"), safe=True)
|
||||
|
||||
|
||||
def copy_files(*src_files: Path, target_dir: Path) -> None:
|
||||
for src in src_files:
|
||||
shutil.copy(src, target_dir)
|
||||
|
||||
|
||||
class Dialect:
|
||||
test_db_url: str
|
||||
|
||||
@classmethod
|
||||
def load_env(cls) -> None:
|
||||
if getattr(cls, "test_db_url", None) is None:
|
||||
cls.test_db_url = os.getenv("TEST_DB", "")
|
||||
|
||||
@classmethod
|
||||
def is_postgres(cls) -> bool:
|
||||
cls.load_env()
|
||||
return "postgres" in cls.test_db_url
|
||||
|
||||
@classmethod
|
||||
def is_mysql(cls) -> bool:
|
||||
cls.load_env()
|
||||
return "mysql" in cls.test_db_url
|
||||
|
||||
@classmethod
|
||||
def is_sqlite(cls) -> bool:
|
||||
cls.load_env()
|
||||
return not cls.test_db_url or "sqlite" in cls.test_db_url
|
||||
|
||||
|
||||
WINDOWS = platform.system() == "Windows"
|
||||
|
||||
|
||||
def run_shell(command: str, capture_output=True, **kw) -> str:
|
||||
if WINDOWS and command.startswith("aerich "):
|
||||
command = "python -m " + command
|
||||
r = subprocess.run(shlex.split(command), capture_output=capture_output)
|
||||
if r.returncode != 0 and r.stderr:
|
||||
return r.stderr.decode()
|
||||
if not r.stdout:
|
||||
return ""
|
||||
return r.stdout.decode()
|
80
tests/assets/fake/_tests.py
Normal file
80
tests/assets/fake/_tests.py
Normal file
@ -0,0 +1,80 @@
|
||||
import pytest
|
||||
from models import NewModel
|
||||
from models_second import Config
|
||||
from settings import TORTOISE_ORM
|
||||
from tortoise import Tortoise
|
||||
from tortoise.exceptions import OperationalError
|
||||
|
||||
try:
|
||||
# This error does not translate to tortoise's OperationalError
|
||||
from psycopg.errors import UndefinedColumn
|
||||
except ImportError:
|
||||
errors = (OperationalError,)
|
||||
else:
|
||||
errors = (OperationalError, UndefinedColumn)
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def anyio_backend() -> str:
|
||||
return "asyncio"
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
async def init_connections():
|
||||
await Tortoise.init(TORTOISE_ORM)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
await Tortoise.close_connections()
|
||||
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_init_db():
|
||||
m1 = await NewModel.filter(name="")
|
||||
assert isinstance(m1, list)
|
||||
m2 = await Config.filter(key="")
|
||||
assert isinstance(m2, list)
|
||||
await NewModel.create(name="")
|
||||
await Config.create(key="", label="", value={})
|
||||
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_fake_field_1():
|
||||
assert "field_1" in NewModel._meta.fields_map
|
||||
assert "field_1" in Config._meta.fields_map
|
||||
with pytest.raises(errors):
|
||||
await NewModel.create(name="", field_1=1)
|
||||
with pytest.raises(errors):
|
||||
await Config.create(key="", label="", value={}, field_1=1)
|
||||
|
||||
obj1 = NewModel(name="", field_1=1)
|
||||
with pytest.raises(errors):
|
||||
await obj1.save()
|
||||
obj1 = NewModel(name="")
|
||||
with pytest.raises(errors):
|
||||
await obj1.save()
|
||||
with pytest.raises(errors):
|
||||
obj1 = await NewModel.first()
|
||||
obj1 = await NewModel.all().first().values("id", "name")
|
||||
assert obj1 and obj1["id"]
|
||||
|
||||
obj2 = Config(key="", label="", value={}, field_1=1)
|
||||
with pytest.raises(errors):
|
||||
await obj2.save()
|
||||
obj2 = Config(key="", label="", value={})
|
||||
with pytest.raises(errors):
|
||||
await obj2.save()
|
||||
with pytest.raises(errors):
|
||||
obj2 = await Config.first()
|
||||
obj2 = await Config.all().first().values("id", "key")
|
||||
assert obj2 and obj2["id"]
|
||||
|
||||
|
||||
@pytest.mark.anyio
|
||||
async def test_fake_field_2():
|
||||
assert "field_2" in NewModel._meta.fields_map
|
||||
assert "field_2" in Config._meta.fields_map
|
||||
with pytest.raises(errors):
|
||||
await NewModel.create(name="")
|
||||
with pytest.raises(errors):
|
||||
await Config.create(key="", label="", value={})
|
28
tests/assets/fake/db.py
Normal file
28
tests/assets/fake/db.py
Normal file
@ -0,0 +1,28 @@
|
||||
import asyncclick as click
|
||||
from settings import TORTOISE_ORM
|
||||
|
||||
from tests._utils import drop_db, init_db
|
||||
|
||||
|
||||
@click.group()
|
||||
def cli(): ...
|
||||
|
||||
|
||||
@cli.command()
|
||||
async def create():
|
||||
await init_db(TORTOISE_ORM, False)
|
||||
click.echo(f"Success to create databases for {TORTOISE_ORM['connections']}")
|
||||
|
||||
|
||||
@cli.command()
|
||||
async def drop():
|
||||
await drop_db(TORTOISE_ORM)
|
||||
click.echo(f"Dropped databases for {TORTOISE_ORM['connections']}")
|
||||
|
||||
|
||||
def main():
|
||||
cli()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
22
tests/assets/fake/settings.py
Normal file
22
tests/assets/fake/settings.py
Normal file
@ -0,0 +1,22 @@
|
||||
import os
|
||||
from datetime import date
|
||||
|
||||
from tortoise.contrib.test import MEMORY_SQLITE
|
||||
|
||||
DB_URL = (
|
||||
_u.replace("\\{\\}", f"aerich_fake_{date.today():%Y%m%d}")
|
||||
if (_u := os.getenv("TEST_DB"))
|
||||
else MEMORY_SQLITE
|
||||
)
|
||||
DB_URL_SECOND = (DB_URL + "_second") if DB_URL != MEMORY_SQLITE else MEMORY_SQLITE
|
||||
|
||||
TORTOISE_ORM = {
|
||||
"connections": {
|
||||
"default": DB_URL.replace(MEMORY_SQLITE, "sqlite://db.sqlite3"),
|
||||
"second": DB_URL_SECOND.replace(MEMORY_SQLITE, "sqlite://db_second.sqlite3"),
|
||||
},
|
||||
"apps": {
|
||||
"models": {"models": ["models", "aerich.models"], "default_connection": "default"},
|
||||
"models_second": {"models": ["models_second"], "default_connection": "second"},
|
||||
},
|
||||
}
|
76
tests/assets/sqlite_migrate/_tests.py
Normal file
76
tests/assets/sqlite_migrate/_tests.py
Normal file
@ -0,0 +1,76 @@
|
||||
import uuid
|
||||
|
||||
import pytest
|
||||
from models import Foo
|
||||
from tortoise.exceptions import IntegrityError
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_allow_duplicate() -> None:
|
||||
await Foo.all().delete()
|
||||
await Foo.create(name="foo")
|
||||
obj = await Foo.create(name="foo")
|
||||
assert (await Foo.all().count()) == 2
|
||||
await obj.delete()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_unique_is_true() -> None:
|
||||
with pytest.raises(IntegrityError):
|
||||
await Foo.create(name="foo")
|
||||
await Foo.create(name="foo")
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_add_unique_field() -> None:
|
||||
if not await Foo.filter(age=0).exists():
|
||||
await Foo.create(name="0_" + uuid.uuid4().hex, age=0)
|
||||
with pytest.raises(IntegrityError):
|
||||
await Foo.create(name=uuid.uuid4().hex, age=0)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_drop_unique_field() -> None:
|
||||
name = "1_" + uuid.uuid4().hex
|
||||
await Foo.create(name=name, age=0)
|
||||
assert await Foo.filter(name=name).exists()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_with_age_field() -> None:
|
||||
name = "2_" + uuid.uuid4().hex
|
||||
await Foo.create(name=name, age=0)
|
||||
obj = await Foo.get(name=name)
|
||||
assert obj.age == 0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_without_age_field() -> None:
|
||||
name = "3_" + uuid.uuid4().hex
|
||||
await Foo.create(name=name, age=0)
|
||||
obj = await Foo.get(name=name)
|
||||
assert getattr(obj, "age", None) is None
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_m2m_with_custom_through() -> None:
|
||||
from models import FooGroup, Group
|
||||
|
||||
name = "4_" + uuid.uuid4().hex
|
||||
foo = await Foo.create(name=name)
|
||||
group = await Group.create(name=name + "1")
|
||||
await FooGroup.all().delete()
|
||||
await foo.groups.add(group)
|
||||
foo_group = await FooGroup.get(foo=foo, group=group)
|
||||
assert not foo_group.is_active
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_add_m2m_field_after_init_db() -> None:
|
||||
from models import Group
|
||||
|
||||
name = "5_" + uuid.uuid4().hex
|
||||
foo = await Foo.create(name=name)
|
||||
group = await Group.create(name=name + "1")
|
||||
await foo.groups.add(group)
|
||||
assert (await group.users.all().first()) == foo
|
28
tests/assets/sqlite_migrate/conftest_.py
Normal file
28
tests/assets/sqlite_migrate/conftest_.py
Normal file
@ -0,0 +1,28 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Generator
|
||||
|
||||
import pytest
|
||||
import pytest_asyncio
|
||||
import settings
|
||||
from tortoise import Tortoise, connections
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def event_loop() -> Generator:
|
||||
policy = asyncio.get_event_loop_policy()
|
||||
res = policy.new_event_loop()
|
||||
asyncio.set_event_loop(res)
|
||||
res._close = res.close # type:ignore[attr-defined]
|
||||
res.close = lambda: None # type:ignore[method-assign]
|
||||
|
||||
yield res
|
||||
|
||||
res._close() # type:ignore[attr-defined]
|
||||
|
||||
|
||||
@pytest_asyncio.fixture(scope="session", autouse=True)
|
||||
async def api(event_loop, request):
|
||||
await Tortoise.init(config=settings.TORTOISE_ORM)
|
||||
request.addfinalizer(lambda: event_loop.run_until_complete(connections.close_all(discard=True)))
|
5
tests/assets/sqlite_migrate/models.py
Normal file
5
tests/assets/sqlite_migrate/models.py
Normal file
@ -0,0 +1,5 @@
|
||||
from tortoise import Model, fields
|
||||
|
||||
|
||||
class Foo(Model):
|
||||
name = fields.CharField(max_length=60, db_index=False)
|
4
tests/assets/sqlite_migrate/settings.py
Normal file
4
tests/assets/sqlite_migrate/settings.py
Normal file
@ -0,0 +1,4 @@
|
||||
TORTOISE_ORM = {
|
||||
"connections": {"default": "sqlite://db.sqlite3"},
|
||||
"apps": {"models": {"models": ["models", "aerich.models"]}},
|
||||
}
|
7
tests/indexes.py
Normal file
7
tests/indexes.py
Normal file
@ -0,0 +1,7 @@
|
||||
from tortoise.indexes import Index
|
||||
|
||||
|
||||
class CustomIndex(Index):
|
||||
def __init__(self, *args, **kw) -> None:
|
||||
super().__init__(*args, **kw)
|
||||
self._foo = ""
|
106
tests/models.py
106
tests/models.py
@ -1,7 +1,16 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import datetime
|
||||
import uuid
|
||||
from enum import IntEnum
|
||||
|
||||
from tortoise import Model, fields
|
||||
from tortoise.contrib.mysql.indexes import FullTextIndex
|
||||
from tortoise.contrib.postgres.indexes import HashIndex
|
||||
from tortoise.indexes import Index
|
||||
|
||||
from tests._utils import Dialect
|
||||
from tests.indexes import CustomIndex
|
||||
|
||||
|
||||
class ProductType(IntEnum):
|
||||
@ -23,35 +32,110 @@ class Status(IntEnum):
|
||||
|
||||
class User(Model):
|
||||
username = fields.CharField(max_length=20, unique=True)
|
||||
password = fields.CharField(max_length=200)
|
||||
password = fields.CharField(max_length=100)
|
||||
last_login = fields.DatetimeField(description="Last Login", default=datetime.datetime.now)
|
||||
is_active = fields.BooleanField(default=True, description="Is Active")
|
||||
is_superuser = fields.BooleanField(default=False, description="Is SuperUser")
|
||||
avatar = fields.CharField(max_length=200, default="")
|
||||
intro = fields.TextField(default="")
|
||||
longitude = fields.DecimalField(max_digits=10, decimal_places=8)
|
||||
|
||||
products: fields.ManyToManyRelation[Product]
|
||||
|
||||
class Meta:
|
||||
# reverse indexes elements
|
||||
indexes = [CustomIndex(fields=("is_superuser",)), Index(fields=("username", "is_active"))]
|
||||
|
||||
|
||||
class Email(Model):
|
||||
email_id = fields.IntField(primary_key=True)
|
||||
email = fields.CharField(max_length=200, db_index=True)
|
||||
company = fields.CharField(max_length=100, db_index=True, unique=True)
|
||||
is_primary = fields.BooleanField(default=False)
|
||||
address = fields.CharField(max_length=200)
|
||||
users: fields.ManyToManyRelation[User] = fields.ManyToManyField("models.User")
|
||||
config: fields.OneToOneRelation[Config] = fields.OneToOneField("models.Config")
|
||||
|
||||
|
||||
def default_name():
|
||||
return uuid.uuid4()
|
||||
|
||||
|
||||
class Category(Model):
|
||||
slug = fields.CharField(max_length=200)
|
||||
name = fields.CharField(max_length=200)
|
||||
user = fields.ForeignKeyField("models.User", description="User")
|
||||
slug = fields.CharField(max_length=100)
|
||||
name = fields.CharField(max_length=200, null=True, default=default_name)
|
||||
owner: fields.ForeignKeyRelation[User] = fields.ForeignKeyField(
|
||||
"models.User", description="User"
|
||||
)
|
||||
title = fields.CharField(max_length=20, unique=False)
|
||||
created_at = fields.DatetimeField(auto_now_add=True)
|
||||
|
||||
class Meta:
|
||||
if Dialect.is_postgres():
|
||||
indexes = [HashIndex(fields=("slug",))]
|
||||
elif Dialect.is_mysql():
|
||||
indexes = [FullTextIndex(fields=("slug",))] # type:ignore
|
||||
else:
|
||||
indexes = [Index(fields=("slug",))] # type:ignore
|
||||
|
||||
|
||||
class Product(Model):
|
||||
categories = fields.ManyToManyField("models.Category")
|
||||
id = fields.BigIntField(primary_key=True)
|
||||
categories: fields.ManyToManyRelation[Category] = fields.ManyToManyField(
|
||||
"models.Category", null=False
|
||||
)
|
||||
users: fields.ManyToManyRelation[User] = fields.ManyToManyField(
|
||||
"models.User", related_name="products"
|
||||
)
|
||||
name = fields.CharField(max_length=50)
|
||||
view_num = fields.IntField(description="View Num")
|
||||
view_num = fields.IntField(description="View Num", default=0)
|
||||
sort = fields.IntField()
|
||||
is_reviewed = fields.BooleanField(description="Is Reviewed")
|
||||
type = fields.IntEnumField(ProductType, description="Product Type")
|
||||
image = fields.CharField(max_length=200)
|
||||
type: int = fields.IntEnumField(
|
||||
ProductType, description="Product Type", source_field="type_db_alias"
|
||||
)
|
||||
pic = fields.CharField(max_length=200)
|
||||
body = fields.TextField()
|
||||
price = fields.FloatField(null=True)
|
||||
no = fields.UUIDField(db_index=True)
|
||||
created_at = fields.DatetimeField(auto_now_add=True)
|
||||
is_deleted = fields.BooleanField(default=False)
|
||||
|
||||
class Meta:
|
||||
unique_together = (("name", "type"),)
|
||||
indexes = (("name", "type"),)
|
||||
managed = True
|
||||
|
||||
|
||||
class Config(Model):
|
||||
slug = fields.CharField(primary_key=True, max_length=20)
|
||||
categories: fields.ManyToManyRelation[Category] = fields.ManyToManyField(
|
||||
"models.Category", through="config_category_map", related_name="category_set"
|
||||
)
|
||||
label = fields.CharField(max_length=200)
|
||||
key = fields.CharField(max_length=20)
|
||||
value = fields.JSONField()
|
||||
status: Status = fields.IntEnumField(Status, default=Status.on)
|
||||
value: dict = fields.JSONField()
|
||||
status: Status = fields.IntEnumField(Status)
|
||||
user: fields.ForeignKeyRelation[User] = fields.ForeignKeyField(
|
||||
"models.User", description="User"
|
||||
)
|
||||
|
||||
email: fields.OneToOneRelation[Email]
|
||||
|
||||
class Meta:
|
||||
managed = True
|
||||
|
||||
|
||||
class DontManageMe(Model):
|
||||
name = fields.CharField(max_length=50)
|
||||
|
||||
class Meta:
|
||||
managed = False
|
||||
|
||||
|
||||
class Ignore(Model):
|
||||
class Meta:
|
||||
managed = False
|
||||
|
||||
|
||||
class NewModel(Model):
|
||||
name = fields.CharField(max_length=50)
|
||||
|
71
tests/models_second.py
Normal file
71
tests/models_second.py
Normal file
@ -0,0 +1,71 @@
|
||||
import datetime
|
||||
from enum import IntEnum
|
||||
|
||||
from tortoise import Model, fields
|
||||
|
||||
|
||||
class ProductType(IntEnum):
|
||||
article = 1
|
||||
page = 2
|
||||
|
||||
|
||||
class PermissionAction(IntEnum):
|
||||
create = 1
|
||||
delete = 2
|
||||
update = 3
|
||||
read = 4
|
||||
|
||||
|
||||
class Status(IntEnum):
|
||||
on = 1
|
||||
off = 0
|
||||
|
||||
|
||||
class User(Model):
|
||||
username = fields.CharField(max_length=20, unique=True)
|
||||
password = fields.CharField(max_length=200)
|
||||
last_login = fields.DatetimeField(description="Last Login", default=datetime.datetime.now)
|
||||
is_active = fields.BooleanField(default=True, description="Is Active")
|
||||
is_superuser = fields.BooleanField(default=False, description="Is SuperUser")
|
||||
avatar = fields.CharField(max_length=200, default="")
|
||||
intro = fields.TextField(default="")
|
||||
|
||||
|
||||
class Email(Model):
|
||||
email = fields.CharField(max_length=200)
|
||||
is_primary = fields.BooleanField(default=False)
|
||||
user: fields.ForeignKeyRelation[User] = fields.ForeignKeyField(
|
||||
"models_second.User", db_constraint=False
|
||||
)
|
||||
|
||||
|
||||
class Category(Model):
|
||||
slug = fields.CharField(max_length=200)
|
||||
name = fields.CharField(max_length=200)
|
||||
user: fields.ForeignKeyRelation[User] = fields.ForeignKeyField(
|
||||
"models_second.User", description="User"
|
||||
)
|
||||
created_at = fields.DatetimeField(auto_now_add=True)
|
||||
|
||||
|
||||
class Product(Model):
|
||||
categories: fields.ManyToManyRelation[Category] = fields.ManyToManyField(
|
||||
"models_second.Category"
|
||||
)
|
||||
name = fields.CharField(max_length=50)
|
||||
view_num = fields.IntField(description="View Num")
|
||||
sort = fields.IntField()
|
||||
is_reviewed = fields.BooleanField(description="Is Reviewed")
|
||||
type: int = fields.IntEnumField(
|
||||
ProductType, description="Product Type", source_field="type_db_alias"
|
||||
)
|
||||
image = fields.CharField(max_length=200)
|
||||
body = fields.TextField()
|
||||
created_at = fields.DatetimeField(auto_now_add=True)
|
||||
|
||||
|
||||
class Config(Model):
|
||||
label = fields.CharField(max_length=200)
|
||||
key = fields.CharField(max_length=20)
|
||||
value: dict = fields.JSONField()
|
||||
status: Status = fields.IntEnumField(Status, default=Status.on)
|
129
tests/old_models.py
Normal file
129
tests/old_models.py
Normal file
@ -0,0 +1,129 @@
|
||||
import datetime
|
||||
from enum import IntEnum
|
||||
|
||||
from tortoise import Model, fields
|
||||
from tortoise.indexes import Index
|
||||
|
||||
from tests.indexes import CustomIndex
|
||||
|
||||
|
||||
class ProductType(IntEnum):
|
||||
article = 1
|
||||
page = 2
|
||||
|
||||
|
||||
class PermissionAction(IntEnum):
|
||||
create = 1
|
||||
delete = 2
|
||||
update = 3
|
||||
read = 4
|
||||
|
||||
|
||||
class Status(IntEnum):
|
||||
on = 1
|
||||
off = 0
|
||||
|
||||
|
||||
class User(Model):
|
||||
username = fields.CharField(max_length=20)
|
||||
password = fields.CharField(max_length=200)
|
||||
last_login = fields.DatetimeField(description="Last Login", default=datetime.datetime.now)
|
||||
is_active = fields.BooleanField(default=True, description="Is Active")
|
||||
is_superuser = fields.BooleanField(default=False, description="Is SuperUser")
|
||||
avatar = fields.CharField(max_length=200, default="")
|
||||
intro = fields.TextField(default="")
|
||||
longitude = fields.DecimalField(max_digits=12, decimal_places=9)
|
||||
|
||||
class Meta:
|
||||
indexes = [Index(fields=("username", "is_active")), CustomIndex(fields=("is_superuser",))]
|
||||
|
||||
|
||||
class Email(Model):
|
||||
email = fields.CharField(max_length=200)
|
||||
company = fields.CharField(max_length=100, db_index=True)
|
||||
is_primary = fields.BooleanField(default=False)
|
||||
user: fields.ForeignKeyRelation[User] = fields.ForeignKeyField(
|
||||
"models.User", db_constraint=False
|
||||
)
|
||||
|
||||
|
||||
class Category(Model):
|
||||
slug = fields.CharField(max_length=200)
|
||||
name = fields.CharField(max_length=200)
|
||||
user: fields.ForeignKeyRelation[User] = fields.ForeignKeyField(
|
||||
"models.User", description="User"
|
||||
)
|
||||
title = fields.CharField(max_length=20, unique=True)
|
||||
created_at = fields.DatetimeField(auto_now_add=True)
|
||||
|
||||
class Meta:
|
||||
indexes = [Index(fields=("slug",))]
|
||||
|
||||
|
||||
class Product(Model):
|
||||
categories: fields.ManyToManyRelation[Category] = fields.ManyToManyField("models.Category")
|
||||
uid = fields.IntField(source_field="uuid", unique=True)
|
||||
name = fields.CharField(max_length=50)
|
||||
view_num = fields.IntField(description="View Num")
|
||||
sort = fields.IntField()
|
||||
is_review = fields.BooleanField(description="Is Reviewed")
|
||||
type: int = fields.IntEnumField(
|
||||
ProductType, description="Product Type", source_field="type_db_alias"
|
||||
)
|
||||
image = fields.CharField(max_length=200)
|
||||
body = fields.TextField()
|
||||
created_at = fields.DatetimeField(auto_now_add=True)
|
||||
is_delete = fields.BooleanField(default=False)
|
||||
|
||||
|
||||
class Config(Model):
|
||||
slug = fields.CharField(primary_key=True, max_length=10)
|
||||
category: fields.ManyToManyRelation[Category] = fields.ManyToManyField("models.Category")
|
||||
categories: fields.ManyToManyRelation[Category] = fields.ManyToManyField(
|
||||
"models.Category", through="config_category_map", related_name="config_set"
|
||||
)
|
||||
name = fields.CharField(max_length=100, unique=True)
|
||||
label = fields.CharField(max_length=200)
|
||||
key = fields.CharField(max_length=20)
|
||||
value: dict = fields.JSONField()
|
||||
status: Status = fields.IntEnumField(Status, default=Status.on)
|
||||
|
||||
class Meta:
|
||||
table = "configs"
|
||||
|
||||
|
||||
class DontManageMe(Model):
|
||||
name = fields.CharField(max_length=50)
|
||||
|
||||
class Meta:
|
||||
table = "dont_manage"
|
||||
|
||||
|
||||
class Ignore(Model):
|
||||
name = fields.CharField(max_length=50)
|
||||
|
||||
class Meta:
|
||||
managed = True
|
||||
|
||||
|
||||
def main() -> None:
|
||||
"""Generate a python file for the old_models_describe"""
|
||||
from pathlib import Path
|
||||
|
||||
from tortoise import run_async
|
||||
from tortoise.contrib.test import init_memory_sqlite
|
||||
|
||||
from aerich.utils import get_models_describe
|
||||
|
||||
@init_memory_sqlite
|
||||
async def run() -> None:
|
||||
old_models_describe = get_models_describe("models")
|
||||
p = Path("old_models_describe.py")
|
||||
p.write_text(f"{old_models_describe = }", encoding="utf-8")
|
||||
print(f"Write value to {p}\nYou can reformat it by `ruff format {p}`")
|
||||
|
||||
run_async(run())
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
11
tests/test_command.py
Normal file
11
tests/test_command.py
Normal file
@ -0,0 +1,11 @@
|
||||
from aerich import Command
|
||||
from conftest import tortoise_orm
|
||||
|
||||
|
||||
async def test_command(mocker):
|
||||
mocker.patch("os.listdir", return_value=[])
|
||||
async with Command(tortoise_orm) as command:
|
||||
history = await command.history()
|
||||
heads = await command.heads()
|
||||
assert history == []
|
||||
assert heads == []
|
@ -1,114 +1,232 @@
|
||||
from tortoise import Tortoise
|
||||
from tortoise.backends.asyncpg.schema_generator import AsyncpgSchemaGenerator
|
||||
from tortoise.backends.mysql.schema_generator import MySQLSchemaGenerator
|
||||
from tortoise.backends.sqlite.schema_generator import SqliteSchemaGenerator
|
||||
from tortoise.contrib import test
|
||||
import tortoise
|
||||
|
||||
from aerich.ddl.mysql import MysqlDDL
|
||||
from aerich.ddl.postgres import PostgresDDL
|
||||
from aerich.ddl.sqlite import SqliteDDL
|
||||
from tests.models import Category
|
||||
from aerich.migrate import Migrate
|
||||
from tests.models import Category, Product, User
|
||||
|
||||
|
||||
class TestDDL(test.TruncationTestCase):
|
||||
maxDiff = None
|
||||
|
||||
def setUp(self) -> None:
|
||||
client = Tortoise.get_connection("models")
|
||||
if client.schema_generator is MySQLSchemaGenerator:
|
||||
self.ddl = MysqlDDL(client)
|
||||
elif client.schema_generator is SqliteSchemaGenerator:
|
||||
self.ddl = SqliteDDL(client)
|
||||
elif client.schema_generator is AsyncpgSchemaGenerator:
|
||||
self.ddl = PostgresDDL(client)
|
||||
|
||||
def test_create_table(self):
|
||||
ret = self.ddl.create_table(Category)
|
||||
if isinstance(self.ddl, MysqlDDL):
|
||||
self.assertEqual(
|
||||
ret,
|
||||
"""CREATE TABLE IF NOT EXISTS `category` (
|
||||
def test_create_table():
|
||||
ret = Migrate.ddl.create_table(Category)
|
||||
if isinstance(Migrate.ddl, MysqlDDL):
|
||||
if tortoise.__version__ >= "0.24":
|
||||
assert (
|
||||
ret
|
||||
== """CREATE TABLE IF NOT EXISTS `category` (
|
||||
`id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
|
||||
`slug` VARCHAR(200) NOT NULL,
|
||||
`name` VARCHAR(200) NOT NULL,
|
||||
`created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6),
|
||||
`user_id` INT NOT NULL COMMENT 'User',
|
||||
CONSTRAINT `fk_category_user_e2e3874c` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE
|
||||
) CHARACTER SET utf8mb4;""",
|
||||
`slug` VARCHAR(100) NOT NULL,
|
||||
`name` VARCHAR(200),
|
||||
`title` VARCHAR(20) NOT NULL,
|
||||
`created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6),
|
||||
`owner_id` INT NOT NULL COMMENT 'User',
|
||||
CONSTRAINT `fk_category_user_110d4c63` FOREIGN KEY (`owner_id`) REFERENCES `user` (`id`) ON DELETE CASCADE,
|
||||
FULLTEXT KEY `idx_category_slug_e9bcff` (`slug`)
|
||||
) CHARACTER SET utf8mb4"""
|
||||
)
|
||||
elif isinstance(self.ddl, SqliteDDL):
|
||||
self.assertEqual(
|
||||
ret,
|
||||
"""CREATE TABLE IF NOT EXISTS "category" (
|
||||
"id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
"slug" VARCHAR(200) NOT NULL,
|
||||
"name" VARCHAR(200) NOT NULL,
|
||||
"created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE /* User */
|
||||
);""",
|
||||
)
|
||||
elif isinstance(self.ddl, PostgresDDL):
|
||||
self.assertEqual(
|
||||
ret,
|
||||
"""CREATE TABLE IF NOT EXISTS "category" (
|
||||
"id" SERIAL NOT NULL PRIMARY KEY,
|
||||
"slug" VARCHAR(200) NOT NULL,
|
||||
"name" VARCHAR(200) NOT NULL,
|
||||
"created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE
|
||||
);
|
||||
COMMENT ON COLUMN "category"."user_id" IS 'User';""",
|
||||
)
|
||||
|
||||
def test_drop_table(self):
|
||||
ret = self.ddl.drop_table(Category)
|
||||
self.assertEqual(ret, "DROP TABLE IF EXISTS category")
|
||||
|
||||
def test_add_column(self):
|
||||
ret = self.ddl.add_column(Category, Category._meta.fields_map.get("name"))
|
||||
if isinstance(self.ddl, MysqlDDL):
|
||||
self.assertEqual(ret, "ALTER TABLE category ADD `name` VARCHAR(200) NOT NULL")
|
||||
elif isinstance(self.ddl, PostgresDDL):
|
||||
self.assertEqual(ret, 'ALTER TABLE category ADD "name" VARCHAR(200) NOT NULL')
|
||||
elif isinstance(self.ddl, SqliteDDL):
|
||||
self.assertEqual(ret, 'ALTER TABLE category ADD "name" VARCHAR(200) NOT NULL')
|
||||
|
||||
def test_drop_column(self):
|
||||
ret = self.ddl.drop_column(Category, "name")
|
||||
self.assertEqual(ret, "ALTER TABLE category DROP COLUMN name")
|
||||
self.assertEqual(ret, "ALTER TABLE category DROP COLUMN name")
|
||||
|
||||
def test_add_index(self):
|
||||
index = self.ddl.add_index(Category, ["name"])
|
||||
index_u = self.ddl.add_index(Category, ["name"], True)
|
||||
if isinstance(self.ddl, MysqlDDL):
|
||||
self.assertEqual(
|
||||
index, "ALTER TABLE category ADD INDEX idx_category_name_8b0cb9 (`name`)"
|
||||
)
|
||||
self.assertEqual(
|
||||
index_u, "ALTER TABLE category ADD UNIQUE INDEX uid_category_name_8b0cb9 (`name`)"
|
||||
)
|
||||
elif isinstance(self.ddl, SqliteDDL):
|
||||
self.assertEqual(
|
||||
index_u, 'ALTER TABLE category ADD UNIQUE INDEX uid_category_name_8b0cb9 ("name")'
|
||||
)
|
||||
self.assertEqual(
|
||||
index_u, 'ALTER TABLE category ADD UNIQUE INDEX uid_category_name_8b0cb9 ("name")'
|
||||
)
|
||||
|
||||
def test_drop_index(self):
|
||||
ret = self.ddl.drop_index(Category, ["name"])
|
||||
self.assertEqual(ret, "ALTER TABLE category DROP INDEX idx_category_name_8b0cb9")
|
||||
ret = self.ddl.drop_index(Category, ["name"], True)
|
||||
self.assertEqual(ret, "ALTER TABLE category DROP INDEX uid_category_name_8b0cb9")
|
||||
|
||||
def test_add_fk(self):
|
||||
ret = self.ddl.add_fk(Category, Category._meta.fields_map.get("user"))
|
||||
self.assertEqual(
|
||||
ret,
|
||||
"ALTER TABLE category ADD CONSTRAINT `fk_category_user_e2e3874c` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE",
|
||||
return
|
||||
assert (
|
||||
ret
|
||||
== """CREATE TABLE IF NOT EXISTS `category` (
|
||||
`id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
|
||||
`slug` VARCHAR(100) NOT NULL,
|
||||
`name` VARCHAR(200),
|
||||
`title` VARCHAR(20) NOT NULL,
|
||||
`created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6),
|
||||
`owner_id` INT NOT NULL COMMENT 'User',
|
||||
CONSTRAINT `fk_category_user_110d4c63` FOREIGN KEY (`owner_id`) REFERENCES `user` (`id`) ON DELETE CASCADE
|
||||
) CHARACTER SET utf8mb4;
|
||||
CREATE FULLTEXT INDEX `idx_category_slug_e9bcff` ON `category` (`slug`)"""
|
||||
)
|
||||
|
||||
def test_drop_fk(self):
|
||||
ret = self.ddl.drop_fk(Category, Category._meta.fields_map.get("user"))
|
||||
self.assertEqual(ret, "ALTER TABLE category DROP FOREIGN KEY fk_category_user_e2e3874c")
|
||||
elif isinstance(Migrate.ddl, SqliteDDL):
|
||||
exists = "IF NOT EXISTS " if tortoise.__version__ >= "0.24" else ""
|
||||
assert (
|
||||
ret
|
||||
== f"""CREATE TABLE IF NOT EXISTS "category" (
|
||||
"id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
"slug" VARCHAR(100) NOT NULL,
|
||||
"name" VARCHAR(200),
|
||||
"title" VARCHAR(20) NOT NULL,
|
||||
"created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"owner_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE /* User */
|
||||
);
|
||||
CREATE INDEX {exists}"idx_category_slug_e9bcff" ON "category" ("slug")"""
|
||||
)
|
||||
|
||||
elif isinstance(Migrate.ddl, PostgresDDL):
|
||||
assert (
|
||||
ret
|
||||
== """CREATE TABLE IF NOT EXISTS "category" (
|
||||
"id" SERIAL NOT NULL PRIMARY KEY,
|
||||
"slug" VARCHAR(100) NOT NULL,
|
||||
"name" VARCHAR(200),
|
||||
"title" VARCHAR(20) NOT NULL,
|
||||
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"owner_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE
|
||||
);
|
||||
CREATE INDEX IF NOT EXISTS "idx_category_slug_e9bcff" ON "category" USING HASH ("slug");
|
||||
COMMENT ON COLUMN "category"."owner_id" IS 'User'"""
|
||||
)
|
||||
|
||||
|
||||
def test_drop_table():
|
||||
ret = Migrate.ddl.drop_table(Category._meta.db_table)
|
||||
if isinstance(Migrate.ddl, MysqlDDL):
|
||||
assert ret == "DROP TABLE IF EXISTS `category`"
|
||||
else:
|
||||
assert ret == 'DROP TABLE IF EXISTS "category"'
|
||||
|
||||
|
||||
def test_add_column():
|
||||
ret = Migrate.ddl.add_column(Category, Category._meta.fields_map["name"].describe(False))
|
||||
if isinstance(Migrate.ddl, MysqlDDL):
|
||||
assert ret == "ALTER TABLE `category` ADD `name` VARCHAR(200)"
|
||||
else:
|
||||
assert ret == 'ALTER TABLE "category" ADD "name" VARCHAR(200)'
|
||||
# add unique column
|
||||
ret = Migrate.ddl.add_column(User, User._meta.fields_map["username"].describe(False))
|
||||
if isinstance(Migrate.ddl, MysqlDDL):
|
||||
assert ret == "ALTER TABLE `user` ADD `username` VARCHAR(20) NOT NULL UNIQUE"
|
||||
elif isinstance(Migrate.ddl, PostgresDDL):
|
||||
assert ret == 'ALTER TABLE "user" ADD "username" VARCHAR(20) NOT NULL UNIQUE'
|
||||
else:
|
||||
assert ret == 'ALTER TABLE "user" ADD "username" VARCHAR(20) NOT NULL'
|
||||
|
||||
|
||||
def test_modify_column():
|
||||
if isinstance(Migrate.ddl, SqliteDDL):
|
||||
return
|
||||
|
||||
ret0 = Migrate.ddl.modify_column(Category, Category._meta.fields_map["name"].describe(False))
|
||||
ret1 = Migrate.ddl.modify_column(User, User._meta.fields_map["is_active"].describe(False))
|
||||
if isinstance(Migrate.ddl, MysqlDDL):
|
||||
assert ret0 == "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200)"
|
||||
assert (
|
||||
ret1
|
||||
== "ALTER TABLE `user` MODIFY COLUMN `is_active` BOOL NOT NULL COMMENT 'Is Active' DEFAULT 1"
|
||||
)
|
||||
elif isinstance(Migrate.ddl, PostgresDDL):
|
||||
assert (
|
||||
ret0
|
||||
== 'ALTER TABLE "category" ALTER COLUMN "name" TYPE VARCHAR(200) USING "name"::VARCHAR(200)'
|
||||
)
|
||||
|
||||
assert (
|
||||
ret1 == 'ALTER TABLE "user" ALTER COLUMN "is_active" TYPE BOOL USING "is_active"::BOOL'
|
||||
)
|
||||
|
||||
|
||||
def test_alter_column_default():
|
||||
if isinstance(Migrate.ddl, SqliteDDL):
|
||||
return
|
||||
ret = Migrate.ddl.alter_column_default(User, User._meta.fields_map["intro"].describe(False))
|
||||
if isinstance(Migrate.ddl, PostgresDDL):
|
||||
assert ret == 'ALTER TABLE "user" ALTER COLUMN "intro" SET DEFAULT \'\''
|
||||
elif isinstance(Migrate.ddl, MysqlDDL):
|
||||
assert ret == "ALTER TABLE `user` ALTER COLUMN `intro` SET DEFAULT ''"
|
||||
|
||||
ret = Migrate.ddl.alter_column_default(
|
||||
Category, Category._meta.fields_map["created_at"].describe(False)
|
||||
)
|
||||
if isinstance(Migrate.ddl, PostgresDDL):
|
||||
assert (
|
||||
ret == 'ALTER TABLE "category" ALTER COLUMN "created_at" SET DEFAULT CURRENT_TIMESTAMP'
|
||||
)
|
||||
elif isinstance(Migrate.ddl, MysqlDDL):
|
||||
assert (
|
||||
ret
|
||||
== "ALTER TABLE `category` ALTER COLUMN `created_at` SET DEFAULT CURRENT_TIMESTAMP(6)"
|
||||
)
|
||||
|
||||
ret = Migrate.ddl.alter_column_default(
|
||||
Product, Product._meta.fields_map["view_num"].describe(False)
|
||||
)
|
||||
if isinstance(Migrate.ddl, PostgresDDL):
|
||||
assert ret == 'ALTER TABLE "product" ALTER COLUMN "view_num" SET DEFAULT 0'
|
||||
elif isinstance(Migrate.ddl, MysqlDDL):
|
||||
assert ret == "ALTER TABLE `product` ALTER COLUMN `view_num` SET DEFAULT 0"
|
||||
|
||||
|
||||
def test_alter_column_null():
|
||||
if isinstance(Migrate.ddl, (SqliteDDL, MysqlDDL)):
|
||||
return
|
||||
ret = Migrate.ddl.alter_column_null(Category, Category._meta.fields_map["name"].describe(False))
|
||||
if isinstance(Migrate.ddl, PostgresDDL):
|
||||
assert ret == 'ALTER TABLE "category" ALTER COLUMN "name" DROP NOT NULL'
|
||||
|
||||
|
||||
def test_set_comment():
|
||||
if isinstance(Migrate.ddl, (SqliteDDL, MysqlDDL)):
|
||||
return
|
||||
ret = Migrate.ddl.set_comment(Category, Category._meta.fields_map["name"].describe(False))
|
||||
assert ret == 'COMMENT ON COLUMN "category"."name" IS NULL'
|
||||
|
||||
ret = Migrate.ddl.set_comment(Category, Category._meta.fields_map["owner"].describe(False))
|
||||
assert ret == 'COMMENT ON COLUMN "category"."owner_id" IS \'User\''
|
||||
|
||||
|
||||
def test_drop_column():
|
||||
ret = Migrate.ddl.drop_column(Category, "name")
|
||||
if isinstance(Migrate.ddl, MysqlDDL):
|
||||
assert ret == "ALTER TABLE `category` DROP COLUMN `name`"
|
||||
elif isinstance(Migrate.ddl, PostgresDDL):
|
||||
assert ret == 'ALTER TABLE "category" DROP COLUMN "name"'
|
||||
|
||||
|
||||
def test_add_index():
|
||||
index = Migrate.ddl.add_index(Category, ["name"])
|
||||
index_u = Migrate.ddl.add_index(Category, ["name"], True)
|
||||
if isinstance(Migrate.ddl, MysqlDDL):
|
||||
assert index == "ALTER TABLE `category` ADD INDEX `idx_category_name_8b0cb9` (`name`)"
|
||||
assert index_u == "ALTER TABLE `category` ADD UNIQUE INDEX `name` (`name`)"
|
||||
elif isinstance(Migrate.ddl, PostgresDDL):
|
||||
assert (
|
||||
index == 'CREATE INDEX IF NOT EXISTS "idx_category_name_8b0cb9" ON "category" ("name")'
|
||||
)
|
||||
assert (
|
||||
index_u
|
||||
== 'CREATE UNIQUE INDEX IF NOT EXISTS "uid_category_name_8b0cb9" ON "category" ("name")'
|
||||
)
|
||||
else:
|
||||
assert index == 'CREATE INDEX "idx_category_name_8b0cb9" ON "category" ("name")'
|
||||
assert index_u == 'CREATE UNIQUE INDEX "uid_category_name_8b0cb9" ON "category" ("name")'
|
||||
|
||||
|
||||
def test_drop_index():
|
||||
ret = Migrate.ddl.drop_index(Category, ["name"])
|
||||
ret_u = Migrate.ddl.drop_index(Category, ["name"], True)
|
||||
if isinstance(Migrate.ddl, MysqlDDL):
|
||||
assert ret == "ALTER TABLE `category` DROP INDEX `idx_category_name_8b0cb9`"
|
||||
assert ret_u == "ALTER TABLE `category` DROP INDEX `name`"
|
||||
else:
|
||||
assert ret == 'DROP INDEX IF EXISTS "idx_category_name_8b0cb9"'
|
||||
assert ret_u == 'DROP INDEX IF EXISTS "uid_category_name_8b0cb9"'
|
||||
|
||||
|
||||
def test_add_fk():
|
||||
ret = Migrate.ddl.add_fk(
|
||||
Category, Category._meta.fields_map["owner"].describe(False), User.describe(False)
|
||||
)
|
||||
if isinstance(Migrate.ddl, MysqlDDL):
|
||||
assert (
|
||||
ret
|
||||
== "ALTER TABLE `category` ADD CONSTRAINT `fk_category_user_110d4c63` FOREIGN KEY (`owner_id`) REFERENCES `user` (`id`) ON DELETE CASCADE"
|
||||
)
|
||||
else:
|
||||
assert (
|
||||
ret
|
||||
== 'ALTER TABLE "category" ADD CONSTRAINT "fk_category_user_110d4c63" FOREIGN KEY ("owner_id") REFERENCES "user" ("id") ON DELETE CASCADE'
|
||||
)
|
||||
|
||||
|
||||
def test_drop_fk():
|
||||
ret = Migrate.ddl.drop_fk(
|
||||
Category, Category._meta.fields_map["owner"].describe(False), User.describe(False)
|
||||
)
|
||||
if isinstance(Migrate.ddl, MysqlDDL):
|
||||
assert ret == "ALTER TABLE `category` DROP FOREIGN KEY `fk_category_user_110d4c63`"
|
||||
elif isinstance(Migrate.ddl, PostgresDDL):
|
||||
assert ret == 'ALTER TABLE "category" DROP CONSTRAINT IF EXISTS "fk_category_user_110d4c63"'
|
||||
else:
|
||||
assert ret == 'ALTER TABLE "category" DROP FOREIGN KEY "fk_category_user_110d4c63"'
|
||||
|
106
tests/test_fake.py
Normal file
106
tests/test_fake.py
Normal file
@ -0,0 +1,106 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
from pathlib import Path
|
||||
|
||||
from tests._utils import Dialect, run_shell
|
||||
|
||||
|
||||
def _append_field(*files: str, name="field_1") -> None:
|
||||
for file in files:
|
||||
p = Path(file)
|
||||
field = f" {name} = fields.IntField(default=0)"
|
||||
with p.open("a") as f:
|
||||
f.write(os.linesep + field)
|
||||
|
||||
|
||||
def test_fake(new_aerich_project):
|
||||
if Dialect.is_sqlite():
|
||||
# TODO: go ahead if sqlite alter-column supported
|
||||
return
|
||||
output = run_shell("aerich init -t settings.TORTOISE_ORM")
|
||||
assert "Success" in output
|
||||
output = run_shell("aerich init-db")
|
||||
assert "Success" in output
|
||||
output = run_shell("aerich --app models_second init-db")
|
||||
assert "Success" in output
|
||||
output = run_shell("pytest _tests.py::test_init_db")
|
||||
assert "error" not in output.lower()
|
||||
_append_field("models.py", "models_second.py")
|
||||
output = run_shell("aerich migrate")
|
||||
assert "Success" in output
|
||||
output = run_shell("aerich --app models_second migrate")
|
||||
assert "Success" in output
|
||||
output = run_shell("aerich upgrade --fake")
|
||||
assert "FAKED" in output
|
||||
output = run_shell("aerich --app models_second upgrade --fake")
|
||||
assert "FAKED" in output
|
||||
output = run_shell("pytest _tests.py::test_fake_field_1")
|
||||
assert "error" not in output.lower()
|
||||
_append_field("models.py", "models_second.py", name="field_2")
|
||||
output = run_shell("aerich migrate")
|
||||
assert "Success" in output
|
||||
output = run_shell("aerich --app models_second migrate")
|
||||
assert "Success" in output
|
||||
output = run_shell("aerich heads")
|
||||
assert "_update.py" in output
|
||||
output = run_shell("aerich upgrade --fake")
|
||||
assert "FAKED" in output
|
||||
output = run_shell("aerich --app models_second upgrade --fake")
|
||||
assert "FAKED" in output
|
||||
output = run_shell("pytest _tests.py::test_fake_field_2")
|
||||
assert "error" not in output.lower()
|
||||
output = run_shell("aerich heads")
|
||||
assert "No available heads." in output
|
||||
output = run_shell("aerich --app models_second heads")
|
||||
assert "No available heads." in output
|
||||
_append_field("models.py", "models_second.py", name="field_3")
|
||||
run_shell("aerich migrate", capture_output=False)
|
||||
run_shell("aerich --app models_second migrate", capture_output=False)
|
||||
run_shell("aerich upgrade --fake", capture_output=False)
|
||||
run_shell("aerich --app models_second upgrade --fake", capture_output=False)
|
||||
output = run_shell("aerich downgrade --fake -v 2 --yes", input="y\n")
|
||||
assert "FAKED" in output
|
||||
output = run_shell("aerich --app models_second downgrade --fake -v 2 --yes", input="y\n")
|
||||
assert "FAKED" in output
|
||||
output = run_shell("aerich heads")
|
||||
assert "No available heads." not in output
|
||||
assert not re.search(r"1_\d+_update\.py", output)
|
||||
assert re.search(r"2_\d+_update\.py", output)
|
||||
output = run_shell("aerich --app models_second heads")
|
||||
assert "No available heads." not in output
|
||||
assert not re.search(r"1_\d+_update\.py", output)
|
||||
assert re.search(r"2_\d+_update\.py", output)
|
||||
output = run_shell("aerich downgrade --fake -v 1 --yes", input="y\n")
|
||||
assert "FAKED" in output
|
||||
output = run_shell("aerich --app models_second downgrade --fake -v 1 --yes", input="y\n")
|
||||
assert "FAKED" in output
|
||||
output = run_shell("aerich heads")
|
||||
assert "No available heads." not in output
|
||||
assert re.search(r"1_\d+_update\.py", output)
|
||||
assert re.search(r"2_\d+_update\.py", output)
|
||||
output = run_shell("aerich --app models_second heads")
|
||||
assert "No available heads." not in output
|
||||
assert re.search(r"1_\d+_update\.py", output)
|
||||
assert re.search(r"2_\d+_update\.py", output)
|
||||
output = run_shell("aerich upgrade --fake")
|
||||
assert "FAKED" in output
|
||||
output = run_shell("aerich --app models_second upgrade --fake")
|
||||
assert "FAKED" in output
|
||||
output = run_shell("aerich heads")
|
||||
assert "No available heads." in output
|
||||
output = run_shell("aerich --app models_second heads")
|
||||
assert "No available heads." in output
|
||||
output = run_shell("aerich downgrade --fake -v 1 --yes", input="y\n")
|
||||
assert "FAKED" in output
|
||||
output = run_shell("aerich --app models_second downgrade --fake -v 1 --yes", input="y\n")
|
||||
assert "FAKED" in output
|
||||
output = run_shell("aerich heads")
|
||||
assert "No available heads." not in output
|
||||
assert re.search(r"1_\d+_update\.py", output)
|
||||
assert re.search(r"2_\d+_update\.py", output)
|
||||
output = run_shell("aerich --app models_second heads")
|
||||
assert "No available heads." not in output
|
||||
assert re.search(r"1_\d+_update\.py", output)
|
||||
assert re.search(r"2_\d+_update\.py", output)
|
17
tests/test_inspectdb.py
Normal file
17
tests/test_inspectdb.py
Normal file
@ -0,0 +1,17 @@
|
||||
from tests._utils import Dialect, run_shell
|
||||
|
||||
|
||||
def test_inspect(new_aerich_project):
|
||||
if Dialect.is_sqlite():
|
||||
# TODO: test sqlite after #384 fixed
|
||||
return
|
||||
run_shell("aerich init -t settings.TORTOISE_ORM")
|
||||
run_shell("aerich init-db")
|
||||
ret = run_shell("aerich inspectdb -t product")
|
||||
assert ret.startswith("from tortoise import Model, fields")
|
||||
assert "primary_key=True" in ret
|
||||
assert "fields.DatetimeField" in ret
|
||||
assert "fields.FloatField" in ret
|
||||
assert "fields.UUIDField" in ret
|
||||
if Dialect.is_mysql():
|
||||
assert "db_index=True" in ret
|
1246
tests/test_migrate.py
Normal file
1246
tests/test_migrate.py
Normal file
File diff suppressed because it is too large
Load Diff
18
tests/test_python_m.py
Normal file
18
tests/test_python_m.py
Normal file
@ -0,0 +1,18 @@
|
||||
import subprocess # nosec
|
||||
from pathlib import Path
|
||||
|
||||
from aerich.version import __version__
|
||||
from tests._utils import chdir, run_shell
|
||||
|
||||
|
||||
def test_python_m_aerich():
|
||||
assert __version__ in run_shell("python -m aerich --version")
|
||||
|
||||
|
||||
def test_poetry_add(tmp_path: Path):
|
||||
package = Path(__file__).parent.resolve().parent
|
||||
with chdir(tmp_path):
|
||||
subprocess.run(["poetry", "new", "foo"]) # nosec
|
||||
with chdir("foo"):
|
||||
r = subprocess.run(["poetry", "add", package]) # nosec
|
||||
assert r.returncode == 0
|
213
tests/test_sqlite_migrate.py
Normal file
213
tests/test_sqlite_migrate.py
Normal file
@ -0,0 +1,213 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
import platform
|
||||
import shlex
|
||||
import shutil
|
||||
import subprocess
|
||||
from collections.abc import Generator
|
||||
from contextlib import contextmanager
|
||||
from pathlib import Path
|
||||
|
||||
from tests._utils import Dialect, chdir, copy_files
|
||||
|
||||
|
||||
def run_aerich(cmd: str) -> subprocess.CompletedProcess | None:
|
||||
if not cmd.startswith("poetry") and not cmd.startswith("python"):
|
||||
if not cmd.startswith("aerich"):
|
||||
cmd = "aerich " + cmd
|
||||
if platform.system() == "Windows":
|
||||
cmd = "python -m " + cmd
|
||||
r = None
|
||||
with contextlib.suppress(subprocess.TimeoutExpired):
|
||||
r = subprocess.run(shlex.split(cmd), timeout=2)
|
||||
return r
|
||||
|
||||
|
||||
def run_shell(cmd: str) -> subprocess.CompletedProcess:
|
||||
envs = dict(os.environ, PYTHONPATH=".")
|
||||
return subprocess.run(shlex.split(cmd), env=envs)
|
||||
|
||||
|
||||
def _get_empty_db() -> Path:
|
||||
if (db_file := Path("db.sqlite3")).exists():
|
||||
db_file.unlink()
|
||||
return db_file
|
||||
|
||||
|
||||
@contextmanager
|
||||
def prepare_sqlite_project(tmp_path: Path) -> Generator[tuple[Path, str]]:
|
||||
test_dir = Path(__file__).parent
|
||||
asset_dir = test_dir / "assets" / "sqlite_migrate"
|
||||
with chdir(tmp_path):
|
||||
files = ("models.py", "settings.py", "_tests.py")
|
||||
copy_files(*(asset_dir / f for f in files), target_dir=Path())
|
||||
models_py, settings_py, test_py = (Path(f) for f in files)
|
||||
copy_files(asset_dir / "conftest_.py", target_dir=Path("conftest.py"))
|
||||
_get_empty_db()
|
||||
yield models_py, models_py.read_text("utf-8")
|
||||
|
||||
|
||||
def test_close_tortoise_connections_patch(tmp_path: Path) -> None:
|
||||
if not Dialect.is_sqlite():
|
||||
return
|
||||
with prepare_sqlite_project(tmp_path) as (models_py, models_text):
|
||||
run_aerich("aerich init -t settings.TORTOISE_ORM")
|
||||
r = run_aerich("aerich init-db")
|
||||
assert r is not None
|
||||
|
||||
|
||||
def test_sqlite_migrate_alter_indexed_unique(tmp_path: Path) -> None:
|
||||
if not Dialect.is_sqlite():
|
||||
return
|
||||
with prepare_sqlite_project(tmp_path) as (models_py, models_text):
|
||||
models_py.write_text(models_text.replace("db_index=False", "db_index=True"))
|
||||
run_aerich("aerich init -t settings.TORTOISE_ORM")
|
||||
run_aerich("aerich init-db")
|
||||
r = run_shell("pytest -s _tests.py::test_allow_duplicate")
|
||||
assert r.returncode == 0
|
||||
models_py.write_text(models_text.replace("db_index=False", "unique=True"))
|
||||
run_aerich("aerich migrate") # migrations/models/1_
|
||||
run_aerich("aerich upgrade")
|
||||
r = run_shell("pytest _tests.py::test_unique_is_true")
|
||||
assert r.returncode == 0
|
||||
models_py.write_text(models_text.replace("db_index=False", "db_index=True"))
|
||||
run_aerich("aerich migrate") # migrations/models/2_
|
||||
run_aerich("aerich upgrade")
|
||||
r = run_shell("pytest -s _tests.py::test_allow_duplicate")
|
||||
assert r.returncode == 0
|
||||
|
||||
|
||||
M2M_WITH_CUSTOM_THROUGH = """
|
||||
groups = fields.ManyToManyField("models.Group", through="foo_group")
|
||||
|
||||
class Group(Model):
|
||||
name = fields.CharField(max_length=60)
|
||||
|
||||
class FooGroup(Model):
|
||||
foo = fields.ForeignKeyField("models.Foo")
|
||||
group = fields.ForeignKeyField("models.Group")
|
||||
is_active = fields.BooleanField(default=False)
|
||||
|
||||
class Meta:
|
||||
table = "foo_group"
|
||||
"""
|
||||
|
||||
|
||||
def test_sqlite_migrate(tmp_path: Path) -> None:
|
||||
if not Dialect.is_sqlite():
|
||||
return
|
||||
with prepare_sqlite_project(tmp_path) as (models_py, models_text):
|
||||
MODELS = models_text
|
||||
run_aerich("aerich init -t settings.TORTOISE_ORM")
|
||||
config_file = Path("pyproject.toml")
|
||||
modify_time = config_file.stat().st_mtime
|
||||
run_aerich("aerich init-db")
|
||||
run_aerich("aerich init -t settings.TORTOISE_ORM")
|
||||
assert modify_time == config_file.stat().st_mtime
|
||||
r = run_shell("pytest _tests.py::test_allow_duplicate")
|
||||
assert r.returncode == 0
|
||||
# Add index
|
||||
models_py.write_text(MODELS.replace("index=False", "index=True"))
|
||||
run_aerich("aerich migrate") # migrations/models/1_
|
||||
run_aerich("aerich upgrade")
|
||||
r = run_shell("pytest -s _tests.py::test_allow_duplicate")
|
||||
assert r.returncode == 0
|
||||
# Drop index
|
||||
models_py.write_text(MODELS)
|
||||
run_aerich("aerich migrate") # migrations/models/2_
|
||||
run_aerich("aerich upgrade")
|
||||
r = run_shell("pytest -s _tests.py::test_allow_duplicate")
|
||||
assert r.returncode == 0
|
||||
# Add unique index
|
||||
models_py.write_text(MODELS.replace("index=False", "index=True, unique=True"))
|
||||
run_aerich("aerich migrate") # migrations/models/3_
|
||||
run_aerich("aerich upgrade")
|
||||
r = run_shell("pytest _tests.py::test_unique_is_true")
|
||||
assert r.returncode == 0
|
||||
# Drop unique index
|
||||
models_py.write_text(MODELS)
|
||||
run_aerich("aerich migrate") # migrations/models/4_
|
||||
run_aerich("aerich upgrade")
|
||||
r = run_shell("pytest _tests.py::test_allow_duplicate")
|
||||
assert r.returncode == 0
|
||||
# Add field with unique=True
|
||||
with models_py.open("a") as f:
|
||||
f.write(" age = fields.IntField(unique=True, default=0)")
|
||||
run_aerich("aerich migrate") # migrations/models/5_
|
||||
run_aerich("aerich upgrade")
|
||||
r = run_shell("pytest _tests.py::test_add_unique_field")
|
||||
assert r.returncode == 0
|
||||
# Drop unique field
|
||||
models_py.write_text(MODELS)
|
||||
run_aerich("aerich migrate") # migrations/models/6_
|
||||
run_aerich("aerich upgrade")
|
||||
r = run_shell("pytest -s _tests.py::test_drop_unique_field")
|
||||
assert r.returncode == 0
|
||||
|
||||
# Initial with indexed field and then drop it
|
||||
migrations_dir = Path("migrations/models")
|
||||
shutil.rmtree(migrations_dir)
|
||||
db_file = _get_empty_db()
|
||||
models_py.write_text(MODELS + " age = fields.IntField(db_index=True)")
|
||||
run_aerich("aerich init -t settings.TORTOISE_ORM")
|
||||
run_aerich("aerich init-db")
|
||||
migration_file = list(migrations_dir.glob("0_*.py"))[0]
|
||||
assert "CREATE INDEX" in migration_file.read_text()
|
||||
r = run_shell("pytest _tests.py::test_with_age_field")
|
||||
assert r.returncode == 0
|
||||
models_py.write_text(MODELS)
|
||||
run_aerich("aerich migrate")
|
||||
run_aerich("aerich upgrade")
|
||||
migration_file_1 = list(migrations_dir.glob("1_*.py"))[0]
|
||||
assert "DROP INDEX" in migration_file_1.read_text()
|
||||
r = run_shell("pytest _tests.py::test_without_age_field")
|
||||
assert r.returncode == 0
|
||||
|
||||
# Generate migration file in emptry directory
|
||||
db_file.unlink()
|
||||
run_aerich("aerich init-db")
|
||||
assert not db_file.exists()
|
||||
for p in migrations_dir.glob("*"):
|
||||
if p.is_dir():
|
||||
shutil.rmtree(p)
|
||||
else:
|
||||
p.unlink()
|
||||
run_aerich("aerich init-db")
|
||||
assert db_file.exists()
|
||||
|
||||
# init without '[tool]' section in pyproject.toml
|
||||
config_file = Path("pyproject.toml")
|
||||
config_file.write_text('[project]\nname = "project"')
|
||||
run_aerich("init -t settings.TORTOISE_ORM")
|
||||
assert "[tool.aerich]" in config_file.read_text()
|
||||
|
||||
# add m2m with custom model for through
|
||||
models_py.write_text(MODELS + M2M_WITH_CUSTOM_THROUGH)
|
||||
run_aerich("aerich migrate")
|
||||
run_aerich("aerich upgrade")
|
||||
migration_file_1 = list(migrations_dir.glob("1_*.py"))[0]
|
||||
assert "foo_group" in migration_file_1.read_text()
|
||||
r = run_shell("pytest _tests.py::test_m2m_with_custom_through")
|
||||
assert r.returncode == 0
|
||||
|
||||
# add m2m field after init-db
|
||||
new = """
|
||||
groups = fields.ManyToManyField("models.Group", through="foo_group", related_name="users")
|
||||
|
||||
class Group(Model):
|
||||
name = fields.CharField(max_length=60)
|
||||
"""
|
||||
_get_empty_db()
|
||||
if migrations_dir.exists():
|
||||
shutil.rmtree(migrations_dir)
|
||||
models_py.write_text(MODELS)
|
||||
run_aerich("aerich init-db")
|
||||
models_py.write_text(MODELS + new)
|
||||
run_aerich("aerich migrate")
|
||||
run_aerich("aerich upgrade")
|
||||
migration_file_1 = list(migrations_dir.glob("1_*.py"))[0]
|
||||
assert "foo_group" in migration_file_1.read_text()
|
||||
r = run_shell("pytest _tests.py::test_add_m2m_field_after_init_db")
|
||||
assert r.returncode == 0
|
164
tests/test_utils.py
Normal file
164
tests/test_utils.py
Normal file
@ -0,0 +1,164 @@
|
||||
from aerich.utils import get_dict_diff_by_key, import_py_file
|
||||
|
||||
|
||||
def test_import_py_file() -> None:
|
||||
m = import_py_file("aerich/utils.py")
|
||||
assert getattr(m, "import_py_file", None)
|
||||
|
||||
|
||||
class TestDiffFields:
|
||||
def test_the_same_through_order(self) -> None:
|
||||
old = [
|
||||
{"name": "users", "through": "users_group"},
|
||||
{"name": "admins", "through": "admins_group"},
|
||||
]
|
||||
new = [
|
||||
{"name": "members", "through": "users_group"},
|
||||
{"name": "admins", "through": "admins_group"},
|
||||
]
|
||||
diffs = list(get_dict_diff_by_key(old, new))
|
||||
assert type(get_dict_diff_by_key(old, new)).__name__ == "generator"
|
||||
assert len(diffs) == 1
|
||||
assert diffs == [("change", [0, "name"], ("users", "members"))]
|
||||
|
||||
def test_same_through_with_different_orders(self) -> None:
|
||||
old = [
|
||||
{"name": "users", "through": "users_group"},
|
||||
{"name": "admins", "through": "admins_group"},
|
||||
]
|
||||
new = [
|
||||
{"name": "admins", "through": "admins_group"},
|
||||
{"name": "members", "through": "users_group"},
|
||||
]
|
||||
diffs = list(get_dict_diff_by_key(old, new))
|
||||
assert len(diffs) == 1
|
||||
assert diffs == [("change", [0, "name"], ("users", "members"))]
|
||||
|
||||
def test_the_same_field_name_order(self) -> None:
|
||||
old = [
|
||||
{"name": "users", "through": "users_group"},
|
||||
{"name": "admins", "through": "admins_group"},
|
||||
]
|
||||
new = [
|
||||
{"name": "users", "through": "user_groups"},
|
||||
{"name": "admins", "through": "admin_groups"},
|
||||
]
|
||||
diffs = list(get_dict_diff_by_key(old, new))
|
||||
assert len(diffs) == 4
|
||||
assert diffs == [
|
||||
("remove", "", [(0, {"name": "users", "through": "users_group"})]),
|
||||
("remove", "", [(0, {"name": "admins", "through": "admins_group"})]),
|
||||
("add", "", [(0, {"name": "users", "through": "user_groups"})]),
|
||||
("add", "", [(0, {"name": "admins", "through": "admin_groups"})]),
|
||||
]
|
||||
|
||||
def test_same_field_name_with_different_orders(self) -> None:
|
||||
old = [
|
||||
{"name": "admins", "through": "admins_group"},
|
||||
{"name": "users", "through": "users_group"},
|
||||
]
|
||||
new = [
|
||||
{"name": "users", "through": "user_groups"},
|
||||
{"name": "admins", "through": "admin_groups"},
|
||||
]
|
||||
diffs = list(get_dict_diff_by_key(old, new))
|
||||
assert len(diffs) == 4
|
||||
assert diffs == [
|
||||
("remove", "", [(0, {"name": "admins", "through": "admins_group"})]),
|
||||
("remove", "", [(0, {"name": "users", "through": "users_group"})]),
|
||||
("add", "", [(0, {"name": "users", "through": "user_groups"})]),
|
||||
("add", "", [(0, {"name": "admins", "through": "admin_groups"})]),
|
||||
]
|
||||
|
||||
def test_drop_one(self) -> None:
|
||||
old = [
|
||||
{"name": "users", "through": "users_group"},
|
||||
{"name": "admins", "through": "admins_group"},
|
||||
]
|
||||
new = [
|
||||
{"name": "admins", "through": "admins_group"},
|
||||
]
|
||||
diffs = list(get_dict_diff_by_key(old, new))
|
||||
assert len(diffs) == 1
|
||||
assert diffs == [("remove", "", [(0, {"name": "users", "through": "users_group"})])]
|
||||
|
||||
def test_add_one(self) -> None:
|
||||
old = [
|
||||
{"name": "admins", "through": "admins_group"},
|
||||
]
|
||||
new = [
|
||||
{"name": "users", "through": "users_group"},
|
||||
{"name": "admins", "through": "admins_group"},
|
||||
]
|
||||
diffs = list(get_dict_diff_by_key(old, new))
|
||||
assert len(diffs) == 1
|
||||
assert diffs == [("add", "", [(0, {"name": "users", "through": "users_group"})])]
|
||||
|
||||
def test_drop_some(self) -> None:
|
||||
old = [
|
||||
{"name": "users", "through": "users_group"},
|
||||
{"name": "admins", "through": "admins_group"},
|
||||
{"name": "staffs", "through": "staffs_group"},
|
||||
]
|
||||
new = [
|
||||
{"name": "admins", "through": "admins_group"},
|
||||
]
|
||||
diffs = list(get_dict_diff_by_key(old, new))
|
||||
assert len(diffs) == 2
|
||||
assert diffs == [
|
||||
("remove", "", [(0, {"name": "users", "through": "users_group"})]),
|
||||
("remove", "", [(0, {"name": "staffs", "through": "staffs_group"})]),
|
||||
]
|
||||
|
||||
def test_add_some(self) -> None:
|
||||
old = [
|
||||
{"name": "staffs", "through": "staffs_group"},
|
||||
]
|
||||
new = [
|
||||
{"name": "users", "through": "users_group"},
|
||||
{"name": "admins", "through": "admins_group"},
|
||||
{"name": "staffs", "through": "staffs_group"},
|
||||
]
|
||||
diffs = list(get_dict_diff_by_key(old, new))
|
||||
assert len(diffs) == 2
|
||||
assert diffs == [
|
||||
("add", "", [(0, {"name": "users", "through": "users_group"})]),
|
||||
("add", "", [(0, {"name": "admins", "through": "admins_group"})]),
|
||||
]
|
||||
|
||||
def test_some_through_unchanged(self) -> None:
|
||||
old = [
|
||||
{"name": "staffs", "through": "staffs_group"},
|
||||
{"name": "admins", "through": "admins_group"},
|
||||
]
|
||||
new = [
|
||||
{"name": "users", "through": "users_group"},
|
||||
{"name": "admins_new", "through": "admins_group"},
|
||||
{"name": "staffs_new", "through": "staffs_group"},
|
||||
]
|
||||
diffs = list(get_dict_diff_by_key(old, new))
|
||||
assert len(diffs) == 3
|
||||
assert diffs == [
|
||||
("change", [0, "name"], ("staffs", "staffs_new")),
|
||||
("change", [0, "name"], ("admins", "admins_new")),
|
||||
("add", "", [(0, {"name": "users", "through": "users_group"})]),
|
||||
]
|
||||
|
||||
def test_some_unchanged_without_drop_or_add(self) -> None:
|
||||
old = [
|
||||
{"name": "staffs", "through": "staffs_group"},
|
||||
{"name": "admins", "through": "admins_group"},
|
||||
{"name": "users", "through": "users_group"},
|
||||
]
|
||||
new = [
|
||||
{"name": "users_new", "through": "users_group"},
|
||||
{"name": "admins_new", "through": "admins_group"},
|
||||
{"name": "staffs_new", "through": "staffs_group"},
|
||||
]
|
||||
diffs = list(get_dict_diff_by_key(old, new))
|
||||
assert len(diffs) == 3
|
||||
assert diffs == [
|
||||
("change", [0, "name"], ("staffs", "staffs_new")),
|
||||
("change", [0, "name"], ("admins", "admins_new")),
|
||||
("change", [0, "name"], ("users", "users_new")),
|
||||
]
|
Loading…
x
Reference in New Issue
Block a user