Compare commits

1 commit: 40d0823c01

This comparison runs from a newer revision to an older one: "-" lines show the newer side, "+" lines the older.
.github/workflows/ci.yml (vendored), 81 changes

@@ -18,67 +18,17 @@ jobs:
         POSTGRES_PASSWORD: 123456
         POSTGRES_USER: postgres
       options: --health-cmd=pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
-    strategy:
-      matrix:
-        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
-        tortoise-orm:
-          - tortoise021
-          - tortoise022
-          - tortoise023
-          - tortoise024
-          # TODO: add dev back when drop python3.8 support
-          # - tortoisedev
     steps:
       - name: Start MySQL
         run: sudo systemctl start mysql.service
-      - uses: actions/checkout@v4
-      - uses: actions/setup-python@v5
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v2
        with:
-          python-version: ${{ matrix.python-version }}
+          python-version: '3.x'
-      - uses: actions/cache@v4
-        with:
-          path: ~/.cache/pip
-          key: ${{ runner.os }}-pip-${{ hashFiles('**/poetry.lock') }}
-          restore-keys: |
-            ${{ runner.os }}-pip-
       - name: Install and configure Poetry
         run: |
-          pip install -U pip
-          if [[ "${{ matrix.python-version }}" == "3.8" ]]; then
-            # poetry2.0+ does not support installed by python3.8, but can manage project using py38
-            python3.12 -m pip install "poetry>=2.0"
-          else
-            pip install "poetry>=2.0"
-          fi
-          poetry env use python${{ matrix.python-version }}
-      - name: Install dependencies and check style
-        run: poetry run make check
-      - name: Install TortoiseORM v0.21
-        if: matrix.tortoise-orm == 'tortoise021'
-        run: poetry run pip install --upgrade "tortoise-orm>=0.21,<0.22"
-      - name: Install TortoiseORM v0.22
-        if: matrix.tortoise-orm == 'tortoise022'
-        run: poetry run pip install --upgrade "tortoise-orm>=0.22,<0.23"
-      - name: Install TortoiseORM v0.23
-        if: matrix.tortoise-orm == 'tortoise023'
-        run: poetry run pip install --upgrade "tortoise-orm>=0.23,<0.24"
-      - name: Install TortoiseORM v0.24
-        if: matrix.tortoise-orm == 'tortoise024'
-        run: |
-          if [[ "${{ matrix.python-version }}" == "3.8" ]]; then
-            echo "Skip test for tortoise v0.24 as it does not support Python3.8"
-          else
-            poetry run pip install --upgrade "tortoise-orm>=0.24,<0.25"
-          fi
-      - name: Install TortoiseORM develop branch
-        if: matrix.tortoise-orm == 'tortoisedev'
-        run: |
-          if [[ "${{ matrix.python-version }}" == "3.8" ]]; then
-            echo "Skip test for tortoise develop branch as it does not support Python3.8"
-          else
-            poetry run pip uninstall -y tortoise-orm
-            poetry run pip install --upgrade "git+https://github.com/tortoise/tortoise-orm"
-          fi
+          pip install -U pip poetry
+          poetry config virtualenvs.create false
       - name: CI
         env:
           MYSQL_PASS: root

@@ -87,23 +37,4 @@ jobs:
           POSTGRES_PASS: 123456
           POSTGRES_HOST: 127.0.0.1
           POSTGRES_PORT: 5432
-        run: poetry run make _testall
-      - name: Verify aiomysql support
-        # Only check the latest version of tortoise
-        if: matrix.tortoise-orm == 'tortoise024'
-        run: |
-          poetry run pip uninstall -y asyncmy
-          poetry run make test_mysql
-          poetry run pip install asyncmy
-        env:
-          MYSQL_PASS: root
-          MYSQL_HOST: 127.0.0.1
-          MYSQL_PORT: 3306
-      - name: Verify psycopg support
-        # Only check the latest version of tortoise
-        if: matrix.tortoise-orm == 'tortoise024'
-        run: poetry run make test_psycopg
-        env:
-          POSTGRES_PASS: 123456
-          POSTGRES_HOST: 127.0.0.1
-          POSTGRES_PORT: 5432
+        run: make ci
.github/workflows/pypi.yml (vendored), 4 changes

@@ -7,8 +7,8 @@ jobs:
   publish:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-python@v5
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v2
        with:
          python-version: '3.x'
      - name: Install and configure Poetry
CHANGELOG.md, 92 changes

@@ -1,98 +1,8 @@
 # ChangeLog
 
-## 0.8
-
-### [0.8.3]**(Unreleased)**
-
-#### Fixed
-- fix: `aerich init-db` process is suspended. ([#435])
-
-[#435]: https://github.com/tortoise/aerich/pull/435
-
-### [0.8.2](../../releases/tag/v0.8.2) - 2025-02-28
-
-#### Added
-- Support changes `max_length` or int type for primary key field. ([#428])
-- feat: support psycopg. ([#425])
-- Support run `poetry add aerich` in project that inited by poetry v2. ([#424])
-- feat: support command `python -m aerich`. ([#417])
-- feat: add --fake to upgrade/downgrade. ([#398])
-- Support ignore table by settings `managed=False` in `Meta` class. ([#397])
-
-#### Fixed
-- fix: aerich migrate raises tortoise.exceptions.FieldError when `index.INDEX_TYPE` is not empty. ([#415])
-- No migration occurs as expected when adding `unique=True` to indexed field. ([#404])
-- fix: inspectdb raise KeyError 'int2' for smallint. ([#401])
-- fix: inspectdb not match data type 'DOUBLE' and 'CHAR' for MySQL. ([#187])
-
-### Changed
-- Refactored version management to use `importlib.metadata.version(__package__)` instead of hardcoded version string ([#412])
-
-[#397]: https://github.com/tortoise/aerich/pull/397
-[#398]: https://github.com/tortoise/aerich/pull/398
-[#401]: https://github.com/tortoise/aerich/pull/401
-[#404]: https://github.com/tortoise/aerich/pull/404
-[#412]: https://github.com/tortoise/aerich/pull/412
-[#415]: https://github.com/tortoise/aerich/pull/415
-[#417]: https://github.com/tortoise/aerich/pull/417
-[#424]: https://github.com/tortoise/aerich/pull/424
-[#425]: https://github.com/tortoise/aerich/pull/425
-
-### [0.8.1](../../releases/tag/v0.8.1) - 2024-12-27
-
-#### Fixed
-- fix: add o2o field does not create constraint when migrating. ([#396])
-- Migration with duplicate renaming of columns in some cases. ([#395])
-- fix: intermediate table for m2m relation not created. ([#394])
-- Migrate add m2m field with custom through generate duplicated table. ([#393])
-- Migrate drop the wrong m2m field when model have multi m2m fields. ([#376])
-- KeyError raised when removing or renaming an existing model. ([#386])
-- fix: error when there is `__init__.py` in the migration folder. ([#272])
-- Setting null=false on m2m field causes migration to fail. ([#334])
-- Fix NonExistentKey when running `aerich init` without `[tool]` section in config file. ([#284])
-- Fix configuration file reading error when containing Chinese characters. ([#286])
-- sqlite: failed to create/drop index. ([#302])
-- PostgreSQL: Cannot drop constraint after deleting or rename FK on a model. ([#378])
-- Fix create/drop indexes in every migration. ([#377])
-- Sort m2m fields before comparing them with diff. ([#271])
-
-#### Changed
-- Allow run `aerich init-db` with empty migration directories instead of abort with warnings. ([#286])
-- Add version constraint(>=0.21) for tortoise-orm. ([#388])
-- Move `tomlkit` to optional and support `pip install aerich[toml]`. ([#392])
-
-[#396]: https://github.com/tortoise/aerich/pull/396
-[#395]: https://github.com/tortoise/aerich/pull/395
-[#394]: https://github.com/tortoise/aerich/pull/394
-[#393]: https://github.com/tortoise/aerich/pull/393
-[#392]: https://github.com/tortoise/aerich/pull/392
-[#388]: https://github.com/tortoise/aerich/pull/388
-[#386]: https://github.com/tortoise/aerich/pull/386
-[#378]: https://github.com/tortoise/aerich/pull/378
-[#377]: https://github.com/tortoise/aerich/pull/377
-[#376]: https://github.com/tortoise/aerich/pull/376
-[#334]: https://github.com/tortoise/aerich/pull/334
-[#302]: https://github.com/tortoise/aerich/pull/302
-[#286]: https://github.com/tortoise/aerich/pull/286
-[#284]: https://github.com/tortoise/aerich/pull/284
-[#272]: https://github.com/tortoise/aerich/pull/272
-[#271]: https://github.com/tortoise/aerich/pull/271
-
-### [0.8.0](../../releases/tag/v0.8.0) - 2024-12-04
-
-- Fix the issue of parameter concatenation when generating ORM with inspectdb (#331)
-- Fix KeyError when deleting a field with unqiue=True. (#364)
-- Correct the click import. (#360)
-- Improve CLI help text and output. (#355)
-- Fix mysql drop unique index raises OperationalError. (#346)
-
-**Upgrade note:**
-1. Use column name as unique key name for mysql
-2. Drop support for Python3.7
-
 ## 0.7
 
-### [0.7.2](../../releases/tag/v0.7.2) - 2023-07-20
+### 0.7.2
 
 - Support virtual fields.
 - Fix modify multiple times. (#279)
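The [#412] entry above replaces a hardcoded version string with a lookup of the installed package metadata. A minimal sketch of that pattern; the fallback literal is an assumption for illustration, not aerich's actual code:

```python
from importlib.metadata import PackageNotFoundError, version

try:
    # Resolve the version from the installed distribution metadata,
    # so pyproject.toml stays the single source of truth.
    __version__ = version("aerich")
except PackageNotFoundError:
    # Not installed, e.g. running from a raw source checkout (assumed fallback).
    __version__ = "0.0.0"
```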
Makefile, 44 changes

@@ -1,43 +1,32 @@
 checkfiles = aerich/ tests/ conftest.py
+black_opts = -l 100 -t py38
 py_warn = PYTHONDEVMODE=1
 MYSQL_HOST ?= "127.0.0.1"
 MYSQL_PORT ?= 3306
 MYSQL_PASS ?= "123456"
 POSTGRES_HOST ?= "127.0.0.1"
 POSTGRES_PORT ?= 5432
-POSTGRES_PASS ?= 123456
+POSTGRES_PASS ?= "123456"
 
 up:
 	@poetry update
 
 deps:
-	@poetry install --all-extras --all-groups
+	@poetry install -E asyncpg -E asyncmy
 
-_style:
-	@ruff check --fix $(checkfiles)
-	@ruff format $(checkfiles)
-style: deps _style
+style: deps
+	@isort -src $(checkfiles)
+	@black $(black_opts) $(checkfiles)
 
-_check:
-	@ruff format --check $(checkfiles) || (echo "Please run 'make style' to auto-fix style issues" && false)
-	@ruff check $(checkfiles)
-	@mypy $(checkfiles)
-	@bandit -r aerich
-check: deps _check
+check: deps
+	@black --check $(black_opts) $(checkfiles) || (echo "Please run 'make style' to auto-fix style issues" && false)
+	@ruff $(checkfiles)
 
-_lint: _build
-	@ruff format $(checkfiles)
-	ruff check --fix $(checkfiles)
-	mypy $(checkfiles)
-	bandit -c pyproject.toml -r $(checkfiles)
-	twine check dist/*
-lint: deps _lint
-
 test: deps
-	$(py_warn) TEST_DB=sqlite://:memory: pytest
+	$(py_warn) TEST_DB=sqlite://:memory: py.test
 
 test_sqlite:
-	$(py_warn) TEST_DB=sqlite://:memory: pytest
+	$(py_warn) TEST_DB=sqlite://:memory: py.test
 
 test_mysql:
 	$(py_warn) TEST_DB="mysql://root:$(MYSQL_PASS)@$(MYSQL_HOST):$(MYSQL_PORT)/test_\{\}" pytest -vv -s

@@ -45,14 +34,9 @@ test_mysql:
 test_postgres:
 	$(py_warn) TEST_DB="postgres://postgres:$(POSTGRES_PASS)@$(POSTGRES_HOST):$(POSTGRES_PORT)/test_\{\}" pytest -vv -s
 
-test_psycopg:
-	$(py_warn) TEST_DB="psycopg://postgres:$(POSTGRES_PASS)@$(POSTGRES_HOST):$(POSTGRES_PORT)/test_\{\}" pytest -vv -s
-
-_testall: test_sqlite test_postgres test_mysql
-testall: deps _testall
+testall: deps test_sqlite test_postgres test_mysql
 
-_build:
+build: deps
 	@poetry build
-build: deps _build
 
-ci: build _check _testall
+ci: check testall
README.md, 67 changes

@@ -17,7 +17,7 @@ it's own migration solution.
 Just install from pypi:
 
 ```shell
-pip install "aerich[toml]"
+pip install aerich
 ```
 
 ## Quick Start

@@ -46,7 +46,7 @@ Commands:
 
 ## Usage
 
-You need to add `aerich.models` to your `Tortoise-ORM` config first. Example:
+You need add `aerich.models` to your `Tortoise-ORM` config first. Example:
 
 ```python
 TORTOISE_ORM = {

@@ -113,14 +113,6 @@ If `aerich` guesses you are renaming a column, it will ask `Rename {old_column}
 `True` to rename column without column drop, or choose `False` to drop the column then create. Note that the latter may
 lose data.
 
-If you need to manually write migration, you could generate empty file:
-
-```shell
-> aerich migrate --name add_index --empty
-
-Success migrate 1_202326122220101229_add_index.py
-```
-
 ### Upgrade to latest version
 
 ```shell

@@ -226,14 +218,14 @@ from tortoise import Model, fields
 
 
 class Test(Model):
-    date = fields.DateField(null=True)
-    datetime = fields.DatetimeField(auto_now=True)
-    decimal = fields.DecimalField(max_digits=10, decimal_places=2)
-    float = fields.FloatField(null=True)
-    id = fields.IntField(primary_key=True)
-    string = fields.CharField(max_length=200, null=True)
-    time = fields.TimeField(null=True)
-    tinyint = fields.BooleanField(null=True)
+    date = fields.DateField(null=True, )
+    datetime = fields.DatetimeField(auto_now=True, )
+    decimal = fields.DecimalField(max_digits=10, decimal_places=2, )
+    float = fields.FloatField(null=True, )
+    id = fields.IntField(pk=True, )
+    string = fields.CharField(max_length=200, null=True, )
+    time = fields.TimeField(null=True, )
+    tinyint = fields.BooleanField(null=True, )
 ```
 
 Note that this command is limited and can't infer some fields, such as `IntEnumField`, `ForeignKeyField`, and others.

@@ -243,8 +235,8 @@ Note that this command is limited and can't infer some fields, such as `IntEnumField
 ```python
 tortoise_orm = {
     "connections": {
-        "default": "postgres://postgres_user:postgres_pass@127.0.0.1:5432/db1",
-        "second": "postgres://postgres_user:postgres_pass@127.0.0.1:5432/db2",
+        "default": expand_db_url(db_url, True),
+        "second": expand_db_url(db_url_second, True),
     },
     "apps": {
         "models": {"models": ["tests.models", "aerich.models"], "default_connection": "default"},

@@ -253,7 +245,7 @@ tortoise_orm = {
 }
 ```
 
-You only need to specify `aerich.models` in one app, and must specify `--app` when running `aerich migrate` and so on, e.g. `aerich --app models_second migrate`.
+You only need to specify `aerich.models` in one app, and must specify `--app` when running `aerich migrate` and so on.
 
 ## Restore `aerich` workflow
 

@@ -273,38 +265,11 @@ You can use `aerich` out of cli by use `Command` class.
 ```python
 from aerich import Command
 
-async with Command(tortoise_config=config, app='models') as command:
-    await command.migrate('test')
-    await command.upgrade()
+command = Command(tortoise_config=config, app='models')
+await command.init()
+await command.migrate('test')
 ```
 
-## Upgrade/Downgrade with `--fake` option
-
-Marks the migrations up to the latest one(or back to the target one) as applied, but without actually running the SQL to change your database schema.
-
-- Upgrade
-
-```bash
-aerich upgrade --fake
-aerich --app models upgrade --fake
-```
-
-- Downgrade
-
-```bash
-aerich downgrade --fake -v 2
-aerich --app models downgrade --fake -v 2
-```
-
-### Ignore tables
-
-You can tell aerich to ignore table by setting `managed=False` in the `Meta` class, e.g.:
-```py
-class MyModel(Model):
-    class Meta:
-        managed = False
-```
-**Note** `managed=False` does not recognized by `tortoise-orm` and `aerich init-db`, it is only for `aerich migrate`.
-
 ## License
 
 This project is licensed under the
aerich/__init__.py

@@ -1,13 +1,8 @@
-from __future__ import annotations
-
 import os
-import platform
-from contextlib import AbstractAsyncContextManager
 from pathlib import Path
-from typing import TYPE_CHECKING
+from typing import List
 
-import tortoise
-from tortoise import Tortoise, connections, generate_schema_for_client
+from tortoise import Tortoise, generate_schema_for_client
 from tortoise.exceptions import OperationalError
 from tortoise.transactions import in_transaction
 from tortoise.utils import get_schema_sql

@@ -25,147 +20,26 @@ from aerich.utils import (
     import_py_file,
 )
 
-if TYPE_CHECKING:
-    from tortoise import Model
-    from tortoise.fields.relational import ManyToManyFieldInstance  # NOQA:F401
-
-    from aerich.inspectdb import Inspect
-
-
-def _init_asyncio_patch():
-    """
-    Select compatible event loop for psycopg3.
-
-    As of Python 3.8+, the default event loop on Windows is `proactor`,
-    however psycopg3 requires the old default "selector" event loop.
-    See https://www.psycopg.org/psycopg3/docs/advanced/async.html
-    """
-    if platform.system() == "Windows":
-        try:
-            from asyncio import WindowsSelectorEventLoopPolicy  # type:ignore
-        except ImportError:
-            pass  # Can't assign a policy which doesn't exist.
-        else:
-            from asyncio import get_event_loop_policy, set_event_loop_policy
-
-            if not isinstance(get_event_loop_policy(), WindowsSelectorEventLoopPolicy):
-                set_event_loop_policy(WindowsSelectorEventLoopPolicy())
-
-
-def _init_tortoise_0_24_1_patch():
-    # this patch is for "tortoise-orm==0.24.1" to fix:
-    # https://github.com/tortoise/tortoise-orm/issues/1893
-    if tortoise.__version__ != "0.24.1":
-        return
-    from tortoise.backends.base.schema_generator import BaseSchemaGenerator, cast, re
-
-    def _get_m2m_tables(
-        self, model: type[Model], db_table: str, safe: bool, models_tables: list[str]
-    ) -> list[str]:  # Copied from tortoise-orm
-        m2m_tables_for_create = []
-        for m2m_field in model._meta.m2m_fields:
-            field_object = cast("ManyToManyFieldInstance", model._meta.fields_map[m2m_field])
-            if field_object._generated or field_object.through in models_tables:
-                continue
-            backward_key, forward_key = field_object.backward_key, field_object.forward_key
-            if field_object.db_constraint:
-                backward_fk = self._create_fk_string(
-                    "",
-                    backward_key,
-                    db_table,
-                    model._meta.db_pk_column,
-                    field_object.on_delete,
-                    "",
-                )
-                forward_fk = self._create_fk_string(
-                    "",
-                    forward_key,
-                    field_object.related_model._meta.db_table,
-                    field_object.related_model._meta.db_pk_column,
-                    field_object.on_delete,
-                    "",
-                )
-            else:
-                backward_fk = forward_fk = ""
-            exists = "IF NOT EXISTS " if safe else ""
-            through_table_name = field_object.through
-            backward_type = self._get_pk_field_sql_type(model._meta.pk)
-            forward_type = self._get_pk_field_sql_type(field_object.related_model._meta.pk)
-            comment = ""
-            if desc := field_object.description:
-                comment = self._table_comment_generator(table=through_table_name, comment=desc)
-            m2m_create_string = self.M2M_TABLE_TEMPLATE.format(
-                exists=exists,
-                table_name=through_table_name,
-                backward_fk=backward_fk,
-                forward_fk=forward_fk,
-                backward_key=backward_key,
-                backward_type=backward_type,
-                forward_key=forward_key,
-                forward_type=forward_type,
-                extra=self._table_generate_extra(table=field_object.through),
-                comment=comment,
-            )
-            if not field_object.db_constraint:
-                m2m_create_string = m2m_create_string.replace(
-                    """,
-    ,
-""",
-                    "",
-                )  # may have better way
-            m2m_create_string += self._post_table_hook()
-            if getattr(field_object, "create_unique_index", field_object.unique):
-                unique_index_create_sql = self._get_unique_index_sql(
-                    exists, through_table_name, [backward_key, forward_key]
-                )
-                if unique_index_create_sql.endswith(";"):
-                    m2m_create_string += "\n" + unique_index_create_sql
-                else:
-                    lines = m2m_create_string.splitlines()
-                    lines[-2] += ","
-                    indent = m.group() if (m := re.match(r"\s+", lines[-2])) else ""
-                    lines.insert(-1, indent + unique_index_create_sql)
-                    m2m_create_string = "\n".join(lines)
-            m2m_tables_for_create.append(m2m_create_string)
-        return m2m_tables_for_create
-
-    setattr(BaseSchemaGenerator, "_get_m2m_tables", _get_m2m_tables)
-
-
-_init_asyncio_patch()
-_init_tortoise_0_24_1_patch()
-
-
-class Command(AbstractAsyncContextManager):
+class Command:
     def __init__(
         self,
         tortoise_config: dict,
         app: str = "models",
         location: str = "./migrations",
-    ) -> None:
+    ):
         self.tortoise_config = tortoise_config
         self.app = app
         self.location = location
         Migrate.app = app
 
-    async def init(self) -> None:
+    async def init(self):
         await Migrate.init(self.tortoise_config, self.app, self.location)
 
-    async def __aenter__(self) -> Command:
-        await self.init()
-        return self
-
-    async def close(self) -> None:
-        await connections.close_all()
-
-    async def __aexit__(self, *args, **kw) -> None:
-        await self.close()
-
-    async def _upgrade(self, conn, version_file, fake: bool = False) -> None:
+    async def _upgrade(self, conn, version_file):
         file_path = Path(Migrate.migrate_location, version_file)
         m = import_py_file(file_path)
-        upgrade = m.upgrade
-        if not fake:
-            await conn.execute_script(await upgrade(conn))
+        upgrade = getattr(m, "upgrade")
+        await conn.execute_script(await upgrade(conn))
         await Aerich.create(
             version=version_file,

@@ -173,7 +47,7 @@ class Command(AbstractAsyncContextManager):
             content=get_models_describe(self.app),
         )
 
-    async def upgrade(self, run_in_transaction: bool = True, fake: bool = False) -> list[str]:
+    async def upgrade(self, run_in_transaction: bool = True):
         migrated = []
         for version_file in Migrate.get_all_version_files():
             try:

@@ -184,15 +58,15 @@ class Command(AbstractAsyncContextManager):
             app_conn_name = get_app_connection_name(self.tortoise_config, self.app)
             if run_in_transaction:
                 async with in_transaction(app_conn_name) as conn:
-                    await self._upgrade(conn, version_file, fake=fake)
+                    await self._upgrade(conn, version_file)
             else:
                 app_conn = get_app_connection(self.tortoise_config, self.app)
-                await self._upgrade(app_conn, version_file, fake=fake)
+                await self._upgrade(app_conn, version_file)
             migrated.append(version_file)
         return migrated
 
-    async def downgrade(self, version: int, delete: bool, fake: bool = False) -> list[str]:
-        ret: list[str] = []
+    async def downgrade(self, version: int, delete: bool):
+        ret = []
         if version == -1:
             specified_version = await Migrate.get_last_version()
         else:

@@ -205,26 +79,25 @@ class Command(AbstractAsyncContextManager):
             versions = [specified_version]
         else:
             versions = await Aerich.filter(app=self.app, pk__gte=specified_version.pk)
-        for version_obj in versions:
-            file = version_obj.version
+        for version in versions:
+            file = version.version
             async with in_transaction(
                 get_app_connection_name(self.tortoise_config, self.app)
             ) as conn:
                 file_path = Path(Migrate.migrate_location, file)
                 m = import_py_file(file_path)
-                downgrade = m.downgrade
+                downgrade = getattr(m, "downgrade")
                 downgrade_sql = await downgrade(conn)
                 if not downgrade_sql.strip():
                     raise DowngradeError("No downgrade items found")
-                if not fake:
-                    await conn.execute_script(downgrade_sql)
-                await version_obj.delete()
+                await conn.execute_script(downgrade_sql)
+                await version.delete()
                 if delete:
                     os.unlink(file_path)
                 ret.append(file)
         return ret
 
-    async def heads(self) -> list[str]:
+    async def heads(self):
         ret = []
         versions = Migrate.get_all_version_files()
         for version in versions:

@@ -232,15 +105,15 @@ class Command(AbstractAsyncContextManager):
             ret.append(version)
         return ret
 
-    async def history(self) -> list[str]:
+    async def history(self):
         versions = Migrate.get_all_version_files()
         return [version for version in versions]
 
-    async def inspectdb(self, tables: list[str] | None = None) -> str:
+    async def inspectdb(self, tables: List[str] = None) -> str:
         connection = get_app_connection(self.tortoise_config, self.app)
         dialect = connection.schema_generator.DIALECT
         if dialect == "mysql":
-            cls: type[Inspect] = InspectMySQL
+            cls = InspectMySQL
         elif dialect == "postgres":
             cls = InspectPostgres
         elif dialect == "sqlite":

@@ -250,19 +123,14 @@ class Command(AbstractAsyncContextManager):
         inspect = cls(connection, tables)
         return await inspect.inspect()
 
-    async def migrate(self, name: str = "update", empty: bool = False) -> str:
-        return await Migrate.migrate(name, empty)
+    async def migrate(self, name: str = "update"):
+        return await Migrate.migrate(name)
 
-    async def init_db(self, safe: bool) -> None:
+    async def init_db(self, safe: bool):
         location = self.location
         app = self.app
         dirname = Path(location, app)
-        if not dirname.exists():
-            dirname.mkdir(parents=True)
-        else:
-            # If directory is empty, go ahead, otherwise raise FileExistsError
-            for unexpected_file in dirname.glob("*"):
-                raise FileExistsError(str(unexpected_file))
+        dirname.mkdir(parents=True)
 
         await Tortoise.init(config=self.tortoise_config)
         connection = get_app_connection(self.tortoise_config, app)
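The `AbstractAsyncContextManager` base on the "-" side is what backs the `async with` usage documented in the README diff above: `__aenter__` calls `init()` and `__aexit__` closes all Tortoise connections. A minimal usage sketch, assuming a models module named `models` and the sqlite URL from the README's quick start:

```python
import asyncio

from aerich import Command

TORTOISE_ORM = {
    "connections": {"default": "sqlite://db.sqlite3"},
    "apps": {
        "models": {"models": ["models", "aerich.models"], "default_connection": "default"},
    },
}


async def main() -> None:
    # __aenter__ runs init(); __aexit__ closes every open connection.
    async with Command(tortoise_config=TORTOISE_ORM, app="models") as command:
        await command.migrate("update")
        await command.upgrade(run_in_transaction=True)


asyncio.run(main())
```

On the "+" side the same flow needs an explicit `await command.init()`, and nothing closes the connections for you.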
aerich/__main__.py

@@ -1,3 +0,0 @@
-from .cli import main
-
-main()
aerich/_compat.py

@@ -1,28 +0,0 @@
-# mypy: disable-error-code="no-redef"
-from __future__ import annotations
-
-import sys
-from types import ModuleType
-
-import tortoise
-
-if sys.version_info >= (3, 11):
-    import tomllib
-else:
-    try:
-        import tomli as tomllib
-    except ImportError:
-        import tomlkit as tomllib
-
-
-def imports_tomlkit() -> ModuleType:
-    try:
-        import tomli_w as tomlkit
-    except ImportError:
-        import tomlkit
-    return tomlkit
-
-
-def tortoise_version_less_than(version: str) -> bool:
-    # The min version of tortoise is '0.11.0', so we can compare it by a `<`,
-    return tortoise.__version__ < version
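This removed module centralizes the TOML read/write split that `aerich/cli.py` (next file) relies on: parsing may come from stdlib `tomllib`, `tomli`, or `tomlkit`, while writing goes through `tomli_w` or `tomlkit`, both of which expose `dumps()`. A small round-trip sketch; the file name and table contents here are illustrative assumptions:

```python
from pathlib import Path

from aerich._compat import imports_tomlkit, tomllib

config_path = Path("pyproject.toml")  # assumed config location

# Read with whichever tomllib-compatible parser was imported.
doc: dict = tomllib.loads(config_path.read_text("utf-8"))

# Write back with tomli_w if available, otherwise tomlkit.
tomlkit = imports_tomlkit()
doc.setdefault("tool", {})["aerich"] = {"location": "./migrations"}
config_path.write_text(tomlkit.dumps(doc))
```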
aerich/cli.py, 203 changes

@@ -1,14 +1,16 @@
-from __future__ import annotations
+import asyncio
 
 import os
+from functools import wraps
 from pathlib import Path
-from typing import cast
+from typing import List
 
-import asyncclick as click
-from asyncclick import Context, UsageError
+import click
+import tomlkit
+from click import Context, UsageError
+from tomlkit.exceptions import NonExistentKey
+from tortoise import Tortoise
 
 from aerich import Command
-from aerich._compat import imports_tomlkit, tomllib
 from aerich.enums import Color
 from aerich.exceptions import DowngradeError
 from aerich.utils import add_src_path, get_tortoise_config

@@ -19,20 +21,19 @@ CONFIG_DEFAULT_VALUES = {
 }
 
 
-def _patch_context_to_close_tortoise_connections_when_exit() -> None:
-    from tortoise import Tortoise, connections
-
-    origin_aexit = Context.__aexit__
-
-    async def aexit(*args, **kw) -> None:
-        await origin_aexit(*args, **kw)
-        if Tortoise._inited:
-            await connections.close_all()
-
-    Context.__aexit__ = aexit  # type:ignore[method-assign]
-
-
-_patch_context_to_close_tortoise_connections_when_exit()
+def coro(f):
+    @wraps(f)
+    def wrapper(*args, **kwargs):
+        loop = asyncio.get_event_loop()
+        # Close db connections at the end of all but the cli group function
+        try:
+            loop.run_until_complete(f(*args, **kwargs))
+        finally:
+            if f.__name__ not in ["cli", "init"]:
+                loop.run_until_complete(Tortoise.close_connections())
+
+    return wrapper
 
 
 @click.group(context_settings={"help_option_names": ["-h", "--help"]})

@@ -46,7 +47,8 @@ _patch_context_to_close_tortoise_connections_when_exit()
 )
 @click.option("--app", required=False, help="Tortoise-ORM app name.")
 @click.pass_context
-async def cli(ctx: Context, config, app) -> None:
+@coro
+async def cli(ctx: Context, config, app):
     ctx.ensure_object(dict)
     ctx.obj["config_file"] = config
 

@@ -54,78 +56,57 @@ async def cli(ctx: Context, config, app) -> None:
     if invoked_subcommand != "init":
         config_path = Path(config)
         if not config_path.exists():
-            raise UsageError(
-                "You need to run `aerich init` first to create the config file.", ctx=ctx
-            )
-        content = config_path.read_text("utf-8")
-        doc: dict = tomllib.loads(content)
+            raise UsageError("You must exec init first", ctx=ctx)
+        content = config_path.read_text()
+        doc = tomlkit.parse(content)
         try:
-            tool = cast("dict[str, str]", doc["tool"]["aerich"])
+            tool = doc["tool"]["aerich"]
             location = tool["location"]
             tortoise_orm = tool["tortoise_orm"]
             src_folder = tool.get("src_folder", CONFIG_DEFAULT_VALUES["src_folder"])
-        except KeyError as e:
-            raise UsageError(
-                "You need run `aerich init` again when upgrading to aerich 0.6.0+."
-            ) from e
+        except NonExistentKey:
+            raise UsageError("You need run aerich init again when upgrade to 0.6.0+")
         add_src_path(src_folder)
         tortoise_config = get_tortoise_config(ctx, tortoise_orm)
-        if not app:
-            try:
-                apps_config = cast(dict, tortoise_config["apps"])
-            except KeyError:
-                raise UsageError('Config must define "apps" section')
-            app = list(apps_config.keys())[0]
+        app = app or list(tortoise_config.get("apps").keys())[0]
         command = Command(tortoise_config=tortoise_config, app=app, location=location)
         ctx.obj["command"] = command
         if invoked_subcommand != "init-db":
             if not Path(location, app).exists():
-                raise UsageError(
-                    "You need to run `aerich init-db` first to initialize the database.", ctx=ctx
-                )
+                raise UsageError("You must exec init-db first", ctx=ctx)
             await command.init()
 
 
-@cli.command(help="Generate a migration file for the current state of the models.")
-@click.option("--name", default="update", show_default=True, help="Migration name.")
-@click.option("--empty", default=False, is_flag=True, help="Generate an empty migration file.")
+@cli.command(help="Generate migrate changes file.")
+@click.option("--name", default="update", show_default=True, help="Migrate name.")
 @click.pass_context
-async def migrate(ctx: Context, name, empty) -> None:
+@coro
+async def migrate(ctx: Context, name):
     command = ctx.obj["command"]
-    ret = await command.migrate(name, empty)
+    ret = await command.migrate(name)
     if not ret:
         return click.secho("No changes detected", fg=Color.yellow)
-    click.secho(f"Success creating migration file {ret}", fg=Color.green)
+    click.secho(f"Success migrate {ret}", fg=Color.green)
 
 
-@cli.command(help="Upgrade to specified migration version.")
+@cli.command(help="Upgrade to specified version.")
 @click.option(
     "--in-transaction",
     "-i",
     default=True,
     type=bool,
-    help="Make migrations in a single transaction or not. Can be helpful for large migrations or creating concurrent indexes.",
-)
-@click.option(
-    "--fake",
-    default=False,
-    is_flag=True,
-    help="Mark migrations as run without actually running them.",
+    help="Make migrations in transaction or not. Can be helpful for large migrations or creating concurrent indexes.",
 )
 @click.pass_context
-async def upgrade(ctx: Context, in_transaction: bool, fake: bool) -> None:
+@coro
+async def upgrade(ctx: Context, in_transaction: bool):
     command = ctx.obj["command"]
-    migrated = await command.upgrade(run_in_transaction=in_transaction, fake=fake)
+    migrated = await command.upgrade(run_in_transaction=in_transaction)
     if not migrated:
         click.secho("No upgrade items found", fg=Color.yellow)
     else:
         for version_file in migrated:
-            if fake:
-                click.echo(
-                    f"Upgrading to {version_file}... " + click.style("FAKED", fg=Color.green)
-                )
-            else:
-                click.secho(f"Success upgrading to {version_file}", fg=Color.green)
+            click.secho(f"Success upgrade {version_file}", fg=Color.green)
 
 
 @cli.command(help="Downgrade to specified version.")

@@ -134,8 +115,8 @@ async def upgrade(ctx: Context, in_transaction: bool, fake: bool) -> None:
     "--version",
     default=-1,
     type=int,
-    show_default=False,
-    help="Specified version, default to last migration.",
+    show_default=True,
+    help="Specified version, default to last.",
 )
 @click.option(
     "-d",

@@ -143,75 +124,59 @@ async def upgrade(ctx: Context, in_transaction: bool, fake: bool) -> None:
     is_flag=True,
     default=False,
     show_default=True,
-    help="Also delete the migration files.",
-)
-@click.option(
-    "--fake",
-    default=False,
-    is_flag=True,
-    help="Mark migrations as run without actually running them.",
+    help="Delete version files at the same time.",
 )
 @click.pass_context
 @click.confirmation_option(
-    prompt="Downgrade is dangerous: you might lose your data! Are you sure?",
+    prompt="Downgrade is dangerous, which maybe lose your data, are you sure?",
 )
-async def downgrade(ctx: Context, version: int, delete: bool, fake: bool) -> None:
+@coro
+async def downgrade(ctx: Context, version: int, delete: bool):
     command = ctx.obj["command"]
     try:
-        files = await command.downgrade(version, delete, fake=fake)
+        files = await command.downgrade(version, delete)
     except DowngradeError as e:
         return click.secho(str(e), fg=Color.yellow)
     for file in files:
-        if fake:
-            click.echo(f"Downgrading to {file}... " + click.style("FAKED", fg=Color.green))
-        else:
-            click.secho(f"Success downgrading to {file}", fg=Color.green)
+        click.secho(f"Success downgrade {file}", fg=Color.green)
 
 
-@cli.command(help="Show currently available heads (unapplied migrations).")
+@cli.command(help="Show current available heads in migrate location.")
 @click.pass_context
-async def heads(ctx: Context) -> None:
+@coro
+async def heads(ctx: Context):
     command = ctx.obj["command"]
     head_list = await command.heads()
     if not head_list:
-        return click.secho("No available heads.", fg=Color.green)
+        return click.secho("No available heads, try migrate first", fg=Color.green)
     for version in head_list:
         click.secho(version, fg=Color.green)
 
 
-@cli.command(help="List all migrations.")
+@cli.command(help="List all migrate items.")
 @click.pass_context
-async def history(ctx: Context) -> None:
+@coro
+async def history(ctx: Context):
     command = ctx.obj["command"]
     versions = await command.history()
     if not versions:
-        return click.secho("No migrations created yet.", fg=Color.green)
+        return click.secho("No history, try migrate", fg=Color.green)
     for version in versions:
         click.secho(version, fg=Color.green)
 
 
-def _write_config(config_path, doc, table) -> None:
-    tomlkit = imports_tomlkit()
-
-    try:
-        doc["tool"]["aerich"] = table
-    except KeyError:
-        doc["tool"] = {"aerich": table}
-    config_path.write_text(tomlkit.dumps(doc))
-
-
-@cli.command(help="Initialize aerich config and create migrations folder.")
+@cli.command(help="Init config file and generate root migrate location.")
 @click.option(
     "-t",
     "--tortoise-orm",
     required=True,
-    help="Tortoise-ORM config dict location, like `settings.TORTOISE_ORM`.",
+    help="Tortoise-ORM config module dict variable, like settings.TORTOISE_ORM.",
 )
 @click.option(
     "--location",
     default="./migrations",
     show_default=True,
-    help="Migrations folder.",
+    help="Migrate store location.",
 )
 @click.option(
     "-s",

@@ -221,7 +186,8 @@ def _write_config(config_path, doc, table) -> None:
     help="Folder of the source, relative to the project root.",
 )
 @click.pass_context
-async def init(ctx: Context, tortoise_orm, location, src_folder) -> None:
+@coro
+async def init(ctx: Context, tortoise_orm, location, src_folder):
     config_file = ctx.obj["config_file"]
 
     if os.path.isabs(src_folder):

@@ -234,48 +200,52 @@ async def init(ctx: Context, tortoise_orm, location, src_folder) -> None:
     add_src_path(src_folder)
     get_tortoise_config(ctx, tortoise_orm)
     config_path = Path(config_file)
-    content = config_path.read_text("utf-8") if config_path.exists() else "[tool.aerich]"
-    doc: dict = tomllib.loads(content)
-
-    table = {"tortoise_orm": tortoise_orm, "location": location, "src_folder": src_folder}
-    if (aerich_config := doc.get("tool", {}).get("aerich")) and all(
-        aerich_config.get(k) == v for k, v in table.items()
-    ):
-        click.echo(f"Aerich config {config_file} already inited.")
+    if config_path.exists():
+        content = config_path.read_text()
+        doc = tomlkit.parse(content)
     else:
-        _write_config(config_path, doc, table)
-        click.secho(f"Success writing aerich config to {config_file}", fg=Color.green)
+        doc = tomlkit.parse("[tool.aerich]")
+    table = tomlkit.table()
+    table["tortoise_orm"] = tortoise_orm
+    table["location"] = location
+    table["src_folder"] = src_folder
+    doc["tool"]["aerich"] = table
+
+    config_path.write_text(tomlkit.dumps(doc))
 
     Path(location).mkdir(parents=True, exist_ok=True)
-    click.secho(f"Success creating migrations folder {location}", fg=Color.green)
+
+    click.secho(f"Success create migrate location {location}", fg=Color.green)
+    click.secho(f"Success write config to {config_file}", fg=Color.green)
 
 
-@cli.command(help="Generate schema and generate app migration folder.")
+@cli.command(help="Generate schema and generate app migrate location.")
 @click.option(
     "-s",
     "--safe",
     type=bool,
     is_flag=True,
     default=True,
-    help="Create tables only when they do not already exist.",
+    help="When set to true, creates the table only when it does not already exist.",
     show_default=True,
 )
 @click.pass_context
-async def init_db(ctx: Context, safe: bool) -> None:
+@coro
+async def init_db(ctx: Context, safe: bool):
     command = ctx.obj["command"]
     app = command.app
     dirname = Path(command.location, app)
     try:
         await command.init_db(safe)
-        click.secho(f"Success creating app migration folder {dirname}", fg=Color.green)
-        click.secho(f'Success generating initial migration file for app "{app}"', fg=Color.green)
+        click.secho(f"Success create app migrate location {dirname}", fg=Color.green)
+        click.secho(f'Success generate schema for app "{app}"', fg=Color.green)
     except FileExistsError:
         return click.secho(
-            f"App {app} is already initialized. Delete {dirname} and try again.", fg=Color.yellow
+            f"Inited {app} already, or delete {dirname} and try again.", fg=Color.yellow
        )
 
 
-@cli.command(help="Prints the current database tables to stdout as Tortoise-ORM models.")
+@cli.command(help="Introspects the database tables to standard output as TortoiseORM model.")
 @click.option(
     "-t",
     "--table",

@@ -284,13 +254,14 @@ async def init_db(ctx: Context, safe: bool) -> None:
     required=False,
 )
 @click.pass_context
-async def inspectdb(ctx: Context, table: list[str]) -> None:
+@coro
+async def inspectdb(ctx: Context, table: List[str]):
     command = ctx.obj["command"]
     ret = await command.inspectdb(table)
     click.secho(ret)
 
 
-def main() -> None:
+def main():
     cli()
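Putting the CLI flag together with the `Command` changes shown earlier: on the "-" side, `--fake` skips executing the migration SQL while still recording the version in the `aerich` history table. A condensed, free-standing sketch of that logic; the `app` field of `Aerich.create` is inferred from aerich's `Aerich` model (the middle kwargs are elided by a hunk boundary in the diff above), so treat it as an assumption:

```python
from pathlib import Path

from aerich.migrate import Migrate
from aerich.models import Aerich
from aerich.utils import get_models_describe, import_py_file


async def fake_aware_upgrade(conn, version_file: str, app: str, fake: bool = False) -> None:
    # Condensed from the "-" side of Command._upgrade.
    m = import_py_file(Path(Migrate.migrate_location, version_file))
    if not fake:
        # Normal upgrade: actually run the migration SQL.
        await conn.execute_script(await m.upgrade(conn))
    # Either way, mark the version as applied in the history table.
    await Aerich.create(
        version=version_file,
        app=app,  # assumption: field name taken from aerich.models.Aerich
        content=get_models_describe(app),
    )
```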
aerich/coder.py

@@ -1,19 +1,13 @@
-from __future__ import annotations
-
 import base64
 import json
 import pickle  # nosec: B301,B403
-from typing import Any
 
 from tortoise.indexes import Index
 
 
 class JsonEncoder(json.JSONEncoder):
-    def default(self, obj) -> Any:
+    def default(self, obj):
         if isinstance(obj, Index):
-            if hasattr(obj, "describe"):
-                # For tortoise>=0.24
-                return obj.describe()
             return {
                 "type": "index",
                 "val": base64.b64encode(pickle.dumps(obj)).decode(),  # nosec: B301

@@ -22,28 +16,16 @@ class JsonEncoder(json.JSONEncoder):
         return super().default(obj)
 
 
-def object_hook(obj) -> Any:
-    if (type_ := obj.get("type")) and type_ == "index" and (val := obj.get("val")):
-        return pickle.loads(base64.b64decode(val))  # nosec: B301
-    return obj
+def object_hook(obj):
+    _type = obj.get("type")
+    if not _type:
+        return obj
+    return pickle.loads(base64.b64decode(obj["val"]))  # nosec: B301
 
 
-def load_index(obj: dict) -> Index:
-    """Convert a dict that generated by `Index.decribe()` to a Index instance"""
-    try:
-        index = Index(fields=obj["fields"] or obj["expressions"], name=obj.get("name"))
-    except KeyError:
-        return object_hook(obj)
-    if extra := obj.get("extra"):
-        index.extra = extra
-    if idx_type := obj.get("type"):
-        index.INDEX_TYPE = idx_type
-    return index
-
-
-def encoder(obj: dict) -> str:
+def encoder(obj: dict):
     return json.dumps(obj, cls=JsonEncoder)
 
 
-def decoder(obj: str | bytes) -> Any:
+def decoder(obj: str):
     return json.loads(obj, object_hook=object_hook)
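The "-" side stores `Index.describe()` output as plain JSON on tortoise>=0.24 and adds `load_index` to turn such a dict back into an `Index`, while the "+" side always round-trips through base64-encoded pickle. A small sketch of the newer path, assuming the "-" side of `aerich/coder.py` is installed and a tortoise version that provides `Index.describe()`:

```python
from tortoise.indexes import Index

from aerich.coder import load_index

idx = Index(fields=("name", "email"), name="idx_user_name_email")

# On tortoise>=0.24, describe() yields a plain dict (fields, name, extra, type, ...).
described = idx.describe()

# load_index rebuilds an equivalent Index from that dict.
rebuilt = load_index(described)
print(rebuilt.fields, getattr(rebuilt, "INDEX_TYPE", ""))
```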
@@ -1,20 +1,14 @@
-from __future__ import annotations
 
-import re
 from enum import Enum
-from typing import TYPE_CHECKING, Any, cast
+from typing import List, Type
 
+from tortoise import BaseDBAsyncClient, Model
 from tortoise.backends.base.schema_generator import BaseSchemaGenerator
 
-from aerich._compat import tortoise_version_less_than
 from aerich.utils import is_default_function
 
-if TYPE_CHECKING:
-    from tortoise import BaseDBAsyncClient, Model
 
 
 class BaseDDL:
-    schema_generator_cls: type[BaseSchemaGenerator] = BaseSchemaGenerator
+    schema_generator_cls: Type[BaseSchemaGenerator] = BaseSchemaGenerator
     DIALECT = "sql"
     _DROP_TABLE_TEMPLATE = 'DROP TABLE IF EXISTS "{table_name}"'
     _ADD_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" ADD {column}'
@@ -23,8 +17,10 @@ class BaseDDL:
     _RENAME_COLUMN_TEMPLATE = (
         'ALTER TABLE "{table_name}" RENAME COLUMN "{old_column_name}" TO "{new_column_name}"'
     )
-    _ADD_INDEX_TEMPLATE = 'ALTER TABLE "{table_name}" ADD {index_type}{unique}INDEX "{index_name}" ({column_names}){extra}'
-    _DROP_INDEX_TEMPLATE = 'ALTER TABLE "{table_name}" DROP INDEX IF EXISTS "{index_name}"'
+    _ADD_INDEX_TEMPLATE = (
+        'ALTER TABLE "{table_name}" ADD {unique}INDEX "{index_name}" ({column_names})'
+    )
+    _DROP_INDEX_TEMPLATE = 'ALTER TABLE "{table_name}" DROP INDEX "{index_name}"'
     _ADD_FK_TEMPLATE = 'ALTER TABLE "{table_name}" ADD CONSTRAINT "{fk_name}" FOREIGN KEY ("{db_column}") REFERENCES "{table}" ("{field}") ON DELETE {on_delete}'
     _DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP FOREIGN KEY "{fk_name}"'
     _M2M_TABLE_TEMPLATE = (
@@ -39,32 +35,25 @@ class BaseDDL:
     )
     _RENAME_TABLE_TEMPLATE = 'ALTER TABLE "{old_table_name}" RENAME TO "{new_table_name}"'
 
-    def __init__(self, client: BaseDBAsyncClient) -> None:
+    def __init__(self, client: "BaseDBAsyncClient"):
         self.client = client
         self.schema_generator = self.schema_generator_cls(client)
 
-    @staticmethod
-    def get_table_name(model: type[Model]) -> str:
-        return model._meta.db_table
-
-    def create_table(self, model: type[Model]) -> str:
-        schema = self.schema_generator._get_table_sql(model, True)["table_creation_string"]
-        if tortoise_version_less_than("0.23.1"):
-            # Remove extra space
-            schema = re.sub(r'(["()A-Za-z])  (["()A-Za-z])', r"\1 \2", schema)
-        return schema.rstrip(";")
-
-    def drop_table(self, table_name: str) -> str:
+    def create_table(self, model: "Type[Model]"):
+        return self.schema_generator._get_table_sql(model, True)["table_creation_string"].rstrip(
+            ";"
+        )
+
+    def drop_table(self, table_name: str):
         return self._DROP_TABLE_TEMPLATE.format(table_name=table_name)
 
     def create_m2m(
-        self, model: type[Model], field_describe: dict, reference_table_describe: dict
-    ) -> str:
-        through = cast(str, field_describe.get("through"))
+        self, model: "Type[Model]", field_describe: dict, reference_table_describe: dict
+    ):
+        through = field_describe.get("through")
         description = field_describe.get("description")
-        pk_field = cast(dict, reference_table_describe.get("pk_field"))
-        reference_id = pk_field.get("db_column")
-        db_field_types = cast(dict, pk_field.get("db_field_types"))
+        reference_id = reference_table_describe.get("pk_field").get("db_column")
+        db_field_types = reference_table_describe.get("pk_field").get("db_field_types")
         return self._M2M_TABLE_TEMPLATE.format(
             table_name=through,
             backward_table=model._meta.db_table,
@@ -77,22 +66,22 @@ class BaseDDL:
             forward_type=db_field_types.get(self.DIALECT) or db_field_types.get(""),
             on_delete=field_describe.get("on_delete"),
             extra=self.schema_generator._table_generate_extra(table=through),
-            comment=(
-                self.schema_generator._table_comment_generator(table=through, comment=description)
-                if description
-                else ""
-            ),
+            comment=self.schema_generator._table_comment_generator(
+                table=through, comment=description
+            )
+            if description
+            else "",
         )
 
-    def drop_m2m(self, table_name: str) -> str:
+    def drop_m2m(self, table_name: str):
         return self._DROP_TABLE_TEMPLATE.format(table_name=table_name)
 
-    def _get_default(self, model: type[Model], field_describe: dict) -> Any:
+    def _get_default(self, model: "Type[Model]", field_describe: dict):
         db_table = model._meta.db_table
         default = field_describe.get("default")
         if isinstance(default, Enum):
             default = default.value
-        db_column = cast(str, field_describe.get("db_column"))
+        db_column = field_describe.get("db_column")
         auto_now_add = field_describe.get("auto_now_add", False)
         auto_now = field_describe.get("auto_now", False)
         if default is not None or auto_now_add:
@@ -113,58 +102,68 @@ class BaseDDL:
                 )
             except NotImplementedError:
                 default = ""
+        else:
+            default = None
         return default
 
-    def add_column(self, model: type[Model], field_describe: dict, is_pk: bool = False) -> str:
-        return self._add_or_modify_column(model, field_describe, is_pk)
-
-    def _add_or_modify_column(
-        self, model: type[Model], field_describe: dict, is_pk: bool, modify: bool = False
-    ) -> str:
+    def add_column(self, model: "Type[Model]", field_describe: dict, is_pk: bool = False):
         db_table = model._meta.db_table
         description = field_describe.get("description")
-        db_column = cast(str, field_describe.get("db_column"))
-        db_field_types = cast(dict, field_describe.get("db_field_types"))
+        db_column = field_describe.get("db_column")
+        db_field_types = field_describe.get("db_field_types")
         default = self._get_default(model, field_describe)
         if default is None:
             default = ""
-        if modify:
-            unique = ""
-            template = self._MODIFY_COLUMN_TEMPLATE
-        else:
-            # sqlite does not support alter table to add unique column
-            unique = " UNIQUE" if field_describe.get("unique") and self.DIALECT != "sqlite" else ""
-            template = self._ADD_COLUMN_TEMPLATE
-        column = self.schema_generator._create_string(
-            db_column=db_column,
-            field_type=db_field_types.get(self.DIALECT, db_field_types.get("")),
-            nullable=" NOT NULL" if not field_describe.get("nullable") else "",
-            unique=unique,
-            comment=(
-                self.schema_generator._column_comment_generator(
-                    table=db_table,
-                    column=db_column,
-                    comment=description,
-                )
-                if description
-                else ""
-            ),
-            is_primary_key=is_pk,
-            default=default,
-        )
-        if tortoise_version_less_than("0.23.1"):
-            column = column.replace("  ", " ")
-        return template.format(table_name=db_table, column=column)
+        return self._ADD_COLUMN_TEMPLATE.format(
+            table_name=db_table,
+            column=self.schema_generator._create_string(
+                db_column=db_column,
+                field_type=db_field_types.get(self.DIALECT, db_field_types.get("")),
+                nullable="NOT NULL" if not field_describe.get("nullable") else "",
+                unique="UNIQUE" if field_describe.get("unique") else "",
+                comment=self.schema_generator._column_comment_generator(
+                    table=db_table,
+                    column=db_column,
+                    comment=field_describe.get("description"),
+                )
+                if description
+                else "",
+                is_primary_key=is_pk,
+                default=default,
+            ),
+        )
 
-    def drop_column(self, model: type[Model], column_name: str) -> str:
+    def drop_column(self, model: "Type[Model]", column_name: str):
         return self._DROP_COLUMN_TEMPLATE.format(
             table_name=model._meta.db_table, column_name=column_name
         )
 
-    def modify_column(self, model: type[Model], field_describe: dict, is_pk: bool = False) -> str:
-        return self._add_or_modify_column(model, field_describe, is_pk, modify=True)
+    def modify_column(self, model: "Type[Model]", field_describe: dict, is_pk: bool = False):
+        db_table = model._meta.db_table
+        db_field_types = field_describe.get("db_field_types")
+        default = self._get_default(model, field_describe)
+        if default is None:
+            default = ""
+        return self._MODIFY_COLUMN_TEMPLATE.format(
+            table_name=db_table,
+            column=self.schema_generator._create_string(
+                db_column=field_describe.get("db_column"),
+                field_type=db_field_types.get(self.DIALECT) or db_field_types.get(""),
+                nullable="NOT NULL" if not field_describe.get("nullable") else "",
+                unique="",
+                comment=self.schema_generator._column_comment_generator(
+                    table=db_table,
+                    column=field_describe.get("db_column"),
+                    comment=field_describe.get("description"),
+                )
+                if field_describe.get("description")
+                else "",
+                is_primary_key=is_pk,
+                default=default,
+            ),
+        )
 
-    def rename_column(self, model: type[Model], old_column_name: str, new_column_name: str) -> str:
+    def rename_column(self, model: "Type[Model]", old_column_name: str, new_column_name: str):
         return self._RENAME_COLUMN_TEMPLATE.format(
             table_name=model._meta.db_table,
             old_column_name=old_column_name,
@@ -172,8 +171,8 @@ class BaseDDL:
         )
 
     def change_column(
-        self, model: type[Model], old_column_name: str, new_column_name: str, new_column_type: str
-    ) -> str:
+        self, model: "Type[Model]", old_column_name: str, new_column_name: str, new_column_type: str
+    ):
         return self._CHANGE_COLUMN_TEMPLATE.format(
             table_name=model._meta.db_table,
             old_column_name=old_column_name,
@@ -181,92 +180,63 @@ class BaseDDL:
             new_column_type=new_column_type,
         )
 
-    def _index_name(self, unique: bool | None, model: type[Model], field_names: list[str]) -> str:
-        func_name = "_get_index_name"
-        if not hasattr(self.schema_generator, func_name):
-            # For tortoise-orm<0.24.1
-            func_name = "_generate_index_name"
-        return getattr(self.schema_generator, func_name)(
-            "idx" if not unique else "uid", model, field_names
-        )
-
-    def add_index(
-        self,
-        model: type[Model],
-        field_names: list[str],
-        unique: bool | None = False,
-        name: str | None = None,
-        index_type: str = "",
-        extra: str | None = "",
-    ) -> str:
+    def add_index(self, model: "Type[Model]", field_names: List[str], unique=False):
         return self._ADD_INDEX_TEMPLATE.format(
             unique="UNIQUE " if unique else "",
-            index_name=name or self._index_name(unique, model, field_names),
+            index_name=self.schema_generator._generate_index_name(
+                "idx" if not unique else "uid", model, field_names
+            ),
             table_name=model._meta.db_table,
             column_names=", ".join(self.schema_generator.quote(f) for f in field_names),
-            index_type=f"{index_type} " if index_type else "",
-            extra=f"{extra}" if extra else "",
         )
 
-    def drop_index(
-        self,
-        model: type[Model],
-        field_names: list[str],
-        unique: bool | None = False,
-        name: str | None = None,
-    ) -> str:
+    def drop_index(self, model: "Type[Model]", field_names: List[str], unique=False):
         return self._DROP_INDEX_TEMPLATE.format(
-            index_name=name or self._index_name(unique, model, field_names),
+            index_name=self.schema_generator._generate_index_name(
+                "idx" if not unique else "uid", model, field_names
+            ),
             table_name=model._meta.db_table,
         )
 
-    def drop_index_by_name(self, model: type[Model], index_name: str) -> str:
-        return self.drop_index(model, [], name=index_name)
-
-    def _generate_fk_name(
-        self, db_table: str, field_describe: dict, reference_table_describe: dict
-    ) -> str:
-        """Generate fk name"""
-        db_column = cast(str, field_describe.get("raw_field"))
-        pk_field = cast(dict, reference_table_describe.get("pk_field"))
-        to_field = cast(str, pk_field.get("db_column"))
-        to_table = cast(str, reference_table_describe.get("table"))
-        func_name = "_get_fk_name"
-        if not hasattr(self.schema_generator, func_name):
-            # For tortoise-orm<0.24.1
-            func_name = "_generate_fk_name"
-        return getattr(self.schema_generator, func_name)(
-            from_table=db_table,
-            from_field=db_column,
-            to_table=to_table,
-            to_field=to_field,
-        )
+    def drop_index_by_name(self, model: "Type[Model]", index_name: str):
+        return self._DROP_INDEX_TEMPLATE.format(
+            index_name=index_name,
+            table_name=model._meta.db_table,
+        )
 
-    def add_fk(
-        self, model: type[Model], field_describe: dict, reference_table_describe: dict
-    ) -> str:
+    def add_fk(self, model: "Type[Model]", field_describe: dict, reference_table_describe: dict):
         db_table = model._meta.db_table
 
         db_column = field_describe.get("raw_field")
-        pk_field = cast(dict, reference_table_describe.get("pk_field"))
-        reference_id = pk_field.get("db_column")
+        reference_id = reference_table_describe.get("pk_field").get("db_column")
+        fk_name = self.schema_generator._generate_fk_name(
+            from_table=db_table,
+            from_field=db_column,
+            to_table=reference_table_describe.get("table"),
+            to_field=reference_table_describe.get("pk_field").get("db_column"),
+        )
         return self._ADD_FK_TEMPLATE.format(
             table_name=db_table,
-            fk_name=self._generate_fk_name(db_table, field_describe, reference_table_describe),
+            fk_name=fk_name,
             db_column=db_column,
             table=reference_table_describe.get("table"),
             field=reference_id,
             on_delete=field_describe.get("on_delete"),
         )
 
-    def drop_fk(
-        self, model: type[Model], field_describe: dict, reference_table_describe: dict
-    ) -> str:
+    def drop_fk(self, model: "Type[Model]", field_describe: dict, reference_table_describe: dict):
         db_table = model._meta.db_table
-        fk_name = self._generate_fk_name(db_table, field_describe, reference_table_describe)
-        return self._DROP_FK_TEMPLATE.format(table_name=db_table, fk_name=fk_name)
+        return self._DROP_FK_TEMPLATE.format(
+            table_name=db_table,
+            fk_name=self.schema_generator._generate_fk_name(
+                from_table=db_table,
+                from_field=field_describe.get("raw_field"),
+                to_table=reference_table_describe.get("table"),
+                to_field=reference_table_describe.get("pk_field").get("db_column"),
+            ),
+        )
 
-    def alter_column_default(self, model: type[Model], field_describe: dict) -> str:
+    def alter_column_default(self, model: "Type[Model]", field_describe: dict):
         db_table = model._meta.db_table
         default = self._get_default(model, field_describe)
         return self._ALTER_DEFAULT_TEMPLATE.format(
@@ -275,28 +245,14 @@ class BaseDDL:
             default="SET" + default if default is not None else "DROP DEFAULT",
         )
 
-    def alter_column_null(self, model: type[Model], field_describe: dict) -> str:
+    def alter_column_null(self, model: "Type[Model]", field_describe: dict):
         return self.modify_column(model, field_describe)
 
-    def set_comment(self, model: type[Model], field_describe: dict) -> str:
+    def set_comment(self, model: "Type[Model]", field_describe: dict):
         return self.modify_column(model, field_describe)
 
-    def rename_table(self, model: type[Model], old_table_name: str, new_table_name: str) -> str:
+    def rename_table(self, model: "Type[Model]", old_table_name: str, new_table_name: str):
         db_table = model._meta.db_table
         return self._RENAME_TABLE_TEMPLATE.format(
             table_name=db_table, old_table_name=old_table_name, new_table_name=new_table_name
         )
-
-    def alter_indexed_column_unique(
-        self, model: type[Model], field_name: str, drop: bool = False
-    ) -> list[str]:
-        """Change unique constraint for indexed field, e.g.: Field(db_index=True) --> Field(unique=True)"""
-        fields = [field_name]
-        if drop:
-            drop_unique = self.drop_index(model, fields, unique=True)
-            add_normal_index = self.add_index(model, fields, unique=False)
-            return [drop_unique, add_normal_index]
-        else:
-            drop_index = self.drop_index(model, fields, unique=False)
-            add_unique_index = self.add_index(model, fields, unique=True)
-            return [drop_index, add_unique_index]
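Both sides of the BaseDDL diff above build SQL purely by str.format over class-level templates; a quick sketch of how one of those templates renders (the table and column names here are made up):

_RENAME_COLUMN_TEMPLATE = (
    'ALTER TABLE "{table_name}" RENAME COLUMN "{old_column_name}" TO "{new_column_name}"'
)
# Hypothetical names, just to show the substitution:
print(_RENAME_COLUMN_TEMPLATE.format(
    table_name="user", old_column_name="nickname", new_column_name="alias"
))
# ALTER TABLE "user" RENAME COLUMN "nickname" TO "alias"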
@@ -1,14 +1,7 @@
-from __future__ import annotations
 
-from typing import TYPE_CHECKING
 
 from tortoise.backends.mysql.schema_generator import MySQLSchemaGenerator
 
 from aerich.ddl import BaseDDL
 
-if TYPE_CHECKING:
-    from tortoise import Model
 
 
 class MysqlDDL(BaseDDL):
     schema_generator_cls = MySQLSchemaGenerator
@@ -23,14 +16,10 @@ class MysqlDDL(BaseDDL):
     _RENAME_COLUMN_TEMPLATE = (
         "ALTER TABLE `{table_name}` RENAME COLUMN `{old_column_name}` TO `{new_column_name}`"
     )
-    _ADD_INDEX_TEMPLATE = "ALTER TABLE `{table_name}` ADD {index_type}{unique}INDEX `{index_name}` ({column_names}){extra}"
+    _ADD_INDEX_TEMPLATE = (
+        "ALTER TABLE `{table_name}` ADD {unique}INDEX `{index_name}` ({column_names})"
+    )
     _DROP_INDEX_TEMPLATE = "ALTER TABLE `{table_name}` DROP INDEX `{index_name}`"
-    _ADD_INDEXED_UNIQUE_TEMPLATE = (
-        "ALTER TABLE `{table_name}` DROP INDEX `{index_name}`, ADD UNIQUE (`{column_name}`)"
-    )
-    _DROP_INDEXED_UNIQUE_TEMPLATE = (
-        "ALTER TABLE `{table_name}` DROP INDEX `{column_name}`, ADD INDEX (`{index_name}`)"
-    )
     _ADD_FK_TEMPLATE = "ALTER TABLE `{table_name}` ADD CONSTRAINT `{fk_name}` FOREIGN KEY (`{db_column}`) REFERENCES `{table}` (`{field}`) ON DELETE {on_delete}"
     _DROP_FK_TEMPLATE = "ALTER TABLE `{table_name}` DROP FOREIGN KEY `{fk_name}`"
     _M2M_TABLE_TEMPLATE = (
@@ -41,21 +30,3 @@ class MysqlDDL(BaseDDL):
     )
     _MODIFY_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` MODIFY COLUMN {column}"
     _RENAME_TABLE_TEMPLATE = "ALTER TABLE `{old_table_name}` RENAME TO `{new_table_name}`"
-
-    def _index_name(self, unique: bool | None, model: type[Model], field_names: list[str]) -> str:
-        if unique and len(field_names) == 1:
-            # Example: `email = CharField(max_length=50, unique=True)`
-            # Generate schema: `"email" VARCHAR(10) NOT NULL UNIQUE`
-            # Unique index key is the same as field name: `email`
-            return field_names[0]
-        return super()._index_name(unique, model, field_names)
-
-    def alter_indexed_column_unique(
-        self, model: type[Model], field_name: str, drop: bool = False
-    ) -> list[str]:
-        # if drop is false: Drop index and add unique
-        # else: Drop unique index and add normal index
-        template = self._DROP_INDEXED_UNIQUE_TEMPLATE if drop else self._ADD_INDEXED_UNIQUE_TEMPLATE
-        table = self.get_table_name(model)
-        index = self._index_name(unique=False, model=model, field_names=[field_name])
-        return [template.format(table_name=table, index_name=index, column_name=field_name)]
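A standalone sketch of the naming rule in the removed MysqlDDL._index_name override: a single-column unique constraint is keyed by the column name itself. The non-unique fallback below is a simplified stand-in; the real schema generator derives the name differently:

def index_name(unique: bool, field_names: list) -> str:
    if unique and len(field_names) == 1:
        # e.g. a unique CharField "email" gets index key "email"
        return field_names[0]
    prefix = "uid" if unique else "idx"
    return f"{prefix}_{'_'.join(field_names)}"  # simplified stand-in

print(index_name(True, ["email"]))           # email
print(index_name(False, ["name", "email"]))  # idx_name_email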
@@ -1,26 +1,24 @@
-from __future__ import annotations
-
-from typing import cast
+from typing import Type
 
 from tortoise import Model
-from tortoise.backends.base_postgres.schema_generator import BasePostgresSchemaGenerator
+from tortoise.backends.asyncpg.schema_generator import AsyncpgSchemaGenerator
 
 from aerich.ddl import BaseDDL
 
 
 class PostgresDDL(BaseDDL):
-    schema_generator_cls = BasePostgresSchemaGenerator
-    DIALECT = BasePostgresSchemaGenerator.DIALECT
-    _ADD_INDEX_TEMPLATE = 'CREATE {unique}INDEX IF NOT EXISTS "{index_name}" ON "{table_name}" {index_type}({column_names}){extra}'
-    _DROP_INDEX_TEMPLATE = 'DROP INDEX IF EXISTS "{index_name}"'
+    schema_generator_cls = AsyncpgSchemaGenerator
+    DIALECT = AsyncpgSchemaGenerator.DIALECT
+    _ADD_INDEX_TEMPLATE = 'CREATE {unique}INDEX "{index_name}" ON "{table_name}" ({column_names})'
+    _DROP_INDEX_TEMPLATE = 'DROP INDEX "{index_name}"'
     _ALTER_NULL_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" {set_drop} NOT NULL'
     _MODIFY_COLUMN_TEMPLATE = (
         'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" TYPE {datatype}{using}'
     )
     _SET_COMMENT_TEMPLATE = 'COMMENT ON COLUMN "{table_name}"."{column}" IS {comment}'
-    _DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP CONSTRAINT IF EXISTS "{fk_name}"'
+    _DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP CONSTRAINT "{fk_name}"'
 
-    def alter_column_null(self, model: type[Model], field_describe: dict) -> str:
+    def alter_column_null(self, model: "Type[Model]", field_describe: dict):
         db_table = model._meta.db_table
         return self._ALTER_NULL_TEMPLATE.format(
             table_name=db_table,
@@ -28,9 +26,9 @@ class PostgresDDL(BaseDDL):
             set_drop="DROP" if field_describe.get("nullable") else "SET",
         )
 
-    def modify_column(self, model: type[Model], field_describe: dict, is_pk: bool = False) -> str:
+    def modify_column(self, model: "Type[Model]", field_describe: dict, is_pk: bool = False):
         db_table = model._meta.db_table
-        db_field_types = cast(dict, field_describe.get("db_field_types"))
+        db_field_types = field_describe.get("db_field_types")
         db_column = field_describe.get("db_column")
         datatype = db_field_types.get(self.DIALECT) or db_field_types.get("")
         return self._MODIFY_COLUMN_TEMPLATE.format(
@@ -40,14 +38,12 @@ class PostgresDDL(BaseDDL):
             using=f' USING "{db_column}"::{datatype}',
         )
 
-    def set_comment(self, model: type[Model], field_describe: dict) -> str:
+    def set_comment(self, model: "Type[Model]", field_describe: dict):
         db_table = model._meta.db_table
         return self._SET_COMMENT_TEMPLATE.format(
             table_name=db_table,
             column=field_describe.get("db_column") or field_describe.get("raw_field"),
-            comment=(
-                "'{}'".format(field_describe.get("description"))
-                if field_describe.get("description")
-                else "NULL"
-            ),
+            comment="'{}'".format(field_describe.get("description"))
+            if field_describe.get("description")
+            else "NULL",
         )
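Worth noting from the hunk above: PostgresDDL's modify_column keeps a USING cast in the template so existing rows are converted when the column type changes. A minimal sketch of the substitution (names are hypothetical):

template = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" TYPE {datatype}{using}'
print(template.format(
    table_name="user", column="age", datatype="INT", using=' USING "age"::INT'
))
# ALTER TABLE "user" ALTER COLUMN "age" TYPE INT USING "age"::INT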
@@ -1,4 +1,4 @@
-from __future__ import annotations
+from typing import Type
 
 from tortoise import Model
 from tortoise.backends.sqlite.schema_generator import SqliteSchemaGenerator
@@ -10,17 +10,15 @@ from aerich.exceptions import NotSupportError
 class SqliteDDL(BaseDDL):
     schema_generator_cls = SqliteSchemaGenerator
     DIALECT = SqliteSchemaGenerator.DIALECT
-    _ADD_INDEX_TEMPLATE = 'CREATE {unique}INDEX "{index_name}" ON "{table_name}" ({column_names})'
-    _DROP_INDEX_TEMPLATE = 'DROP INDEX IF EXISTS "{index_name}"'
 
-    def modify_column(self, model: type[Model], field_object: dict, is_pk: bool = True):
+    def modify_column(self, model: "Type[Model]", field_object: dict, is_pk: bool = True):
         raise NotSupportError("Modify column is unsupported in SQLite.")
 
-    def alter_column_default(self, model: type[Model], field_describe: dict):
+    def alter_column_default(self, model: "Type[Model]", field_describe: dict):
         raise NotSupportError("Alter column default is unsupported in SQLite.")
 
-    def alter_column_null(self, model: type[Model], field_describe: dict):
+    def alter_column_null(self, model: "Type[Model]", field_describe: dict):
         raise NotSupportError("Alter column null is unsupported in SQLite.")
 
-    def set_comment(self, model: type[Model], field_describe: dict):
+    def set_comment(self, model: "Type[Model]", field_describe: dict):
         raise NotSupportError("Alter column comment is unsupported in SQLite.")
@@ -1,69 +1,52 @@
-from __future__ import annotations
-
-import contextlib
-from typing import Any, Callable, Dict, TypedDict
+from typing import Any, List, Optional
 
 from pydantic import BaseModel
 from tortoise import BaseDBAsyncClient
 
 
-class ColumnInfoDict(TypedDict):
-    name: str
-    pk: str
-    index: str
-    null: str
-    default: str
-    length: str
-    comment: str
-
-
-# TODO: use dict to replace typing.Dict when dropping support for Python3.8
-FieldMapDict = Dict[str, Callable[..., str]]
-
-
 class Column(BaseModel):
     name: str
     data_type: str
     null: bool
     default: Any
-    comment: str | None = None
+    comment: Optional[str]
     pk: bool
     unique: bool
     index: bool
-    length: int | None = None
-    extra: str | None = None
-    decimal_places: int | None = None
-    max_digits: int | None = None
+    length: Optional[int]
+    extra: Optional[str]
+    decimal_places: Optional[int]
+    max_digits: Optional[int]
 
-    def translate(self) -> ColumnInfoDict:
+    def translate(self) -> dict:
         comment = default = length = index = null = pk = ""
         if self.pk:
-            pk = "primary_key=True, "
+            pk = "pk=True, "
         else:
             if self.unique:
                 index = "unique=True, "
-            elif self.index:
-                index = "db_index=True, "
-        if self.data_type in ("varchar", "VARCHAR"):
+            else:
+                if self.index:
+                    index = "index=True, "
+        if self.data_type in ["varchar", "VARCHAR"]:
             length = f"max_length={self.length}, "
-        elif self.data_type in ("decimal", "numeric"):
+        if self.data_type in ["decimal", "numeric"]:
             length_parts = []
             if self.max_digits:
                 length_parts.append(f"max_digits={self.max_digits}")
             if self.decimal_places:
                 length_parts.append(f"decimal_places={self.decimal_places}")
-            if length_parts:
-                length = ", ".join(length_parts) + ", "
+            length = ", ".join(length_parts)
         if self.null:
             null = "null=True, "
-        if self.default is not None and not self.pk:
-            if self.data_type in ("tinyint", "INT"):
+        if self.default is not None:
+            if self.data_type in ["tinyint", "INT"]:
                 default = f"default={'True' if self.default == '1' else 'False'}, "
             elif self.data_type == "bool":
                 default = f"default={'True' if self.default == 'true' else 'False'}, "
-            elif self.data_type in ("datetime", "timestamptz", "TIMESTAMP"):
-                if self.default == "CURRENT_TIMESTAMP":
-                    if self.extra == "DEFAULT_GENERATED on update CURRENT_TIMESTAMP":
+            elif self.data_type in ["datetime", "timestamptz", "TIMESTAMP"]:
+                if "CURRENT_TIMESTAMP" == self.default:
+                    if "DEFAULT_GENERATED on update CURRENT_TIMESTAMP" == self.extra:
                         default = "auto_now=True, "
                     else:
                         default = "auto_now_add=True, "
@@ -72,8 +55,6 @@ class Column(BaseModel):
                     default = f"default={self.default.split('::')[0]}, "
                 elif self.default.endswith("()"):
                     default = ""
-                elif self.default == "":
-                    default = 'default=""'
                 else:
                     default = f"default={self.default}, "
 
@@ -93,14 +74,16 @@ class Column(BaseModel):
 class Inspect:
     _table_template = "class {table}(Model):\n"
 
-    def __init__(self, conn: BaseDBAsyncClient, tables: list[str] | None = None) -> None:
+    def __init__(self, conn: BaseDBAsyncClient, tables: Optional[List[str]] = None):
         self.conn = conn
-        with contextlib.suppress(AttributeError):
-            self.database = conn.database  # type:ignore[attr-defined]
+        try:
+            self.database = conn.database
+        except AttributeError:
+            pass
         self.tables = tables
 
     @property
-    def field_map(self) -> FieldMapDict:
+    def field_map(self) -> dict:
         raise NotImplementedError
 
     async def inspect(self) -> str:
@@ -118,75 +101,68 @@ class Inspect:
             tables.append(model + "\n".join(fields))
         return result + "\n\n\n".join(tables)
 
-    async def get_columns(self, table: str) -> list[Column]:
+    async def get_columns(self, table: str) -> List[Column]:
         raise NotImplementedError
 
-    async def get_all_tables(self) -> list[str]:
+    async def get_all_tables(self) -> List[str]:
         raise NotImplementedError
 
-    @staticmethod
-    def get_field_string(
-        field_class: str, arguments: str = "{null}{default}{comment}", **kwargs
-    ) -> str:
-        name = kwargs["name"]
-        field_params = arguments.format(**kwargs).strip().rstrip(",")
-        return f"{name} = fields.{field_class}({field_params})"
-
     @classmethod
     def decimal_field(cls, **kwargs) -> str:
-        return cls.get_field_string("DecimalField", **kwargs)
+        return "{name} = fields.DecimalField({pk}{index}{length}{null}{default}{comment})".format(
+            **kwargs
+        )
 
     @classmethod
     def time_field(cls, **kwargs) -> str:
-        return cls.get_field_string("TimeField", **kwargs)
+        return "{name} = fields.TimeField({null}{default}{comment})".format(**kwargs)
 
     @classmethod
     def date_field(cls, **kwargs) -> str:
-        return cls.get_field_string("DateField", **kwargs)
+        return "{name} = fields.DateField({null}{default}{comment})".format(**kwargs)
 
     @classmethod
     def float_field(cls, **kwargs) -> str:
-        return cls.get_field_string("FloatField", **kwargs)
+        return "{name} = fields.FloatField({null}{default}{comment})".format(**kwargs)
 
     @classmethod
     def datetime_field(cls, **kwargs) -> str:
-        return cls.get_field_string("DatetimeField", **kwargs)
+        return "{name} = fields.DatetimeField({null}{default}{comment})".format(**kwargs)
 
     @classmethod
     def text_field(cls, **kwargs) -> str:
-        return cls.get_field_string("TextField", **kwargs)
+        return "{name} = fields.TextField({null}{default}{comment})".format(**kwargs)
 
     @classmethod
     def char_field(cls, **kwargs) -> str:
-        arguments = "{pk}{index}{length}{null}{default}{comment}"
-        return cls.get_field_string("CharField", arguments, **kwargs)
+        return "{name} = fields.CharField({pk}{index}{length}{null}{default}{comment})".format(
+            **kwargs
+        )
 
     @classmethod
-    def int_field(cls, field_class="IntField", **kwargs) -> str:
-        arguments = "{pk}{index}{default}{comment}"
-        return cls.get_field_string(field_class, arguments, **kwargs)
+    def int_field(cls, **kwargs) -> str:
+        return "{name} = fields.IntField({pk}{index}{comment})".format(**kwargs)
 
     @classmethod
     def smallint_field(cls, **kwargs) -> str:
-        return cls.int_field("SmallIntField", **kwargs)
+        return "{name} = fields.SmallIntField({pk}{index}{comment})".format(**kwargs)
 
     @classmethod
     def bigint_field(cls, **kwargs) -> str:
-        return cls.int_field("BigIntField", **kwargs)
+        return "{name} = fields.BigIntField({pk}{index}{default}{comment})".format(**kwargs)
 
     @classmethod
     def bool_field(cls, **kwargs) -> str:
-        return cls.get_field_string("BooleanField", **kwargs)
+        return "{name} = fields.BooleanField({null}{default}{comment})".format(**kwargs)
 
     @classmethod
     def uuid_field(cls, **kwargs) -> str:
-        arguments = "{pk}{index}{default}{comment}"
-        return cls.get_field_string("UUIDField", arguments, **kwargs)
+        return "{name} = fields.UUIDField({pk}{index}{default}{comment})".format(**kwargs)
 
     @classmethod
     def json_field(cls, **kwargs) -> str:
-        return cls.get_field_string("JSONField", **kwargs)
+        return "{name} = fields.JSONField({null}{default}{comment})".format(**kwargs)
 
     @classmethod
     def binary_field(cls, **kwargs) -> str:
-        return cls.get_field_string("BinaryField", **kwargs)
+        return "{name} = fields.BinaryField({null}{default}{comment})".format(**kwargs)
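The removed Inspect.get_field_string helper is worth seeing in isolation: it fills pre-rendered argument fragments into a field declaration and trims the trailing ", " left behind by empty fragments. A standalone copy, runnable as-is:

def get_field_string(field_class, arguments="{null}{default}{comment}", **kwargs):
    name = kwargs["name"]
    # Empty fragments leave a dangling ", " which strip/rstrip cleans up
    field_params = arguments.format(**kwargs).strip().rstrip(",")
    return f"{name} = fields.{field_class}({field_params})"

print(get_field_string("TextField", name="bio", null="null=True, ", default="", comment=""))
# bio = fields.TextField(null=True)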
@@ -1,23 +1,21 @@
-from __future__ import annotations
+from typing import List
 
-from aerich.inspectdb import Column, FieldMapDict, Inspect
+from aerich.inspectdb import Column, Inspect
 
 
 class InspectMySQL(Inspect):
     @property
-    def field_map(self) -> FieldMapDict:
+    def field_map(self) -> dict:
         return {
             "int": self.int_field,
             "smallint": self.smallint_field,
             "tinyint": self.bool_field,
             "bigint": self.bigint_field,
             "varchar": self.char_field,
-            "char": self.uuid_field,
             "longtext": self.text_field,
             "text": self.text_field,
             "datetime": self.datetime_field,
             "float": self.float_field,
-            "double": self.float_field,
             "date": self.date_field,
             "time": self.time_field,
             "decimal": self.decimal_field,
@@ -25,12 +23,12 @@ class InspectMySQL(Inspect):
             "longblob": self.binary_field,
         }
 
-    async def get_all_tables(self) -> list[str]:
+    async def get_all_tables(self) -> List[str]:
         sql = "select TABLE_NAME from information_schema.TABLES where TABLE_SCHEMA=%s"
         ret = await self.conn.execute_query_dict(sql, [self.database])
         return list(map(lambda x: x["TABLE_NAME"], ret))
 
-    async def get_columns(self, table: str) -> list[Column]:
+    async def get_columns(self, table: str) -> List[Column]:
         columns = []
         sql = """select c.*, s.NON_UNIQUE, s.INDEX_NAME
 from information_schema.COLUMNS c
@@ -41,13 +39,16 @@ where c.TABLE_SCHEMA = %s
 and c.TABLE_NAME = %s"""
         ret = await self.conn.execute_query_dict(sql, [self.database, table])
         for row in ret:
-            unique = index = False
-            if (non_unique := row["NON_UNIQUE"]) is not None:
+            non_unique = row["NON_UNIQUE"]
+            if non_unique is None:
+                unique = False
+            else:
                 unique = not non_unique
-            elif row["COLUMN_KEY"] == "UNI":
-                unique = True
-            if (index_name := row["INDEX_NAME"]) is not None:
-                index = index_name != "PRIMARY"
+            index_name = row["INDEX_NAME"]
+            if index_name is None:
+                index = False
+            else:
+                index = row["INDEX_NAME"] != "PRIMARY"
             columns.append(
                 Column(
                     name=row["COLUMN_NAME"],
@@ -56,8 +57,9 @@ where c.TABLE_SCHEMA = %s
                     default=row["COLUMN_DEFAULT"],
                     pk=row["COLUMN_KEY"] == "PRI",
                     comment=row["COLUMN_COMMENT"],
-                    unique=unique,
+                    unique=row["COLUMN_KEY"] == "UNI",
                     extra=row["EXTRA"],
+                    unque=unique,
                     index=index,
                     length=row["CHARACTER_MAXIMUM_LENGTH"],
                     max_digits=row["NUMERIC_PRECISION"],
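A quick equivalence check for the hunk above: aside from the extra COLUMN_KEY fallback, the walrus-operator form on the minus side and the explicit None checks on the plus side derive the same unique/index flags from an information_schema row. The sample row here is made up:

row = {"NON_UNIQUE": 0, "INDEX_NAME": "idx_user_email", "COLUMN_KEY": ""}

unique = index = False
if (non_unique := row["NON_UNIQUE"]) is not None:
    unique = not non_unique
elif row["COLUMN_KEY"] == "UNI":
    unique = True
if (index_name := row["INDEX_NAME"]) is not None:
    index = index_name != "PRIMARY"

print(unique, index)  # True True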
@@ -1,29 +1,24 @@
-from __future__ import annotations
-
-import re
-from typing import TYPE_CHECKING
+from typing import List, Optional
 
-from aerich.inspectdb import Column, FieldMapDict, Inspect
+from tortoise import BaseDBAsyncClient
 
-if TYPE_CHECKING:
-    from tortoise.backends.base_postgres.client import BasePostgresClient
+from aerich.inspectdb import Column, Inspect
 
 
 class InspectPostgres(Inspect):
-    def __init__(self, conn: BasePostgresClient, tables: list[str] | None = None) -> None:
+    def __init__(self, conn: BaseDBAsyncClient, tables: Optional[List[str]] = None):
         super().__init__(conn, tables)
-        self.schema = conn.server_settings.get("schema") or "public"
+        self.schema = self.conn.server_settings.get("schema") or "public"
 
     @property
-    def field_map(self) -> FieldMapDict:
+    def field_map(self) -> dict:
         return {
-            "int2": self.smallint_field,
             "int4": self.int_field,
-            "int8": self.bigint_field,
+            "int8": self.int_field,
             "smallint": self.smallint_field,
-            "bigint": self.bigint_field,
             "varchar": self.char_field,
             "text": self.text_field,
+            "bigint": self.bigint_field,
             "timestamptz": self.datetime_field,
             "float4": self.float_field,
             "float8": self.float_field,
@@ -38,12 +33,12 @@ class InspectPostgres(Inspect):
             "timestamp": self.datetime_field,
         }
 
-    async def get_all_tables(self) -> list[str]:
+    async def get_all_tables(self) -> List[str]:
         sql = "select TABLE_NAME from information_schema.TABLES where table_catalog=$1 and table_schema=$2"
         ret = await self.conn.execute_query_dict(sql, [self.database, self.schema])
         return list(map(lambda x: x["table_name"], ret))
 
-    async def get_columns(self, table: str) -> list[Column]:
+    async def get_columns(self, table: str) -> List[Column]:
         columns = []
         sql = f"""select c.column_name,
 col_description('public.{table}'::regclass, ordinal_position) as column_comment,
@@ -60,9 +55,7 @@ from information_schema.constraint_column_usage const
 right join information_schema.columns c using (column_name, table_catalog, table_schema, table_name)
 where c.table_catalog = $1
 and c.table_name = $2
-and c.table_schema = $3"""  # nosec:B608
-        if "psycopg" in str(type(self.conn)).lower():
-            sql = re.sub(r"\$[123]", "%s", sql)
+and c.table_schema = $3"""
         ret = await self.conn.execute_query_dict(sql, [self.database, table, self.schema])
         for row in ret:
             columns.append(
@@ -1,11 +1,11 @@
-from __future__ import annotations
+from typing import List
 
-from aerich.inspectdb import Column, FieldMapDict, Inspect
+from aerich.inspectdb import Column, Inspect
 
 
 class InspectSQLite(Inspect):
     @property
-    def field_map(self) -> FieldMapDict:
+    def field_map(self) -> dict:
         return {
             "INTEGER": self.int_field,
             "INT": self.bool_field,
@@ -21,7 +21,7 @@ class InspectSQLite(Inspect):
             "BLOB": self.binary_field,
         }
 
-    async def get_columns(self, table: str) -> list[Column]:
+    async def get_columns(self, table: str) -> List[Column]:
         columns = []
         sql = f"PRAGMA table_info({table})"
         ret = await self.conn.execute_query_dict(sql)
@@ -45,7 +45,7 @@ class InspectSQLite(Inspect):
         )
         return columns
 
-    async def _get_columns_index(self, table: str) -> dict[str, str]:
+    async def _get_columns_index(self, table: str):
         sql = f"PRAGMA index_list ({table})"
         indexes = await self.conn.execute_query_dict(sql)
         ret = {}
@@ -55,7 +55,7 @@ class InspectSQLite(Inspect):
             ret[index_info["name"]] = "unique" if index["unique"] else "index"
         return ret
 
-    async def get_all_tables(self) -> list[str]:
+    async def get_all_tables(self) -> List[str]:
         sql = "select tbl_name from sqlite_master where type='table' and name!='sqlite_sequence'"
         ret = await self.conn.execute_query_dict(sql)
         return list(map(lambda x: x["tbl_name"], ret))
File diff suppressed because it is too large
@@ -9,7 +9,7 @@ MAX_APP_LENGTH = 100
 class Aerich(Model):
     version = fields.CharField(max_length=MAX_VERSION_LENGTH)
     app = fields.CharField(max_length=MAX_APP_LENGTH)
-    content: dict = fields.JSONField(encoder=encoder, decoder=decoder)
+    content = fields.JSONField(encoder=encoder, decoder=decoder)
 
     class Meta:
         ordering = ["-id"]
@@ -1,15 +1,11 @@
-from __future__ import annotations
-
 import importlib.util
 import os
 import re
 import sys
-from collections.abc import Generator
 from pathlib import Path
-from types import ModuleType
+from typing import Dict
 
-from asyncclick import BadOptionUsage, ClickException, Context
-from dictdiffer import diff
+from click import BadOptionUsage, ClickException, Context
 from tortoise import BaseDBAsyncClient, Tortoise
 
 
@@ -34,19 +30,23 @@ def get_app_connection_name(config, app_name: str) -> str:
     get connection name
     :param config:
     :param app_name:
-    :return: the default connection name (Usally it is 'default')
+    :return:
     """
-    if app := config.get("apps").get(app_name):
+    app = config.get("apps").get(app_name)
+    if app:
         return app.get("default_connection", "default")
-    raise BadOptionUsage(option_name="--app", message=f"Can't get app named {app_name!r}")
+    raise BadOptionUsage(
+        option_name="--app",
+        message=f'Can\'t get app named "{app_name}"',
+    )
 
 
 def get_app_connection(config, app) -> BaseDBAsyncClient:
     """
-    get connection client
+    get connection name
     :param config:
     :param app:
-    :return: client instance
+    :return:
     """
     return Tortoise.get_connection(get_app_connection_name(config, app))
 
@@ -77,67 +77,26 @@ def get_tortoise_config(ctx: Context, tortoise_orm: str) -> dict:
     return config
 
 
-def get_models_describe(app: str) -> dict:
+def get_models_describe(app: str) -> Dict:
     """
     get app models describe
     :param app:
     :return:
     """
     ret = {}
-    for model in Tortoise.apps[app].values():
-        managed = getattr(model.Meta, "managed", None)
+    for model in Tortoise.apps.get(app).values():
         describe = model.describe()
-        ret[describe.get("name")] = dict(describe, managed=managed)
+        ret[describe.get("name")] = describe
     return ret
 
 
-def is_default_function(string: str) -> re.Match | None:
+def is_default_function(string: str):
     return re.match(r"^<function.+>$", str(string or ""))
 
 
-def import_py_file(file: str | Path) -> ModuleType:
+def import_py_file(file: Path):
     module_name, file_ext = os.path.splitext(os.path.split(file)[-1])
     spec = importlib.util.spec_from_file_location(module_name, file)
-    module = importlib.util.module_from_spec(spec)  # type:ignore[arg-type]
-    spec.loader.exec_module(module)  # type:ignore[union-attr]
+    module = importlib.util.module_from_spec(spec)
+    spec.loader.exec_module(module)
     return module
-
-
-def get_dict_diff_by_key(
-    old_fields: list[dict], new_fields: list[dict], key="through"
-) -> Generator[tuple]:
-    """
-    Compare two list by key instead of by index
-
-    :param old_fields: previous field info list
-    :param new_fields: current field info list
-    :param key: if two dicts have the same value of this key, action is change; otherwise, is remove/add
-    :return: similar to dictdiffer.diff
-
-    Example::
-
-        >>> old = [{'through': 'a'}, {'through': 'b'}, {'through': 'c'}]
-        >>> new = [{'through': 'a'}, {'through': 'c'}]  # remove the second element
-        >>> list(diff(old, new))
-        [('change', [1, 'through'], ('b', 'c')),
-         ('remove', '', [(2, {'through': 'c'})])]
-        >>> list(get_dict_diff_by_key(old, new))
-        [('remove', '', [(0, {'through': 'b'})])]
-
-    """
-    length_old, length_new = len(old_fields), len(new_fields)
-    if length_old == 0 or length_new == 0 or length_old == length_new == 1:
-        yield from diff(old_fields, new_fields)
-    else:
-        value_index: dict[str, int] = {f[key]: i for i, f in enumerate(new_fields)}
-        additions = set(range(length_new))
-        for field in old_fields:
-            value = field[key]
-            if (index := value_index.get(value)) is not None:
-                additions.remove(index)
-                yield from diff([field], [new_fields[index]])  # change
-            else:
-                yield from diff([field], [])  # remove
-        if additions:
-            for index in sorted(additions):
-                yield from diff([], [new_fields[index]])  # add
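A usage sketch for the removed get_dict_diff_by_key helper, assuming it is still in scope and the dictdiffer package is installed; matching by the "through" key reports one removal instead of the spurious change plus removal that positional diffing produces:

old = [{"through": "a"}, {"through": "b"}, {"through": "c"}]
new = [{"through": "a"}, {"through": "c"}]
assert list(get_dict_diff_by_key(old, new)) == [("remove", "", [(0, {"through": "b"})])]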
@@ -1,3 +1 @@
-from importlib.metadata import version
-
-__version__ = version(__package__)
+__version__ = "0.7.2"
64 conftest.py
@@ -1,30 +1,23 @@
-from __future__ import annotations
-
 import asyncio
 import os
-import sys
-from collections.abc import Generator
-from pathlib import Path
 
 import pytest
-from tortoise import Tortoise, expand_db_url
-from tortoise.backends.base_postgres.schema_generator import BasePostgresSchemaGenerator
+from tortoise import Tortoise, expand_db_url, generate_schema_for_client
+from tortoise.backends.asyncpg.schema_generator import AsyncpgSchemaGenerator
 from tortoise.backends.mysql.schema_generator import MySQLSchemaGenerator
 from tortoise.backends.sqlite.schema_generator import SqliteSchemaGenerator
-from tortoise.contrib.test import MEMORY_SQLITE
 
 from aerich.ddl.mysql import MysqlDDL
 from aerich.ddl.postgres import PostgresDDL
 from aerich.ddl.sqlite import SqliteDDL
 from aerich.migrate import Migrate
-from tests._utils import chdir, copy_files, init_db, run_shell
 
-db_url = os.getenv("TEST_DB", MEMORY_SQLITE)
-db_url_second = os.getenv("TEST_DB_SECOND", MEMORY_SQLITE)
+db_url = os.getenv("TEST_DB", "sqlite://:memory:")
+db_url_second = os.getenv("TEST_DB_SECOND", "sqlite://:memory:")
 tortoise_orm = {
     "connections": {
-        "default": expand_db_url(db_url, testing=True),
-        "second": expand_db_url(db_url_second, testing=True),
+        "default": expand_db_url(db_url, True),
+        "second": expand_db_url(db_url_second, True),
     },
     "apps": {
         "models": {"models": ["tests.models", "aerich.models"], "default_connection": "default"},
@@ -34,7 +27,7 @@ tortoise_orm = {
 
 
 @pytest.fixture(scope="function", autouse=True)
-def reset_migrate() -> None:
+def reset_migrate():
     Migrate.upgrade_operators = []
     Migrate.downgrade_operators = []
     Migrate._upgrade_fk_m2m_index_operators = []
@@ -44,54 +37,29 @@ def reset_migrate() -> None:
 
 
 @pytest.fixture(scope="session")
-def event_loop() -> Generator:
+def event_loop():
     policy = asyncio.get_event_loop_policy()
     res = policy.new_event_loop()
     asyncio.set_event_loop(res)
-    res._close = res.close  # type:ignore[attr-defined]
-    res.close = lambda: None  # type:ignore[method-assign]
+    res._close = res.close
+    res.close = lambda: None
 
     yield res
 
-    res._close()  # type:ignore[attr-defined]
+    res._close()
 
 
 @pytest.fixture(scope="session", autouse=True)
-async def initialize_tests(event_loop, request) -> None:
-    await init_db(tortoise_orm)
+async def initialize_tests(event_loop, request):
+    await Tortoise.init(config=tortoise_orm, _create_db=True)
+    await generate_schema_for_client(Tortoise.get_connection("default"), safe=True)
 
     client = Tortoise.get_connection("default")
     if client.schema_generator is MySQLSchemaGenerator:
         Migrate.ddl = MysqlDDL(client)
     elif client.schema_generator is SqliteSchemaGenerator:
         Migrate.ddl = SqliteDDL(client)
-    elif issubclass(client.schema_generator, BasePostgresSchemaGenerator):
+    elif client.schema_generator is AsyncpgSchemaGenerator:
         Migrate.ddl = PostgresDDL(client)
     Migrate.dialect = Migrate.ddl.DIALECT
     request.addfinalizer(lambda: event_loop.run_until_complete(Tortoise._drop_databases()))
-
-
-@pytest.fixture
-def new_aerich_project(tmp_path: Path):
-    test_dir = Path(__file__).parent / "tests"
-    asset_dir = test_dir / "assets" / "fake"
-    settings_py = asset_dir / "settings.py"
-    _tests_py = asset_dir / "_tests.py"
-    db_py = asset_dir / "db.py"
-    models_py = test_dir / "models.py"
-    models_second_py = test_dir / "models_second.py"
-    copy_files(settings_py, _tests_py, models_py, models_second_py, db_py, target_dir=tmp_path)
-    dst_dir = tmp_path / "tests"
-    dst_dir.mkdir()
-    dst_dir.joinpath("__init__.py").touch()
-    copy_files(test_dir / "_utils.py", test_dir / "indexes.py", target_dir=dst_dir)
|
|
||||||
if should_remove := str(tmp_path) not in sys.path:
|
|
||||||
sys.path.append(str(tmp_path))
|
|
||||||
with chdir(tmp_path):
|
|
||||||
run_shell("python db.py create", capture_output=False)
|
|
||||||
try:
|
|
||||||
yield
|
|
||||||
finally:
|
|
||||||
if not os.getenv("AERICH_DONT_DROP_FAKE_DB"):
|
|
||||||
run_shell("python db.py drop", capture_output=False)
|
|
||||||
if should_remove:
|
|
||||||
sys.path.remove(str(tmp_path))
|
|
||||||
|
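The fixture above moves from an identity check on the asyncpg generator to a subclass check, so any Postgres driver is recognized. A hedged sketch of the distinction; the module paths follow current tortoise-orm layout and should be treated as assumptions:

from tortoise.backends.asyncpg.schema_generator import AsyncpgSchemaGenerator
from tortoise.backends.base_postgres.schema_generator import BasePostgresSchemaGenerator

# The identity test only ever matches one concrete generator class...
assert AsyncpgSchemaGenerator is not BasePostgresSchemaGenerator
# ...while the subclass test accepts any generator derived from the shared base,
# which is what lets a psycopg-backed connection select PostgresDDL as well.
assert issubclass(AsyncpgSchemaGenerator, BasePostgresSchemaGenerator)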
2128 poetry.lock (generated)
File diff suppressed because it is too large
132 pyproject.toml

@@ -1,60 +1,43 @@
-[project]
+[tool.poetry]
 name = "aerich"
-version = "0.8.2"
+version = "0.7.2"
 description = "A database migrations tool for Tortoise ORM."
-authors = [{name="long2ice", email="long2ice@gmail.com"}]
+authors = ["long2ice <long2ice@gmail.com>"]
-license = { text = "Apache-2.0" }
+license = "Apache-2.0"
 readme = "README.md"
-keywords = ["migrate", "Tortoise-ORM", "mysql"]
-packages = [{ include = "aerich" }]
-include = ["CHANGELOG.md", "LICENSE", "README.md"]
-requires-python = ">=3.8"
-dependencies = [
-    "tortoise-orm (>=0.21.0,<1.0.0); python_version < '4.0'",
-    "pydantic (>=2.0.2,!=2.1.0,!=2.7.0,<3.0.0)",
-    "dictdiffer (>=0.9.0,<1.0.0)",
-    "asyncclick (>=8.1.7,<9.0.0)",
-    "eval-type-backport (>=0.2.2,<1.0.0); python_version < '3.10'",
-]
-
-[project.optional-dependencies]
-toml = [
-    "tomli-w (>=1.1.0,<2.0.0); python_version >= '3.11'",
-    "tomlkit (>=0.11.4,<1.0.0); python_version < '3.11'",
-]
-# Need asyncpg or psycopg for PostgreSQL
-asyncpg = ["asyncpg"]
-psycopg = ["psycopg[pool,binary] (>=3.0.12,<4.0.0)"]
-# Need asyncmy or aiomysql for MySQL
-asyncmy = ["asyncmy>=0.2.9; python_version < '4.0'"]
-mysql = ["aiomysql>=0.2.0"]
-
-[project.urls]
 homepage = "https://github.com/tortoise/aerich"
 repository = "https://github.com/tortoise/aerich.git"
 documentation = "https://github.com/tortoise/aerich"
+keywords = ["migrate", "Tortoise-ORM", "mysql"]
+packages = [
+    { include = "aerich" }
+]
+include = ["CHANGELOG.md", "LICENSE", "README.md"]
 
-[project.scripts]
+[tool.poetry.dependencies]
-aerich = "aerich.cli:main"
+python = "^3.7"
+tortoise-orm = "*"
+click = "*"
+asyncpg = { version = "*", optional = true }
+asyncmy = { version = "^0.2.8rc1", optional = true, allow-prereleases = true }
+pydantic = "*"
+dictdiffer = "*"
+tomlkit = "*"
 
-[tool.poetry]
+[tool.poetry.dev-dependencies]
-requires-poetry = ">=2.0"
+ruff = "*"
+isort = "*"
+black = "*"
+pytest = "*"
+pytest-xdist = "*"
+pytest-asyncio = "*"
+bandit = "*"
+pytest-mock = "*"
+cryptography = "*"
 
-[tool.poetry.group.dev.dependencies]
+[tool.poetry.extras]
-ruff = "^0.9.0"
+asyncmy = ["asyncmy"]
-bandit = "^1.7.0"
+asyncpg = ["asyncpg"]
-mypy = "^1.10.0"
-twine = "^6.1.0"
-
-[tool.poetry.group.test.dependencies]
-pytest = "^8.3.0"
-pytest-mock = "^3.14.0"
-pytest-xdist = "^3.6.0"
-# Breaking change in 0.23.*
-# https://github.com/pytest-dev/pytest-asyncio/issues/706
-pytest-asyncio = "^0.21.2"
-# required for sha256_password by asyncmy
-cryptography = {version="^44.0.1", python="!=3.9.0,!=3.9.1"}
 
 [tool.aerich]
 tortoise_orm = "conftest.tortoise_orm"
@@ -62,55 +45,22 @@ location = "./migrations"
 src_folder = "./."
 
 [build-system]
-requires = ["poetry-core>=2.0.0"]
+requires = ["poetry>=0.12"]
-build-backend = "poetry.core.masonry.api"
+build-backend = "poetry.masonry.api"
 
+[tool.poetry.scripts]
+aerich = "aerich.cli:main"
+
+[tool.black]
+line-length = 100
+target-version = ['py36', 'py37', 'py38', 'py39']
 
 [tool.pytest.ini_options]
 asyncio_mode = 'auto'
 
-[tool.coverage.run]
-branch = true
-source = ["aerich"]
-
-[tool.coverage.report]
-exclude_also = [
-    "if TYPE_CHECKING:"
-]
-
 [tool.mypy]
 pretty = true
-python_version = "3.8"
-check_untyped_defs = true
-warn_unused_ignores = true
-disallow_incomplete_defs = false
-exclude = ["tests/assets", "migrations"]
-
-[[tool.mypy.overrides]]
-module = [
-    'dictdiffer.*',
-    'tomlkit',
-    'tomli_w',
-    'tomli',
-]
 ignore_missing_imports = true
 
 [tool.ruff]
-line-length = 100
+ignore = ['E501']
-
-[tool.ruff.lint]
-extend-select = [
-    "I",       # https://docs.astral.sh/ruff/rules/#isort-i
-    "SIM",     # https://docs.astral.sh/ruff/rules/#flake8-simplify-sim
-    "FA",      # https://docs.astral.sh/ruff/rules/#flake8-future-annotations-fa
-    "UP",      # https://docs.astral.sh/ruff/rules/#pyupgrade-up
-    "RUF100",  # https://docs.astral.sh/ruff/rules/#ruff-specific-rules-ruf
-]
-ignore = ["UP031"]  # https://docs.astral.sh/ruff/rules/printf-string-formatting/
-
-[tool.ruff.lint.per-file-ignores]
-# TODO: Remove this line when dropping support for Python3.8
-"aerich/inspectdb/__init__.py" = ["UP006", "UP035"]
-"aerich/_compat.py" = ["F401"]
-
-[tool.bandit]
-exclude_dirs = ["tests", "conftest.py"]
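The `toml` extra above pins a writer per interpreter: tomli-w on Python 3.11+ (where stdlib tomllib can read) and tomlkit below that. A minimal sketch of the import fallback such a split implies; the function names are illustrative, not aerich's API:

import sys

if sys.version_info >= (3, 11):
    import tomllib
    import tomli_w

    def load_toml(path):
        with open(path, "rb") as f:  # tomllib reads binary files
            return tomllib.load(f)

    def dump_toml(data, path):
        with open(path, "wb") as f:
            tomli_w.dump(data, f)
else:
    import tomlkit

    def load_toml(path):
        with open(path, encoding="utf-8") as f:
            return tomlkit.parse(f.read())

    def dump_toml(data, path):
        with open(path, "w", encoding="utf-8") as f:
            f.write(tomlkit.dumps(data))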
tests/_utils.py

@@ -1,87 +0,0 @@
-import contextlib
-import os
-import platform
-import shlex
-import shutil
-import subprocess
-import sys
-from pathlib import Path
-
-from tortoise import Tortoise, generate_schema_for_client
-from tortoise.exceptions import DBConnectionError, OperationalError
-
-if sys.version_info >= (3, 11):
-    from contextlib import chdir
-else:
-
-    class chdir(contextlib.AbstractContextManager):  # Copied from source code of Python3.13
-        """Non thread-safe context manager to change the current working directory."""
-
-        def __init__(self, path):
-            self.path = path
-            self._old_cwd = []
-
-        def __enter__(self):
-            self._old_cwd.append(os.getcwd())
-            os.chdir(self.path)
-
-        def __exit__(self, *excinfo):
-            os.chdir(self._old_cwd.pop())
-
-
-async def drop_db(tortoise_orm) -> None:
-    # Placing init outside the try-block(suppress) since it doesn't
-    # establish connections to the DB eagerly.
-    await Tortoise.init(config=tortoise_orm)
-    with contextlib.suppress(DBConnectionError, OperationalError):
-        await Tortoise._drop_databases()
-
-
-async def init_db(tortoise_orm, generate_schemas=True) -> None:
-    await drop_db(tortoise_orm)
-    await Tortoise.init(config=tortoise_orm, _create_db=True)
-    if generate_schemas:
-        await generate_schema_for_client(Tortoise.get_connection("default"), safe=True)
-
-
-def copy_files(*src_files: Path, target_dir: Path) -> None:
-    for src in src_files:
-        shutil.copy(src, target_dir)
-
-
-class Dialect:
-    test_db_url: str
-
-    @classmethod
-    def load_env(cls) -> None:
-        if getattr(cls, "test_db_url", None) is None:
-            cls.test_db_url = os.getenv("TEST_DB", "")
-
-    @classmethod
-    def is_postgres(cls) -> bool:
-        cls.load_env()
-        return "postgres" in cls.test_db_url
-
-    @classmethod
-    def is_mysql(cls) -> bool:
-        cls.load_env()
-        return "mysql" in cls.test_db_url
-
-    @classmethod
-    def is_sqlite(cls) -> bool:
-        cls.load_env()
-        return not cls.test_db_url or "sqlite" in cls.test_db_url
-
-
-WINDOWS = platform.system() == "Windows"
-
-
-def run_shell(command: str, capture_output=True, **kw) -> str:
-    if WINDOWS and command.startswith("aerich "):
-        command = "python -m " + command
-    r = subprocess.run(shlex.split(command), capture_output=capture_output)
-    if r.returncode != 0 and r.stderr:
-        return r.stderr.decode()
-    if not r.stdout:
-        return ""
-    return r.stdout.decode()
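A hedged sketch of how these helpers compose in a test; `tmp_path` stands in for pytest's fixture, and the asserted output is illustrative:

from pathlib import Path

from tests._utils import Dialect, chdir, run_shell


def smoke(tmp_path: Path) -> None:
    with chdir(tmp_path):  # the old cwd is restored even if a command fails
        out = run_shell("aerich init -t settings.TORTOISE_ORM")
        assert "Success" in out
        if Dialect.is_sqlite():  # TEST_DB unset or pointing at sqlite
            return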
tests/assets/fake/_tests.py

@@ -1,80 +0,0 @@
-import pytest
-from models import NewModel
-from models_second import Config
-from settings import TORTOISE_ORM
-from tortoise import Tortoise
-from tortoise.exceptions import OperationalError
-
-try:
-    # This error does not translate to tortoise's OperationalError
-    from psycopg.errors import UndefinedColumn
-except ImportError:
-    errors = (OperationalError,)
-else:
-    errors = (OperationalError, UndefinedColumn)
-
-
-@pytest.fixture(scope="session")
-def anyio_backend() -> str:
-    return "asyncio"
-
-
-@pytest.fixture(autouse=True)
-async def init_connections():
-    await Tortoise.init(TORTOISE_ORM)
-    try:
-        yield
-    finally:
-        await Tortoise.close_connections()
-
-
-@pytest.mark.anyio
-async def test_init_db():
-    m1 = await NewModel.filter(name="")
-    assert isinstance(m1, list)
-    m2 = await Config.filter(key="")
-    assert isinstance(m2, list)
-    await NewModel.create(name="")
-    await Config.create(key="", label="", value={})
-
-
-@pytest.mark.anyio
-async def test_fake_field_1():
-    assert "field_1" in NewModel._meta.fields_map
-    assert "field_1" in Config._meta.fields_map
-    with pytest.raises(errors):
-        await NewModel.create(name="", field_1=1)
-    with pytest.raises(errors):
-        await Config.create(key="", label="", value={}, field_1=1)
-
-    obj1 = NewModel(name="", field_1=1)
-    with pytest.raises(errors):
-        await obj1.save()
-    obj1 = NewModel(name="")
-    with pytest.raises(errors):
-        await obj1.save()
-    with pytest.raises(errors):
-        obj1 = await NewModel.first()
-    obj1 = await NewModel.all().first().values("id", "name")
-    assert obj1 and obj1["id"]
-
-    obj2 = Config(key="", label="", value={}, field_1=1)
-    with pytest.raises(errors):
-        await obj2.save()
-    obj2 = Config(key="", label="", value={})
-    with pytest.raises(errors):
-        await obj2.save()
-    with pytest.raises(errors):
-        obj2 = await Config.first()
-    obj2 = await Config.all().first().values("id", "key")
-    assert obj2 and obj2["id"]
-
-
-@pytest.mark.anyio
-async def test_fake_field_2():
-    assert "field_2" in NewModel._meta.fields_map
-    assert "field_2" in Config._meta.fields_map
-    with pytest.raises(errors):
-        await NewModel.create(name="")
-    with pytest.raises(errors):
-        await Config.create(key="", label="", value={})
tests/assets/fake/db.py

@@ -1,28 +0,0 @@
-import asyncclick as click
-from settings import TORTOISE_ORM
-
-from tests._utils import drop_db, init_db
-
-
-@click.group()
-def cli(): ...
-
-
-@cli.command()
-async def create():
-    await init_db(TORTOISE_ORM, False)
-    click.echo(f"Success to create databases for {TORTOISE_ORM['connections']}")
-
-
-@cli.command()
-async def drop():
-    await drop_db(TORTOISE_ORM)
-    click.echo(f"Dropped databases for {TORTOISE_ORM['connections']}")
-
-
-def main():
-    cli()
-
-
-if __name__ == "__main__":
-    main()
tests/assets/fake/settings.py

@@ -1,22 +0,0 @@
-import os
-from datetime import date
-
-from tortoise.contrib.test import MEMORY_SQLITE
-
-DB_URL = (
-    _u.replace("\\{\\}", f"aerich_fake_{date.today():%Y%m%d}")
-    if (_u := os.getenv("TEST_DB"))
-    else MEMORY_SQLITE
-)
-DB_URL_SECOND = (DB_URL + "_second") if DB_URL != MEMORY_SQLITE else MEMORY_SQLITE
-
-TORTOISE_ORM = {
-    "connections": {
-        "default": DB_URL.replace(MEMORY_SQLITE, "sqlite://db.sqlite3"),
-        "second": DB_URL_SECOND.replace(MEMORY_SQLITE, "sqlite://db_second.sqlite3"),
-    },
-    "apps": {
-        "models": {"models": ["models", "aerich.models"], "default_connection": "default"},
-        "models_second": {"models": ["models_second"], "default_connection": "second"},
-    },
-}
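A worked example of the placeholder substitution above, under the assumption that CI exports TEST_DB with a literal `\{\}` marker in the database name:

import os
from datetime import date

os.environ["TEST_DB"] = "postgres://postgres:123456@127.0.0.1:5432/test_\\{\\}"
# The walrus branch replaces the marker with a dated fake-database name:
url = os.environ["TEST_DB"].replace("\\{\\}", f"aerich_fake_{date.today():%Y%m%d}")
print(url)  # e.g. postgres://postgres:123456@127.0.0.1:5432/test_aerich_fake_20250131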
@@ -1,76 +0,0 @@
-import uuid
-
-import pytest
-from models import Foo
-from tortoise.exceptions import IntegrityError
-
-
-@pytest.mark.asyncio
-async def test_allow_duplicate() -> None:
-    await Foo.all().delete()
-    await Foo.create(name="foo")
-    obj = await Foo.create(name="foo")
-    assert (await Foo.all().count()) == 2
-    await obj.delete()
-
-
-@pytest.mark.asyncio
-async def test_unique_is_true() -> None:
-    with pytest.raises(IntegrityError):
-        await Foo.create(name="foo")
-        await Foo.create(name="foo")
-
-
-@pytest.mark.asyncio
-async def test_add_unique_field() -> None:
-    if not await Foo.filter(age=0).exists():
-        await Foo.create(name="0_" + uuid.uuid4().hex, age=0)
-    with pytest.raises(IntegrityError):
-        await Foo.create(name=uuid.uuid4().hex, age=0)
-
-
-@pytest.mark.asyncio
-async def test_drop_unique_field() -> None:
-    name = "1_" + uuid.uuid4().hex
-    await Foo.create(name=name, age=0)
-    assert await Foo.filter(name=name).exists()
-
-
-@pytest.mark.asyncio
-async def test_with_age_field() -> None:
-    name = "2_" + uuid.uuid4().hex
-    await Foo.create(name=name, age=0)
-    obj = await Foo.get(name=name)
-    assert obj.age == 0
-
-
-@pytest.mark.asyncio
-async def test_without_age_field() -> None:
-    name = "3_" + uuid.uuid4().hex
-    await Foo.create(name=name, age=0)
-    obj = await Foo.get(name=name)
-    assert getattr(obj, "age", None) is None
-
-
-@pytest.mark.asyncio
-async def test_m2m_with_custom_through() -> None:
-    from models import FooGroup, Group
-
-    name = "4_" + uuid.uuid4().hex
-    foo = await Foo.create(name=name)
-    group = await Group.create(name=name + "1")
-    await FooGroup.all().delete()
-    await foo.groups.add(group)
-    foo_group = await FooGroup.get(foo=foo, group=group)
-    assert not foo_group.is_active
-
-
-@pytest.mark.asyncio
-async def test_add_m2m_field_after_init_db() -> None:
-    from models import Group
-
-    name = "5_" + uuid.uuid4().hex
-    foo = await Foo.create(name=name)
-    group = await Group.create(name=name + "1")
-    await foo.groups.add(group)
-    assert (await group.users.all().first()) == foo
@@ -1,28 +0,0 @@
-from __future__ import annotations
-
-import asyncio
-from collections.abc import Generator
-
-import pytest
-import pytest_asyncio
-import settings
-from tortoise import Tortoise, connections
-
-
-@pytest.fixture(scope="session")
-def event_loop() -> Generator:
-    policy = asyncio.get_event_loop_policy()
-    res = policy.new_event_loop()
-    asyncio.set_event_loop(res)
-    res._close = res.close  # type:ignore[attr-defined]
-    res.close = lambda: None  # type:ignore[method-assign]
-
-    yield res
-
-    res._close()  # type:ignore[attr-defined]
-
-
-@pytest_asyncio.fixture(scope="session", autouse=True)
-async def api(event_loop, request):
-    await Tortoise.init(config=settings.TORTOISE_ORM)
-    request.addfinalizer(lambda: event_loop.run_until_complete(connections.close_all(discard=True)))
@@ -1,5 +0,0 @@
-from tortoise import Model, fields
-
-
-class Foo(Model):
-    name = fields.CharField(max_length=60, db_index=False)
@@ -1,4 +0,0 @@
-TORTOISE_ORM = {
-    "connections": {"default": "sqlite://db.sqlite3"},
-    "apps": {"models": {"models": ["models", "aerich.models"]}},
-}
tests/indexes.py

@@ -1,7 +0,0 @@
-from tortoise.indexes import Index
-
-
-class CustomIndex(Index):
-    def __init__(self, *args, **kw) -> None:
-        super().__init__(*args, **kw)
-        self._foo = ""
tests/models.py

@@ -1,16 +1,8 @@
-from __future__ import annotations
-
 import datetime
 import uuid
 from enum import IntEnum
 
 from tortoise import Model, fields
-from tortoise.contrib.mysql.indexes import FullTextIndex
-from tortoise.contrib.postgres.indexes import HashIndex
-from tortoise.indexes import Index
-
-from tests._utils import Dialect
-from tests.indexes import CustomIndex
 
 
 class ProductType(IntEnum):
@@ -39,21 +31,13 @@ class User(Model):
     intro = fields.TextField(default="")
     longitude = fields.DecimalField(max_digits=10, decimal_places=8)
 
-    products: fields.ManyToManyRelation[Product]
-
-    class Meta:
-        # reverse indexes elements
-        indexes = [CustomIndex(fields=("is_superuser",)), Index(fields=("username", "is_active"))]
-
 
 class Email(Model):
-    email_id = fields.IntField(primary_key=True)
+    email_id = fields.IntField(pk=True)
-    email = fields.CharField(max_length=200, db_index=True)
+    email = fields.CharField(max_length=200, index=True)
-    company = fields.CharField(max_length=100, db_index=True, unique=True)
     is_primary = fields.BooleanField(default=False)
     address = fields.CharField(max_length=200)
-    users: fields.ManyToManyRelation[User] = fields.ManyToManyField("models.User")
+    users = fields.ManyToManyField("models.User")
-    config: fields.OneToOneRelation[Config] = fields.OneToOneField("models.Config")
 
 
 def default_name():
@@ -63,78 +47,34 @@ def default_name():
 class Category(Model):
     slug = fields.CharField(max_length=100)
     name = fields.CharField(max_length=200, null=True, default=default_name)
-    owner: fields.ForeignKeyRelation[User] = fields.ForeignKeyField(
-        "models.User", description="User"
-    )
+    user = fields.ForeignKeyField("models.User", description="User")
-    title = fields.CharField(max_length=20, unique=False)
     created_at = fields.DatetimeField(auto_now_add=True)
 
-    class Meta:
-        if Dialect.is_postgres():
-            indexes = [HashIndex(fields=("slug",))]
-        elif Dialect.is_mysql():
-            indexes = [FullTextIndex(fields=("slug",))]  # type:ignore
-        else:
-            indexes = [Index(fields=("slug",))]  # type:ignore
-
 
 class Product(Model):
-    id = fields.BigIntField(primary_key=True)
-    categories: fields.ManyToManyRelation[Category] = fields.ManyToManyField(
-        "models.Category", null=False
-    )
+    categories = fields.ManyToManyField("models.Category")
-    users: fields.ManyToManyRelation[User] = fields.ManyToManyField(
-        "models.User", related_name="products"
-    )
     name = fields.CharField(max_length=50)
     view_num = fields.IntField(description="View Num", default=0)
     sort = fields.IntField()
     is_reviewed = fields.BooleanField(description="Is Reviewed")
-    type: int = fields.IntEnumField(
+    type = fields.IntEnumField(
         ProductType, description="Product Type", source_field="type_db_alias"
     )
     pic = fields.CharField(max_length=200)
     body = fields.TextField()
-    price = fields.FloatField(null=True)
-    no = fields.UUIDField(db_index=True)
     created_at = fields.DatetimeField(auto_now_add=True)
-    is_deleted = fields.BooleanField(default=False)
 
     class Meta:
         unique_together = (("name", "type"),)
         indexes = (("name", "type"),)
-        managed = True
 
 
 class Config(Model):
-    slug = fields.CharField(primary_key=True, max_length=20)
-    categories: fields.ManyToManyRelation[Category] = fields.ManyToManyField(
-        "models.Category", through="config_category_map", related_name="category_set"
-    )
     label = fields.CharField(max_length=200)
     key = fields.CharField(max_length=20)
-    value: dict = fields.JSONField()
+    value = fields.JSONField()
     status: Status = fields.IntEnumField(Status)
-    user: fields.ForeignKeyRelation[User] = fields.ForeignKeyField(
-        "models.User", description="User"
-    )
+    user = fields.ForeignKeyField("models.User", description="User")
-
-    email: fields.OneToOneRelation[Email]
-
-    class Meta:
-        managed = True
-
-
-class DontManageMe(Model):
-    name = fields.CharField(max_length=50)
-
-    class Meta:
-        managed = False
-
-
-class Ignore(Model):
-    class Meta:
-        managed = False
-
 
 class NewModel(Model):
tests/models_second.py

@@ -34,29 +34,23 @@ class User(Model):
 class Email(Model):
     email = fields.CharField(max_length=200)
     is_primary = fields.BooleanField(default=False)
-    user: fields.ForeignKeyRelation[User] = fields.ForeignKeyField(
-        "models_second.User", db_constraint=False
-    )
+    user = fields.ForeignKeyField("models_second.User", db_constraint=False)
 
 
 class Category(Model):
     slug = fields.CharField(max_length=200)
     name = fields.CharField(max_length=200)
-    user: fields.ForeignKeyRelation[User] = fields.ForeignKeyField(
-        "models_second.User", description="User"
-    )
+    user = fields.ForeignKeyField("models_second.User", description="User")
     created_at = fields.DatetimeField(auto_now_add=True)
 
 
 class Product(Model):
-    categories: fields.ManyToManyRelation[Category] = fields.ManyToManyField(
-        "models_second.Category"
-    )
+    categories = fields.ManyToManyField("models_second.Category")
     name = fields.CharField(max_length=50)
     view_num = fields.IntField(description="View Num")
     sort = fields.IntField()
     is_reviewed = fields.BooleanField(description="Is Reviewed")
-    type: int = fields.IntEnumField(
+    type = fields.IntEnumField(
         ProductType, description="Product Type", source_field="type_db_alias"
     )
     image = fields.CharField(max_length=200)
@@ -67,5 +61,5 @@ class Product(Model):
 class Config(Model):
     label = fields.CharField(max_length=200)
     key = fields.CharField(max_length=20)
-    value: dict = fields.JSONField()
+    value = fields.JSONField()
     status: Status = fields.IntEnumField(Status, default=Status.on)
@@ -2,9 +2,6 @@ import datetime
 from enum import IntEnum
 
 from tortoise import Model, fields
-from tortoise.indexes import Index
-
-from tests.indexes import CustomIndex
 
 
 class ProductType(IntEnum):
@@ -34,96 +31,39 @@ class User(Model):
     intro = fields.TextField(default="")
     longitude = fields.DecimalField(max_digits=12, decimal_places=9)
 
-    class Meta:
-        indexes = [Index(fields=("username", "is_active")), CustomIndex(fields=("is_superuser",))]
-
 
 class Email(Model):
     email = fields.CharField(max_length=200)
-    company = fields.CharField(max_length=100, db_index=True)
     is_primary = fields.BooleanField(default=False)
-    user: fields.ForeignKeyRelation[User] = fields.ForeignKeyField(
-        "models.User", db_constraint=False
-    )
+    user = fields.ForeignKeyField("models.User", db_constraint=False)
 
 
 class Category(Model):
     slug = fields.CharField(max_length=200)
     name = fields.CharField(max_length=200)
-    user: fields.ForeignKeyRelation[User] = fields.ForeignKeyField(
-        "models.User", description="User"
-    )
+    user = fields.ForeignKeyField("models.User", description="User")
-    title = fields.CharField(max_length=20, unique=True)
     created_at = fields.DatetimeField(auto_now_add=True)
 
-    class Meta:
-        indexes = [Index(fields=("slug",))]
-
 
 class Product(Model):
-    categories: fields.ManyToManyRelation[Category] = fields.ManyToManyField("models.Category")
+    categories = fields.ManyToManyField("models.Category")
-    uid = fields.IntField(source_field="uuid", unique=True)
     name = fields.CharField(max_length=50)
     view_num = fields.IntField(description="View Num")
     sort = fields.IntField()
-    is_review = fields.BooleanField(description="Is Reviewed")
+    is_reviewed = fields.BooleanField(description="Is Reviewed")
-    type: int = fields.IntEnumField(
+    type = fields.IntEnumField(
         ProductType, description="Product Type", source_field="type_db_alias"
     )
     image = fields.CharField(max_length=200)
     body = fields.TextField()
     created_at = fields.DatetimeField(auto_now_add=True)
-    is_delete = fields.BooleanField(default=False)
 
 
 class Config(Model):
-    slug = fields.CharField(primary_key=True, max_length=10)
-    category: fields.ManyToManyRelation[Category] = fields.ManyToManyField("models.Category")
-    categories: fields.ManyToManyRelation[Category] = fields.ManyToManyField(
-        "models.Category", through="config_category_map", related_name="config_set"
-    )
-    name = fields.CharField(max_length=100, unique=True)
     label = fields.CharField(max_length=200)
     key = fields.CharField(max_length=20)
-    value: dict = fields.JSONField()
+    value = fields.JSONField()
     status: Status = fields.IntEnumField(Status, default=Status.on)
 
     class Meta:
         table = "configs"
-
-
-class DontManageMe(Model):
-    name = fields.CharField(max_length=50)
-
-    class Meta:
-        table = "dont_manage"
-
-
-class Ignore(Model):
-    name = fields.CharField(max_length=50)
-
-    class Meta:
-        managed = True
-
-
-def main() -> None:
-    """Generate a python file for the old_models_describe"""
-    from pathlib import Path
-
-    from tortoise import run_async
-    from tortoise.contrib.test import init_memory_sqlite
-
-    from aerich.utils import get_models_describe
-
-    @init_memory_sqlite
-    async def run() -> None:
-        old_models_describe = get_models_describe("models")
-        p = Path("old_models_describe.py")
-        p.write_text(f"{old_models_describe = }", encoding="utf-8")
-        print(f"Write value to {p}\nYou can reformat it by `ruff format {p}`")
-
-    run_async(run())
-
-
-if __name__ == "__main__":
-    main()
@@ -1,11 +0,0 @@
-from aerich import Command
-from conftest import tortoise_orm
-
-
-async def test_command(mocker):
-    mocker.patch("os.listdir", return_value=[])
-    async with Command(tortoise_orm) as command:
-        history = await command.history()
-        heads = await command.heads()
-    assert history == []
-    assert heads == []
@@ -1,5 +1,3 @@
-import tortoise
-
 from aerich.ddl.mysql import MysqlDDL
 from aerich.ddl.postgres import PostgresDDL
 from aerich.ddl.sqlite import SqliteDDL
@@ -10,48 +8,28 @@ from tests.models import Category, Product, User
 def test_create_table():
     ret = Migrate.ddl.create_table(Category)
     if isinstance(Migrate.ddl, MysqlDDL):
-        if tortoise.__version__ >= "0.24":
-            assert (
-                ret
-                == """CREATE TABLE IF NOT EXISTS `category` (
-    `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
-    `slug` VARCHAR(100) NOT NULL,
-    `name` VARCHAR(200),
-    `title` VARCHAR(20) NOT NULL,
-    `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6),
-    `owner_id` INT NOT NULL COMMENT 'User',
-    CONSTRAINT `fk_category_user_110d4c63` FOREIGN KEY (`owner_id`) REFERENCES `user` (`id`) ON DELETE CASCADE,
-    FULLTEXT KEY `idx_category_slug_e9bcff` (`slug`)
-) CHARACTER SET utf8mb4"""
-            )
-            return
-        assert (
-            ret
-            == """CREATE TABLE IF NOT EXISTS `category` (
-    `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
-    `slug` VARCHAR(100) NOT NULL,
-    `name` VARCHAR(200),
-    `title` VARCHAR(20) NOT NULL,
-    `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6),
-    `owner_id` INT NOT NULL COMMENT 'User',
-    CONSTRAINT `fk_category_user_110d4c63` FOREIGN KEY (`owner_id`) REFERENCES `user` (`id`) ON DELETE CASCADE
-) CHARACTER SET utf8mb4;
-CREATE FULLTEXT INDEX `idx_category_slug_e9bcff` ON `category` (`slug`)"""
-        )
-
+        assert (
+            ret
+            == """CREATE TABLE IF NOT EXISTS `category` (
+    `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
+    `slug` VARCHAR(100) NOT NULL,
+    `name` VARCHAR(200),
+    `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6),
+    `user_id` INT NOT NULL COMMENT 'User',
+    CONSTRAINT `fk_category_user_e2e3874c` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE
+) CHARACTER SET utf8mb4"""
+        )
     elif isinstance(Migrate.ddl, SqliteDDL):
-        exists = "IF NOT EXISTS " if tortoise.__version__ >= "0.24" else ""
         assert (
             ret
-            == f"""CREATE TABLE IF NOT EXISTS "category" (
-    "id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
-    "slug" VARCHAR(100) NOT NULL,
-    "name" VARCHAR(200),
-    "title" VARCHAR(20) NOT NULL,
-    "created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
-    "owner_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE /* User */
-);
-CREATE INDEX {exists}"idx_category_slug_e9bcff" ON "category" ("slug")"""
+            == """CREATE TABLE IF NOT EXISTS "category" (
+    "id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+    "slug" VARCHAR(100) NOT NULL,
+    "name" VARCHAR(200),
+    "created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    "user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE /* User */
+)"""
         )
 
     elif isinstance(Migrate.ddl, PostgresDDL):
@@ -61,12 +39,10 @@ CREATE INDEX {exists}"idx_category_slug_e9bcff" ON "category" ("slug")"""
     "id" SERIAL NOT NULL PRIMARY KEY,
     "slug" VARCHAR(100) NOT NULL,
     "name" VARCHAR(200),
-    "title" VARCHAR(20) NOT NULL,
     "created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
-    "owner_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE
+    "user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE
 );
-CREATE INDEX IF NOT EXISTS "idx_category_slug_e9bcff" ON "category" USING HASH ("slug");
-COMMENT ON COLUMN "category"."owner_id" IS 'User'"""
+COMMENT ON COLUMN "category"."user_id" IS 'User'"""
         )
 
 
@@ -79,27 +55,21 @@ def test_drop_table():
 
 
 def test_add_column():
-    ret = Migrate.ddl.add_column(Category, Category._meta.fields_map["name"].describe(False))
+    ret = Migrate.ddl.add_column(Category, Category._meta.fields_map.get("name").describe(False))
     if isinstance(Migrate.ddl, MysqlDDL):
         assert ret == "ALTER TABLE `category` ADD `name` VARCHAR(200)"
     else:
        assert ret == 'ALTER TABLE "category" ADD "name" VARCHAR(200)'
-    # add unique column
-    ret = Migrate.ddl.add_column(User, User._meta.fields_map["username"].describe(False))
-    if isinstance(Migrate.ddl, MysqlDDL):
-        assert ret == "ALTER TABLE `user` ADD `username` VARCHAR(20) NOT NULL UNIQUE"
-    elif isinstance(Migrate.ddl, PostgresDDL):
-        assert ret == 'ALTER TABLE "user" ADD "username" VARCHAR(20) NOT NULL UNIQUE'
-    else:
-        assert ret == 'ALTER TABLE "user" ADD "username" VARCHAR(20) NOT NULL'
 
 
 def test_modify_column():
     if isinstance(Migrate.ddl, SqliteDDL):
         return
 
-    ret0 = Migrate.ddl.modify_column(Category, Category._meta.fields_map["name"].describe(False))
-    ret1 = Migrate.ddl.modify_column(User, User._meta.fields_map["is_active"].describe(False))
+    ret0 = Migrate.ddl.modify_column(
+        Category, Category._meta.fields_map.get("name").describe(False)
+    )
+    ret1 = Migrate.ddl.modify_column(User, User._meta.fields_map.get("is_active").describe(False))
     if isinstance(Migrate.ddl, MysqlDDL):
         assert ret0 == "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200)"
         assert (
@@ -120,14 +90,14 @@ def test_modify_column():
 def test_alter_column_default():
     if isinstance(Migrate.ddl, SqliteDDL):
         return
-    ret = Migrate.ddl.alter_column_default(User, User._meta.fields_map["intro"].describe(False))
+    ret = Migrate.ddl.alter_column_default(User, User._meta.fields_map.get("intro").describe(False))
     if isinstance(Migrate.ddl, PostgresDDL):
         assert ret == 'ALTER TABLE "user" ALTER COLUMN "intro" SET DEFAULT \'\''
     elif isinstance(Migrate.ddl, MysqlDDL):
         assert ret == "ALTER TABLE `user` ALTER COLUMN `intro` SET DEFAULT ''"
 
     ret = Migrate.ddl.alter_column_default(
-        Category, Category._meta.fields_map["created_at"].describe(False)
+        Category, Category._meta.fields_map.get("created_at").describe(False)
    )
     if isinstance(Migrate.ddl, PostgresDDL):
         assert (
@@ -140,7 +110,7 @@ def test_alter_column_default():
     )
 
     ret = Migrate.ddl.alter_column_default(
-        Product, Product._meta.fields_map["view_num"].describe(False)
+        Product, Product._meta.fields_map.get("view_num").describe(False)
     )
     if isinstance(Migrate.ddl, PostgresDDL):
         assert ret == 'ALTER TABLE "product" ALTER COLUMN "view_num" SET DEFAULT 0'
@@ -151,7 +121,9 @@ def test_alter_column_default():
 def test_alter_column_null():
     if isinstance(Migrate.ddl, (SqliteDDL, MysqlDDL)):
         return
-    ret = Migrate.ddl.alter_column_null(Category, Category._meta.fields_map["name"].describe(False))
+    ret = Migrate.ddl.alter_column_null(
+        Category, Category._meta.fields_map.get("name").describe(False)
+    )
     if isinstance(Migrate.ddl, PostgresDDL):
         assert ret == 'ALTER TABLE "category" ALTER COLUMN "name" DROP NOT NULL'
 
@@ -159,11 +131,11 @@ def test_alter_column_null():
 def test_set_comment():
     if isinstance(Migrate.ddl, (SqliteDDL, MysqlDDL)):
         return
-    ret = Migrate.ddl.set_comment(Category, Category._meta.fields_map["name"].describe(False))
+    ret = Migrate.ddl.set_comment(Category, Category._meta.fields_map.get("name").describe(False))
     assert ret == 'COMMENT ON COLUMN "category"."name" IS NULL'
 
-    ret = Migrate.ddl.set_comment(Category, Category._meta.fields_map["owner"].describe(False))
+    ret = Migrate.ddl.set_comment(Category, Category._meta.fields_map.get("user").describe(False))
-    assert ret == 'COMMENT ON COLUMN "category"."owner_id" IS \'User\''
+    assert ret == 'COMMENT ON COLUMN "category"."user_id" IS \'User\''
 
 
 def test_drop_column():
@@ -179,18 +151,17 @@ def test_add_index():
     index_u = Migrate.ddl.add_index(Category, ["name"], True)
     if isinstance(Migrate.ddl, MysqlDDL):
         assert index == "ALTER TABLE `category` ADD INDEX `idx_category_name_8b0cb9` (`name`)"
-        assert index_u == "ALTER TABLE `category` ADD UNIQUE INDEX `name` (`name`)"
+        assert (
+            index_u == "ALTER TABLE `category` ADD UNIQUE INDEX `uid_category_name_8b0cb9` (`name`)"
+        )
     elif isinstance(Migrate.ddl, PostgresDDL):
-        assert (
-            index == 'CREATE INDEX IF NOT EXISTS "idx_category_name_8b0cb9" ON "category" ("name")'
-        )
-        assert (
-            index_u
-            == 'CREATE UNIQUE INDEX IF NOT EXISTS "uid_category_name_8b0cb9" ON "category" ("name")'
-        )
-    else:
         assert index == 'CREATE INDEX "idx_category_name_8b0cb9" ON "category" ("name")'
         assert index_u == 'CREATE UNIQUE INDEX "uid_category_name_8b0cb9" ON "category" ("name")'
+    else:
+        assert index == 'ALTER TABLE "category" ADD INDEX "idx_category_name_8b0cb9" ("name")'
+        assert (
+            index_u == 'ALTER TABLE "category" ADD UNIQUE INDEX "uid_category_name_8b0cb9" ("name")'
+        )
 
 
 def test_drop_index():
@@ -198,35 +169,38 @@ def test_drop_index():
     ret_u = Migrate.ddl.drop_index(Category, ["name"], True)
     if isinstance(Migrate.ddl, MysqlDDL):
         assert ret == "ALTER TABLE `category` DROP INDEX `idx_category_name_8b0cb9`"
-        assert ret_u == "ALTER TABLE `category` DROP INDEX `name`"
+        assert ret_u == "ALTER TABLE `category` DROP INDEX `uid_category_name_8b0cb9`"
+    elif isinstance(Migrate.ddl, PostgresDDL):
+        assert ret == 'DROP INDEX "idx_category_name_8b0cb9"'
+        assert ret_u == 'DROP INDEX "uid_category_name_8b0cb9"'
     else:
-        assert ret == 'DROP INDEX IF EXISTS "idx_category_name_8b0cb9"'
-        assert ret_u == 'DROP INDEX IF EXISTS "uid_category_name_8b0cb9"'
+        assert ret == 'ALTER TABLE "category" DROP INDEX "idx_category_name_8b0cb9"'
+        assert ret_u == 'ALTER TABLE "category" DROP INDEX "uid_category_name_8b0cb9"'
 
 
 def test_add_fk():
     ret = Migrate.ddl.add_fk(
-        Category, Category._meta.fields_map["owner"].describe(False), User.describe(False)
+        Category, Category._meta.fields_map.get("user").describe(False), User.describe(False)
     )
     if isinstance(Migrate.ddl, MysqlDDL):
         assert (
             ret
-            == "ALTER TABLE `category` ADD CONSTRAINT `fk_category_user_110d4c63` FOREIGN KEY (`owner_id`) REFERENCES `user` (`id`) ON DELETE CASCADE"
+            == "ALTER TABLE `category` ADD CONSTRAINT `fk_category_user_e2e3874c` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE"
         )
     else:
         assert (
             ret
-            == 'ALTER TABLE "category" ADD CONSTRAINT "fk_category_user_110d4c63" FOREIGN KEY ("owner_id") REFERENCES "user" ("id") ON DELETE CASCADE'
+            == 'ALTER TABLE "category" ADD CONSTRAINT "fk_category_user_e2e3874c" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE'
         )
 
 
 def test_drop_fk():
     ret = Migrate.ddl.drop_fk(
-        Category, Category._meta.fields_map["owner"].describe(False), User.describe(False)
+        Category, Category._meta.fields_map.get("user").describe(False), User.describe(False)
     )
     if isinstance(Migrate.ddl, MysqlDDL):
-        assert ret == "ALTER TABLE `category` DROP FOREIGN KEY `fk_category_user_110d4c63`"
+        assert ret == "ALTER TABLE `category` DROP FOREIGN KEY `fk_category_user_e2e3874c`"
     elif isinstance(Migrate.ddl, PostgresDDL):
-        assert ret == 'ALTER TABLE "category" DROP CONSTRAINT IF EXISTS "fk_category_user_110d4c63"'
+        assert ret == 'ALTER TABLE "category" DROP CONSTRAINT "fk_category_user_e2e3874c"'
     else:
-        assert ret == 'ALTER TABLE "category" DROP FOREIGN KEY "fk_category_user_110d4c63"'
+        assert ret == 'ALTER TABLE "category" DROP FOREIGN KEY "fk_category_user_e2e3874c"'
@@ -1,106 +0,0 @@
-from __future__ import annotations
-
-import os
-import re
-from pathlib import Path
-
-from tests._utils import Dialect, run_shell
-
-
-def _append_field(*files: str, name="field_1") -> None:
-    for file in files:
-        p = Path(file)
-        field = f"    {name} = fields.IntField(default=0)"
-        with p.open("a") as f:
-            f.write(os.linesep + field)
-
-
-def test_fake(new_aerich_project):
-    if Dialect.is_sqlite():
-        # TODO: go ahead if sqlite alter-column supported
-        return
-    output = run_shell("aerich init -t settings.TORTOISE_ORM")
-    assert "Success" in output
-    output = run_shell("aerich init-db")
-    assert "Success" in output
-    output = run_shell("aerich --app models_second init-db")
-    assert "Success" in output
-    output = run_shell("pytest _tests.py::test_init_db")
-    assert "error" not in output.lower()
-    _append_field("models.py", "models_second.py")
-    output = run_shell("aerich migrate")
-    assert "Success" in output
-    output = run_shell("aerich --app models_second migrate")
-    assert "Success" in output
-    output = run_shell("aerich upgrade --fake")
-    assert "FAKED" in output
-    output = run_shell("aerich --app models_second upgrade --fake")
-    assert "FAKED" in output
-    output = run_shell("pytest _tests.py::test_fake_field_1")
-    assert "error" not in output.lower()
-    _append_field("models.py", "models_second.py", name="field_2")
-    output = run_shell("aerich migrate")
-    assert "Success" in output
-    output = run_shell("aerich --app models_second migrate")
-    assert "Success" in output
-    output = run_shell("aerich heads")
-    assert "_update.py" in output
-    output = run_shell("aerich upgrade --fake")
-    assert "FAKED" in output
-    output = run_shell("aerich --app models_second upgrade --fake")
-    assert "FAKED" in output
-    output = run_shell("pytest _tests.py::test_fake_field_2")
-    assert "error" not in output.lower()
-    output = run_shell("aerich heads")
-    assert "No available heads." in output
-    output = run_shell("aerich --app models_second heads")
-    assert "No available heads." in output
-    _append_field("models.py", "models_second.py", name="field_3")
-    run_shell("aerich migrate", capture_output=False)
-    run_shell("aerich --app models_second migrate", capture_output=False)
-    run_shell("aerich upgrade --fake", capture_output=False)
-    run_shell("aerich --app models_second upgrade --fake", capture_output=False)
-    output = run_shell("aerich downgrade --fake -v 2 --yes", input="y\n")
-    assert "FAKED" in output
-    output = run_shell("aerich --app models_second downgrade --fake -v 2 --yes", input="y\n")
-    assert "FAKED" in output
-    output = run_shell("aerich heads")
-    assert "No available heads." not in output
-    assert not re.search(r"1_\d+_update\.py", output)
-    assert re.search(r"2_\d+_update\.py", output)
-    output = run_shell("aerich --app models_second heads")
-    assert "No available heads." not in output
-    assert not re.search(r"1_\d+_update\.py", output)
-    assert re.search(r"2_\d+_update\.py", output)
-    output = run_shell("aerich downgrade --fake -v 1 --yes", input="y\n")
-    assert "FAKED" in output
-    output = run_shell("aerich --app models_second downgrade --fake -v 1 --yes", input="y\n")
-    assert "FAKED" in output
-    output = run_shell("aerich heads")
-    assert "No available heads." not in output
-    assert re.search(r"1_\d+_update\.py", output)
-    assert re.search(r"2_\d+_update\.py", output)
-    output = run_shell("aerich --app models_second heads")
-    assert "No available heads." not in output
-    assert re.search(r"1_\d+_update\.py", output)
-    assert re.search(r"2_\d+_update\.py", output)
-    output = run_shell("aerich upgrade --fake")
-    assert "FAKED" in output
-    output = run_shell("aerich --app models_second upgrade --fake")
-    assert "FAKED" in output
-    output = run_shell("aerich heads")
-    assert "No available heads." in output
-    output = run_shell("aerich --app models_second heads")
-    assert "No available heads." in output
-    output = run_shell("aerich downgrade --fake -v 1 --yes", input="y\n")
-    assert "FAKED" in output
-    output = run_shell("aerich --app models_second downgrade --fake -v 1 --yes", input="y\n")
-    assert "FAKED" in output
-    output = run_shell("aerich heads")
-    assert "No available heads." not in output
-    assert re.search(r"1_\d+_update\.py", output)
-    assert re.search(r"2_\d+_update\.py", output)
-    output = run_shell("aerich --app models_second heads")
-    assert "No available heads." not in output
-    assert re.search(r"1_\d+_update\.py", output)
-    assert re.search(r"2_\d+_update\.py", output)
@@ -1,17 +0,0 @@
-from tests._utils import Dialect, run_shell
-
-
-def test_inspect(new_aerich_project):
-    if Dialect.is_sqlite():
-        # TODO: test sqlite after #384 fixed
-        return
-    run_shell("aerich init -t settings.TORTOISE_ORM")
-    run_shell("aerich init-db")
-    ret = run_shell("aerich inspectdb -t product")
-    assert ret.startswith("from tortoise import Model, fields")
-    assert "primary_key=True" in ret
-    assert "fields.DatetimeField" in ret
-    assert "fields.FloatField" in ret
-    assert "fields.UUIDField" in ret
-    if Dialect.is_mysql():
-        assert "db_index=True" in ret
@@ -1,34 +1,13 @@
-from __future__ import annotations
-
-from pathlib import Path
-
 import pytest
-import tortoise
 from pytest_mock import MockerFixture
-from tortoise.indexes import Index
 
-from aerich._compat import tortoise_version_less_than
 from aerich.ddl.mysql import MysqlDDL
 from aerich.ddl.postgres import PostgresDDL
 from aerich.ddl.sqlite import SqliteDDL
 from aerich.exceptions import NotSupportError
-from aerich.migrate import MIGRATE_TEMPLATE, Migrate
+from aerich.migrate import Migrate
 from aerich.utils import get_models_describe
-from tests.indexes import CustomIndex
 
-
-def describe_index(idx: Index) -> Index | dict:
-    # tortoise-orm>=0.24 changes Index desribe to be dict
-    if tortoise_version_less_than("0.24"):
-        return idx
-    if hasattr(idx, "describe"):
-        return idx.describe()
-    return idx
-
-
-# tortoise-orm>=0.21 changes IntField constraints
-# from {"ge": 1, "le": 2147483647} to {"ge": -2147483648, "le": 2147483647}
-MIN_INT = 1 if tortoise.__version__ < "0.21" else -2147483648
 old_models_describe = {
     "models.Category": {
         "name": "models.Category",
@@ -38,7 +17,7 @@ old_models_describe = {
         "description": None,
         "docstring": None,
         "unique_together": [],
-        "indexes": [describe_index(Index(fields=("slug",)))],
+        "indexes": [],
        "pk_field": {
             "name": "id",
             "field_type": "IntField",
@@ -51,7 +30,7 @@ old_models_describe = {
             "default": None,
             "description": None,
             "docstring": None,
-            "constraints": {"ge": MIN_INT, "le": 2147483647},
+            "constraints": {"ge": 1, "le": 2147483647},
             "db_field_types": {"": "INT"},
         },
         "data_fields": [
@@ -118,24 +97,9 @@ old_models_describe = {
                 "default": None,
                 "description": "User",
                 "docstring": None,
-                "constraints": {"ge": MIN_INT, "le": 2147483647},
+                "constraints": {"ge": 1, "le": 2147483647},
                 "db_field_types": {"": "INT"},
             },
-            {
-                "name": "title",
-                "field_type": "CharField",
-                "db_column": "title",
-                "python_type": "str",
-                "generated": False,
-                "nullable": False,
-                "unique": True,
-                "indexed": True,
-                "default": None,
-                "description": None,
-                "docstring": None,
-                "constraints": {"max_length": 20},
-                "db_field_types": {"": "VARCHAR(20)"},
-            },
         ],
         "fk_fields": [
             {
@@ -190,36 +154,21 @@ old_models_describe = {
         "unique_together": [],
         "indexes": [],
         "pk_field": {
-            "name": "slug",
-            "field_type": "CharField",
-            "db_column": "slug",
-            "python_type": "str",
-            "generated": False,
+            "name": "id",
+            "field_type": "IntField",
+            "db_column": "id",
+            "python_type": "int",
+            "generated": True,
             "nullable": False,
             "unique": True,
             "indexed": True,
             "default": None,
             "description": None,
             "docstring": None,
-            "constraints": {"max_length": 10},
-            "db_field_types": {"": "VARCHAR(10)"},
+            "constraints": {"ge": 1, "le": 2147483647},
+            "db_field_types": {"": "INT"},
         },
         "data_fields": [
-            {
-                "name": "name",
-                "field_type": "CharField",
-                "db_column": "name",
-                "python_type": "str",
-                "generated": False,
-                "nullable": False,
-                "unique": True,
-                "indexed": True,
-                "default": None,
-                "description": None,
-                "docstring": None,
-                "constraints": {"max_length": 100},
-                "db_field_types": {"": "VARCHAR(100)"},
-            },
             {
                 "name": "label",
                 "field_type": "CharField",
@@ -285,48 +234,7 @@ old_models_describe = {
         "backward_fk_fields": [],
         "o2o_fields": [],
         "backward_o2o_fields": [],
-        "m2m_fields": [
-            {
-                "name": "category",
-                "field_type": "ManyToManyFieldInstance",
-                "python_type": "models.Category",
-                "generated": False,
-                "nullable": False,
-                "unique": False,
-                "indexed": False,
-                "default": None,
-                "description": None,
-                "docstring": None,
-                "constraints": {},
-                "model_name": "models.Category",
-                "related_name": "configs",
-                "forward_key": "category_id",
-                "backward_key": "config_id",
-                "through": "config_category",
-                "on_delete": "CASCADE",
-                "_generated": False,
-            },
-            {
-                "name": "categories",
-                "field_type": "ManyToManyFieldInstance",
-                "python_type": "models.Category",
-                "generated": False,
-                "nullable": False,
-                "unique": False,
-                "indexed": False,
-                "default": None,
-                "description": None,
-                "docstring": None,
-                "constraints": {},
-                "model_name": "models.Category",
-                "related_name": "config_set",
-                "forward_key": "category_id",
-                "backward_key": "config_id",
-                "through": "config_category_map",
-                "on_delete": "CASCADE",
-                "_generated": False,
-            },
-        ],
+        "m2m_fields": [],
     },
     "models.Email": {
         "name": "models.Email",
@@ -349,7 +257,7 @@ old_models_describe = {
             "default": None,
             "description": None,
             "docstring": None,
-            "constraints": {"ge": MIN_INT, "le": 2147483647},
+            "constraints": {"ge": 1, "le": 2147483647},
             "db_field_types": {"": "INT"},
         },
         "data_fields": [
@@ -368,21 +276,6 @@ old_models_describe = {
                 "constraints": {"max_length": 200},
                 "db_field_types": {"": "VARCHAR(200)"},
             },
-            {
-                "name": "company",
-                "field_type": "CharField",
-                "db_column": "company",
-                "python_type": "str",
-                "generated": False,
-                "nullable": False,
-                "unique": False,
-                "indexed": True,
-                "default": None,
-                "description": None,
-                "docstring": None,
-                "constraints": {"max_length": 100},
-                "db_field_types": {"": "VARCHAR(100)"},
-            },
             {
                 "name": "is_primary",
                 "field_type": "BooleanField",
@@ -396,12 +289,7 @@ old_models_describe = {
                 "description": None,
                 "docstring": None,
                 "constraints": {},
-                "db_field_types": {
-                    "": "BOOL",
-                    "mssql": "BIT",
-                    "oracle": "NUMBER(1)",
-                    "sqlite": "INT",
-                },
+                "db_field_types": {"": "BOOL", "sqlite": "INT"},
             },
             {
                 "name": "user_id",
@@ -415,7 +303,7 @@ old_models_describe = {
                 "default": None,
                 "description": None,
                 "docstring": None,
-                "constraints": {"ge": MIN_INT, "le": 2147483647},
+                "constraints": {"ge": 1, "le": 2147483647},
                 "db_field_types": {"": "INT"},
             },
         ],
@@ -462,7 +350,7 @@ old_models_describe = {
             "default": None,
             "description": None,
             "docstring": None,
-            "constraints": {"ge": MIN_INT, "le": 2147483647},
+            "constraints": {"ge": 1, "le": 2147483647},
             "db_field_types": {"": "INT"},
         },
         "data_fields": [
@@ -481,21 +369,6 @@ old_models_describe = {
                 "constraints": {"max_length": 50},
                 "db_field_types": {"": "VARCHAR(50)"},
             },
-            {
-                "name": "uid",
-                "field_type": "IntField",
-                "db_column": "uuid",
-                "python_type": "int",
-                "generated": False,
-                "nullable": False,
-                "unique": True,
-                "indexed": True,
-                "default": None,
-                "description": None,
-                "docstring": None,
-                "constraints": {"ge": -2147483648, "le": 2147483647},
-                "db_field_types": {"": "INT"},
-            },
             {
                 "name": "view_num",
                 "field_type": "IntField",
@@ -527,9 +400,9 @@ old_models_describe = {
                 "db_field_types": {"": "INT"},
             },
             {
-                "name": "is_review",
+                "name": "is_reviewed",
                 "field_type": "BooleanField",
-                "db_column": "is_review",
+                "db_column": "is_reviewed",
                 "python_type": "bool",
                 "generated": False,
                 "nullable": False,
@@ -539,12 +412,7 @@ old_models_describe = {
                 "description": "Is Reviewed",
                 "docstring": None,
                 "constraints": {},
-                "db_field_types": {
-                    "": "BOOL",
-                    "mssql": "BIT",
-                    "oracle": "NUMBER(1)",
-                    "sqlite": "INT",
-                },
+                "db_field_types": {"": "BOOL", "sqlite": "INT"},
             },
             {
                 "name": "type",
@@ -612,26 +480,6 @@ old_models_describe = {
                 "auto_now_add": True,
                 "auto_now": False,
             },
-            {
-                "name": "is_delete",
-                "field_type": "BooleanField",
-                "db_column": "is_delete",
-                "python_type": "bool",
-                "generated": False,
-                "nullable": False,
-                "unique": False,
-                "indexed": False,
-                "default": False,
-                "description": None,
-                "docstring": None,
-                "constraints": {},
-                "db_field_types": {
-                    "": "BOOL",
-                    "mssql": "BIT",
-                    "oracle": "NUMBER(1)",
-                    "sqlite": "INT",
-                },
-            },
         ],
         "fk_fields": [],
         "backward_fk_fields": [],
@@ -668,10 +516,7 @@ old_models_describe = {
         "description": None,
         "docstring": None,
         "unique_together": [],
-        "indexes": [
-            describe_index(Index(fields=("username", "is_active"))),
-            describe_index(CustomIndex(fields=("is_superuser",))),
-        ],
+        "indexes": [],
         "pk_field": {
             "name": "id",
             "field_type": "IntField",
@@ -684,7 +529,7 @@ old_models_describe = {
             "default": None,
             "description": None,
             "docstring": None,
-            "constraints": {"ge": MIN_INT, "le": 2147483647},
+            "constraints": {"ge": 1, "le": 2147483647},
             "db_field_types": {"": "INT"},
         },
         "data_fields": [
@@ -752,12 +597,7 @@ old_models_describe = {
                 "description": "Is Active",
                 "docstring": None,
                 "constraints": {},
-                "db_field_types": {
-                    "": "BOOL",
-                    "mssql": "BIT",
-                    "oracle": "NUMBER(1)",
-                    "sqlite": "INT",
-                },
+                "db_field_types": {"": "BOOL", "sqlite": "INT"},
             },
             {
                 "name": "is_superuser",
@@ -772,12 +612,7 @@ old_models_describe = {
                 "description": "Is SuperUser",
                 "docstring": None,
                 "constraints": {},
-                "db_field_types": {
-                    "": "BOOL",
-                    "mssql": "BIT",
-                    "oracle": "NUMBER(1)",
-                    "sqlite": "INT",
-                },
+                "db_field_types": {"": "BOOL", "sqlite": "INT"},
             },
             {
                 "name": "avatar",
@@ -879,7 +714,7 @@ old_models_describe = {
             "default": None,
             "description": None,
             "docstring": None,
-            "constraints": {"ge": MIN_INT, "le": 2147483647},
+            "constraints": {"ge": 1, "le": 2147483647},
             "db_field_types": {"": "INT"},
         },
         "data_fields": [
@@ -942,40 +777,26 @@ def test_migrate(mocker: MockerFixture):
     """
     models.py diff with old_models.py
     - change email pk: id -> email_id
-    - change product pk field type: IntField -> BigIntField
-    - change config pk field attribute: max_length=10 -> max_length=20
     - add field: Email.address
-    - add fk field: Config.user
-    - drop fk field: Email.user
+    - add fk: Config.user
+    - drop fk: Email.user
     - drop field: User.avatar
     - add index: Email.email
-    - add unique to indexed field: Email.company
-    - change index type for indexed field: Email.slug
     - add many to many: Email.users
-    - add one to one: Email.config
-    - remove unique: Category.title
-    - add unique: User.username
+    - remove unique: User.username
     - change column: length User.password
     - add unique_together: (name,type) of Product
-    - add one more many to many field: Product.users
-    - drop unique field: Config.name
     - alter default: Config.status
     - rename column: Product.image -> Product.pic
-    - rename column: Product.is_review -> Product.is_reviewed
-    - rename column: Product.is_delete -> Product.is_deleted
-    - rename fk column: Category.user -> Category.owner
     """
-    mocker.patch("asyncclick.prompt", side_effect=(True, True, True, True))
+    mocker.patch("click.prompt", side_effect=(True,))
 
     models_describe = get_models_describe("models")
     Migrate.app = "models"
     if isinstance(Migrate.ddl, SqliteDDL):
         with pytest.raises(NotSupportError):
             Migrate.diff_models(old_models_describe, models_describe)
-        Migrate.upgrade_operators.clear()
-        with pytest.raises(NotSupportError):
             Migrate.diff_models(models_describe, old_models_describe, False)
-        Migrate.downgrade_operators.clear()
     else:
         Migrate.diff_models(old_models_describe, models_describe)
         Migrate.diff_models(models_describe, old_models_describe, False)
@@ -984,205 +805,142 @@ def test_migrate(mocker: MockerFixture):
         expected_upgrade_operators = {
             "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200)",
             "ALTER TABLE `category` MODIFY COLUMN `slug` VARCHAR(100) NOT NULL",
-            "ALTER TABLE `category` DROP INDEX `title`",
-            "ALTER TABLE `category` RENAME COLUMN `user_id` TO `owner_id`",
-            "ALTER TABLE `category` ADD CONSTRAINT `fk_category_user_110d4c63` FOREIGN KEY (`owner_id`) REFERENCES `user` (`id`) ON DELETE CASCADE",
-            "ALTER TABLE `category` ADD FULLTEXT INDEX `idx_category_slug_e9bcff` (`slug`)",
-            "ALTER TABLE `category` DROP INDEX `idx_category_slug_e9bcff`",
-            "ALTER TABLE `email` DROP COLUMN `user_id`",
-            "ALTER TABLE `config` DROP COLUMN `name`",
-            "ALTER TABLE `config` DROP INDEX `name`",
             "ALTER TABLE `config` ADD `user_id` INT NOT NULL COMMENT 'User'",
             "ALTER TABLE `config` ADD CONSTRAINT `fk_config_user_17daa970` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE",
             "ALTER TABLE `config` ALTER COLUMN `status` DROP DEFAULT",
+            "ALTER TABLE `config` MODIFY COLUMN `value` JSON NOT NULL",
             "ALTER TABLE `email` ADD `address` VARCHAR(200) NOT NULL",
-            "ALTER TABLE `email` ADD CONSTRAINT `fk_email_config_88e28c1b` FOREIGN KEY (`config_id`) REFERENCES `config` (`slug`) ON DELETE CASCADE",
-            "ALTER TABLE `email` ADD `config_id` VARCHAR(20) NOT NULL UNIQUE",
-            "ALTER TABLE `email` DROP INDEX `idx_email_company_1c9234`, ADD UNIQUE (`company`)",
+            "ALTER TABLE `email` DROP COLUMN `user_id`",
             "ALTER TABLE `configs` RENAME TO `config`",
-            "ALTER TABLE `product` DROP COLUMN `uuid`",
-            "ALTER TABLE `product` DROP INDEX `uuid`",
             "ALTER TABLE `product` RENAME COLUMN `image` TO `pic`",
-            "ALTER TABLE `product` ADD `price` DOUBLE",
-            "ALTER TABLE `product` ADD `no` CHAR(36) NOT NULL",
             "ALTER TABLE `email` RENAME COLUMN `id` TO `email_id`",
             "ALTER TABLE `product` ADD INDEX `idx_product_name_869427` (`name`, `type_db_alias`)",
-            "ALTER TABLE `product` ADD INDEX `idx_product_no_e4d701` (`no`)",
             "ALTER TABLE `email` ADD INDEX `idx_email_email_4a1a33` (`email`)",
             "ALTER TABLE `product` ADD UNIQUE INDEX `uid_product_name_869427` (`name`, `type_db_alias`)",
             "ALTER TABLE `product` ALTER COLUMN `view_num` SET DEFAULT 0",
-            "ALTER TABLE `product` RENAME COLUMN `is_delete` TO `is_deleted`",
-            "ALTER TABLE `product` RENAME COLUMN `is_review` TO `is_reviewed`",
-            "ALTER TABLE `product` MODIFY COLUMN `id` BIGINT NOT NULL",
+            "ALTER TABLE `product` MODIFY COLUMN `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6)",
+            "ALTER TABLE `product` MODIFY COLUMN `is_reviewed` BOOL NOT NULL COMMENT 'Is Reviewed'",
             "ALTER TABLE `user` DROP COLUMN `avatar`",
             "ALTER TABLE `user` MODIFY COLUMN `password` VARCHAR(100) NOT NULL",
+            "ALTER TABLE `user` MODIFY COLUMN `intro` LONGTEXT NOT NULL",
+            "ALTER TABLE `user` MODIFY COLUMN `last_login` DATETIME(6) NOT NULL COMMENT 'Last Login'",
+            "ALTER TABLE `user` MODIFY COLUMN `is_active` BOOL NOT NULL COMMENT 'Is Active' DEFAULT 1",
+            "ALTER TABLE `user` MODIFY COLUMN `is_superuser` BOOL NOT NULL COMMENT 'Is SuperUser' DEFAULT 0",
             "ALTER TABLE `user` MODIFY COLUMN `longitude` DECIMAL(10,8) NOT NULL",
-            "ALTER TABLE `user` ADD UNIQUE INDEX `username` (`username`)",
+            "ALTER TABLE `user` ADD UNIQUE INDEX `uid_user_usernam_9987ab` (`username`)",
             "CREATE TABLE `email_user` (\n    `email_id` INT NOT NULL REFERENCES `email` (`email_id`) ON DELETE CASCADE,\n    `user_id` INT NOT NULL REFERENCES `user` (`id`) ON DELETE CASCADE\n) CHARACTER SET utf8mb4",
             "CREATE TABLE IF NOT EXISTS `newmodel` (\n    `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT,\n    `name` VARCHAR(50) NOT NULL\n) CHARACTER SET utf8mb4",
-            "CREATE TABLE `product_user` (\n    `product_id` BIGINT NOT NULL REFERENCES `product` (`id`) ON DELETE CASCADE,\n    `user_id` INT NOT NULL REFERENCES `user` (`id`) ON DELETE CASCADE\n) CHARACTER SET utf8mb4",
-            "CREATE TABLE `config_category_map` (\n    `category_id` INT NOT NULL REFERENCES `category` (`id`) ON DELETE CASCADE,\n    `config_id` VARCHAR(20) NOT NULL REFERENCES `config` (`slug`) ON DELETE CASCADE\n) CHARACTER SET utf8mb4",
-            "DROP TABLE IF EXISTS `config_category`",
-            "ALTER TABLE `config` MODIFY COLUMN `slug` VARCHAR(20) NOT NULL",
+            "ALTER TABLE `category` MODIFY COLUMN `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6)",
+            "ALTER TABLE `product` MODIFY COLUMN `body` LONGTEXT NOT NULL",
+            "ALTER TABLE `email` MODIFY COLUMN `is_primary` BOOL NOT NULL DEFAULT 0",
         }
-        upgrade_operators = set(Migrate.upgrade_operators)
-        upgrade_more_than_expected = upgrade_operators - expected_upgrade_operators
-        assert not upgrade_more_than_expected
-        upgrade_less_than_expected = expected_upgrade_operators - upgrade_operators
-        assert not upgrade_less_than_expected
 
         expected_downgrade_operators = {
"ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200) NOT NULL",
|
"ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200) NOT NULL",
|
||||||
"ALTER TABLE `category` MODIFY COLUMN `slug` VARCHAR(200) NOT NULL",
|
"ALTER TABLE `category` MODIFY COLUMN `slug` VARCHAR(200) NOT NULL",
|
||||||
"ALTER TABLE `category` ADD UNIQUE INDEX `title` (`title`)",
|
"ALTER TABLE `config` DROP COLUMN `user_id`",
|
||||||
"ALTER TABLE `category` RENAME COLUMN `owner_id` TO `user_id`",
|
|
||||||
"ALTER TABLE `category` DROP FOREIGN KEY `fk_category_user_110d4c63`",
|
|
||||||
"ALTER TABLE `category` ADD INDEX `idx_category_slug_e9bcff` (`slug`)",
|
|
||||||
"ALTER TABLE `category` DROP INDEX `idx_category_slug_e9bcff`",
|
|
||||||
"ALTER TABLE `config` ADD `name` VARCHAR(100) NOT NULL UNIQUE",
|
|
||||||
"ALTER TABLE `config` ADD UNIQUE INDEX `name` (`name`)",
|
|
||||||
"ALTER TABLE `config` DROP FOREIGN KEY `fk_config_user_17daa970`",
|
"ALTER TABLE `config` DROP FOREIGN KEY `fk_config_user_17daa970`",
|
||||||
"ALTER TABLE `config` ALTER COLUMN `status` SET DEFAULT 1",
|
"ALTER TABLE `config` ALTER COLUMN `status` SET DEFAULT 1",
|
||||||
"ALTER TABLE `config` DROP COLUMN `user_id`",
|
|
||||||
"ALTER TABLE `config` MODIFY COLUMN `slug` VARCHAR(10) NOT NULL",
|
|
||||||
"ALTER TABLE `config` RENAME TO `configs`",
|
|
||||||
"ALTER TABLE `email` ADD `user_id` INT NOT NULL",
|
"ALTER TABLE `email` ADD `user_id` INT NOT NULL",
|
||||||
"ALTER TABLE `email` DROP COLUMN `address`",
|
"ALTER TABLE `email` DROP COLUMN `address`",
|
||||||
"ALTER TABLE `email` DROP COLUMN `config_id`",
|
"ALTER TABLE `config` RENAME TO `configs`",
|
||||||
"ALTER TABLE `email` DROP FOREIGN KEY `fk_email_config_88e28c1b`",
|
|
||||||
"ALTER TABLE `email` RENAME COLUMN `email_id` TO `id`",
|
|
||||||
"ALTER TABLE `email` DROP INDEX `company`, ADD INDEX (`idx_email_company_1c9234`)",
|
|
||||||
"ALTER TABLE `email` DROP INDEX `idx_email_email_4a1a33`",
|
|
||||||
"ALTER TABLE `product` RENAME COLUMN `pic` TO `image`",
|
"ALTER TABLE `product` RENAME COLUMN `pic` TO `image`",
|
||||||
"ALTER TABLE `product` ADD `uuid` INT NOT NULL UNIQUE",
|
"ALTER TABLE `email` RENAME COLUMN `email_id` TO `id`",
|
||||||
"ALTER TABLE `product` ADD UNIQUE INDEX `uuid` (`uuid`)",
|
|
||||||
"ALTER TABLE `product` DROP INDEX `idx_product_name_869427`",
|
"ALTER TABLE `product` DROP INDEX `idx_product_name_869427`",
|
||||||
"ALTER TABLE `product` DROP COLUMN `price`",
|
"ALTER TABLE `email` DROP INDEX `idx_email_email_4a1a33`",
|
||||||
"ALTER TABLE `product` DROP COLUMN `no`",
|
|
||||||
"ALTER TABLE `product` DROP INDEX `uid_product_name_869427`",
|
"ALTER TABLE `product` DROP INDEX `uid_product_name_869427`",
|
||||||
"ALTER TABLE `product` DROP INDEX `idx_product_no_e4d701`",
|
|
||||||
"ALTER TABLE `product` ALTER COLUMN `view_num` DROP DEFAULT",
|
"ALTER TABLE `product` ALTER COLUMN `view_num` DROP DEFAULT",
|
||||||
"ALTER TABLE `product` RENAME COLUMN `is_deleted` TO `is_delete`",
|
|
||||||
"ALTER TABLE `product` RENAME COLUMN `is_reviewed` TO `is_review`",
|
|
||||||
"ALTER TABLE `product` MODIFY COLUMN `id` INT NOT NULL",
|
|
||||||
"ALTER TABLE `user` ADD `avatar` VARCHAR(200) NOT NULL DEFAULT ''",
|
"ALTER TABLE `user` ADD `avatar` VARCHAR(200) NOT NULL DEFAULT ''",
|
||||||
"ALTER TABLE `user` DROP INDEX `username`",
|
"ALTER TABLE `user` DROP INDEX `idx_user_usernam_9987ab`",
|
||||||
"ALTER TABLE `user` MODIFY COLUMN `password` VARCHAR(200) NOT NULL",
|
"ALTER TABLE `user` MODIFY COLUMN `password` VARCHAR(200) NOT NULL",
|
||||||
"DROP TABLE IF EXISTS `email_user`",
|
"DROP TABLE IF EXISTS `email_user`",
|
||||||
"DROP TABLE IF EXISTS `newmodel`",
|
"DROP TABLE IF EXISTS `newmodel`",
|
||||||
"DROP TABLE IF EXISTS `product_user`",
|
"ALTER TABLE `user` MODIFY COLUMN `intro` LONGTEXT NOT NULL",
|
||||||
|
"ALTER TABLE `config` MODIFY COLUMN `value` TEXT NOT NULL",
|
||||||
|
"ALTER TABLE `category` MODIFY COLUMN `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6)",
|
||||||
|
"ALTER TABLE `product` MODIFY COLUMN `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6)",
|
||||||
|
"ALTER TABLE `product` MODIFY COLUMN `is_reviewed` BOOL NOT NULL COMMENT 'Is Reviewed'",
|
||||||
|
"ALTER TABLE `user` MODIFY COLUMN `last_login` DATETIME(6) NOT NULL COMMENT 'Last Login'",
|
||||||
|
"ALTER TABLE `user` MODIFY COLUMN `is_active` BOOL NOT NULL COMMENT 'Is Active' DEFAULT 1",
|
||||||
|
"ALTER TABLE `user` MODIFY COLUMN `is_superuser` BOOL NOT NULL COMMENT 'Is SuperUser' DEFAULT 0",
|
||||||
"ALTER TABLE `user` MODIFY COLUMN `longitude` DECIMAL(12,9) NOT NULL",
|
"ALTER TABLE `user` MODIFY COLUMN `longitude` DECIMAL(12,9) NOT NULL",
|
||||||
"CREATE TABLE `config_category` (\n `config_id` VARCHAR(20) NOT NULL REFERENCES `config` (`slug`) ON DELETE CASCADE,\n `category_id` INT NOT NULL REFERENCES `category` (`id`) ON DELETE CASCADE\n) CHARACTER SET utf8mb4",
|
"ALTER TABLE `product` MODIFY COLUMN `body` LONGTEXT NOT NULL",
|
||||||
"DROP TABLE IF EXISTS `config_category_map`",
|
"ALTER TABLE `email` MODIFY COLUMN `is_primary` BOOL NOT NULL DEFAULT 0",
|
||||||
}
|
}
|
||||||
downgrade_operators = set(Migrate.downgrade_operators)
|
assert not set(Migrate.upgrade_operators).symmetric_difference(expected_upgrade_operators)
|
||||||
downgrade_more_than_expected = downgrade_operators - expected_downgrade_operators
|
|
||||||
assert not downgrade_more_than_expected
|
assert not set(Migrate.downgrade_operators).symmetric_difference(
|
||||||
downgrade_less_than_expected = expected_downgrade_operators - downgrade_operators
|
expected_downgrade_operators
|
||||||
assert not downgrade_less_than_expected
|
)
|
||||||
|
|
||||||
     elif isinstance(Migrate.ddl, PostgresDDL):
         expected_upgrade_operators = {
-            'DROP INDEX IF EXISTS "uid_category_title_f7fc03"',
             'ALTER TABLE "category" ALTER COLUMN "name" DROP NOT NULL',
             'ALTER TABLE "category" ALTER COLUMN "slug" TYPE VARCHAR(100) USING "slug"::VARCHAR(100)',
-            'ALTER TABLE "category" RENAME COLUMN "user_id" TO "owner_id"',
-            'ALTER TABLE "category" ADD CONSTRAINT "fk_category_user_110d4c63" FOREIGN KEY ("owner_id") REFERENCES "user" ("id") ON DELETE CASCADE',
-            'CREATE INDEX IF NOT EXISTS "idx_category_slug_e9bcff" ON "category" USING HASH ("slug")',
-            'DROP INDEX IF EXISTS "idx_category_slug_e9bcff"',
-            'ALTER TABLE "configs" RENAME TO "config"',
-            'ALTER TABLE "config" DROP COLUMN "name"',
-            'DROP INDEX IF EXISTS "uid_config_name_2c83c8"',
+            'ALTER TABLE "category" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ',
             'ALTER TABLE "config" ADD "user_id" INT NOT NULL',
             'ALTER TABLE "config" ADD CONSTRAINT "fk_config_user_17daa970" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE',
             'ALTER TABLE "config" ALTER COLUMN "status" DROP DEFAULT',
-            'ALTER TABLE "config" ALTER COLUMN "slug" TYPE VARCHAR(20) USING "slug"::VARCHAR(20)',
-            'ALTER TABLE "email" ADD "config_id" VARCHAR(20) NOT NULL UNIQUE',
+            'ALTER TABLE "config" ALTER COLUMN "value" TYPE JSONB USING "value"::JSONB',
+            'ALTER TABLE "configs" RENAME TO "config"',
             'ALTER TABLE "email" ADD "address" VARCHAR(200) NOT NULL',
-            'ALTER TABLE "email" RENAME COLUMN "id" TO "email_id"',
             'ALTER TABLE "email" DROP COLUMN "user_id"',
-            'ALTER TABLE "email" ADD CONSTRAINT "fk_email_config_88e28c1b" FOREIGN KEY ("config_id") REFERENCES "config" ("slug") ON DELETE CASCADE',
-            'DROP INDEX IF EXISTS "idx_email_company_1c9234"',
-            'CREATE UNIQUE INDEX IF NOT EXISTS "uid_email_company_1c9234" ON "email" ("company")',
-            'DROP INDEX IF EXISTS "uid_product_uuid_d33c18"',
-            'ALTER TABLE "product" DROP COLUMN "uuid"',
+            'ALTER TABLE "email" RENAME COLUMN "id" TO "email_id"',
+            'ALTER TABLE "email" ALTER COLUMN "is_primary" TYPE BOOL USING "is_primary"::BOOL',
             'ALTER TABLE "product" ALTER COLUMN "view_num" SET DEFAULT 0',
             'ALTER TABLE "product" RENAME COLUMN "image" TO "pic"',
-            'ALTER TABLE "product" RENAME COLUMN "is_review" TO "is_reviewed"',
-            'ALTER TABLE "product" RENAME COLUMN "is_delete" TO "is_deleted"',
-            'ALTER TABLE "product" ADD "price" DOUBLE PRECISION',
-            'ALTER TABLE "product" ADD "no" UUID NOT NULL',
-            'ALTER TABLE "product" ALTER COLUMN "id" TYPE BIGINT USING "id"::BIGINT',
+            'ALTER TABLE "product" ALTER COLUMN "is_reviewed" TYPE BOOL USING "is_reviewed"::BOOL',
+            'ALTER TABLE "product" ALTER COLUMN "body" TYPE TEXT USING "body"::TEXT',
+            'ALTER TABLE "product" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ',
             'ALTER TABLE "user" ALTER COLUMN "password" TYPE VARCHAR(100) USING "password"::VARCHAR(100)',
             'ALTER TABLE "user" DROP COLUMN "avatar"',
+            'ALTER TABLE "user" ALTER COLUMN "is_superuser" TYPE BOOL USING "is_superuser"::BOOL',
+            'ALTER TABLE "user" ALTER COLUMN "last_login" TYPE TIMESTAMPTZ USING "last_login"::TIMESTAMPTZ',
+            'ALTER TABLE "user" ALTER COLUMN "intro" TYPE TEXT USING "intro"::TEXT',
+            'ALTER TABLE "user" ALTER COLUMN "is_active" TYPE BOOL USING "is_active"::BOOL',
             'ALTER TABLE "user" ALTER COLUMN "longitude" TYPE DECIMAL(10,8) USING "longitude"::DECIMAL(10,8)',
-            'CREATE INDEX IF NOT EXISTS "idx_product_name_869427" ON "product" ("name", "type_db_alias")',
-            'CREATE INDEX IF NOT EXISTS "idx_email_email_4a1a33" ON "email" ("email")',
-            'CREATE INDEX IF NOT EXISTS "idx_product_no_e4d701" ON "product" ("no")',
+            'CREATE INDEX "idx_product_name_869427" ON "product" ("name", "type_db_alias")',
+            'CREATE INDEX "idx_email_email_4a1a33" ON "email" ("email")',
             'CREATE TABLE "email_user" (\n    "email_id" INT NOT NULL REFERENCES "email" ("email_id") ON DELETE CASCADE,\n    "user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE\n)',
             'CREATE TABLE IF NOT EXISTS "newmodel" (\n    "id" SERIAL NOT NULL PRIMARY KEY,\n    "name" VARCHAR(50) NOT NULL\n);\nCOMMENT ON COLUMN "config"."user_id" IS \'User\'',
-            'CREATE UNIQUE INDEX IF NOT EXISTS "uid_product_name_869427" ON "product" ("name", "type_db_alias")',
-            'CREATE UNIQUE INDEX IF NOT EXISTS "uid_user_usernam_9987ab" ON "user" ("username")',
-            'CREATE TABLE "product_user" (\n    "product_id" BIGINT NOT NULL REFERENCES "product" ("id") ON DELETE CASCADE,\n    "user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE\n)',
-            'CREATE TABLE "config_category_map" (\n    "category_id" INT NOT NULL REFERENCES "category" ("id") ON DELETE CASCADE,\n    "config_id" VARCHAR(20) NOT NULL REFERENCES "config" ("slug") ON DELETE CASCADE\n)',
-            'DROP TABLE IF EXISTS "config_category"',
+            'CREATE UNIQUE INDEX "uid_product_name_869427" ON "product" ("name", "type_db_alias")',
+            'CREATE UNIQUE INDEX "uid_user_usernam_9987ab" ON "user" ("username")',
         }
-        upgrade_operators = set(Migrate.upgrade_operators)
-        upgrade_more_than_expected = upgrade_operators - expected_upgrade_operators
-        assert not upgrade_more_than_expected
-        upgrade_less_than_expected = expected_upgrade_operators - upgrade_operators
-        assert not upgrade_less_than_expected
 
         expected_downgrade_operators = {
-            'CREATE UNIQUE INDEX IF NOT EXISTS "uid_category_title_f7fc03" ON "category" ("title")',
             'ALTER TABLE "category" ALTER COLUMN "name" SET NOT NULL',
             'ALTER TABLE "category" ALTER COLUMN "slug" TYPE VARCHAR(200) USING "slug"::VARCHAR(200)',
-            'ALTER TABLE "category" RENAME COLUMN "owner_id" TO "user_id"',
-            'ALTER TABLE "category" DROP CONSTRAINT IF EXISTS "fk_category_user_110d4c63"',
-            'DROP INDEX IF EXISTS "idx_category_slug_e9bcff"',
-            'CREATE INDEX IF NOT EXISTS "idx_category_slug_e9bcff" ON "category" ("slug")',
-            'ALTER TABLE "config" ADD "name" VARCHAR(100) NOT NULL UNIQUE',
-            'CREATE UNIQUE INDEX IF NOT EXISTS "uid_config_name_2c83c8" ON "config" ("name")',
+            'ALTER TABLE "category" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ',
             'ALTER TABLE "config" ALTER COLUMN "status" SET DEFAULT 1',
-            'ALTER TABLE "config" DROP CONSTRAINT IF EXISTS "fk_config_user_17daa970"',
-            'ALTER TABLE "config" RENAME TO "configs"',
             'ALTER TABLE "config" DROP COLUMN "user_id"',
-            'ALTER TABLE "config" ALTER COLUMN "slug" TYPE VARCHAR(10) USING "slug"::VARCHAR(10)',
+            'ALTER TABLE "config" DROP CONSTRAINT "fk_config_user_17daa970"',
+            'ALTER TABLE "config" RENAME TO "configs"',
+            'ALTER TABLE "config" ALTER COLUMN "value" TYPE JSONB USING "value"::JSONB',
             'ALTER TABLE "email" ADD "user_id" INT NOT NULL',
             'ALTER TABLE "email" DROP COLUMN "address"',
             'ALTER TABLE "email" RENAME COLUMN "email_id" TO "id"',
-            'ALTER TABLE "email" DROP COLUMN "config_id"',
-            'ALTER TABLE "email" DROP CONSTRAINT IF EXISTS "fk_email_config_88e28c1b"',
-            'CREATE INDEX IF NOT EXISTS "idx_email_company_1c9234" ON "email" ("company")',
-            'DROP INDEX IF EXISTS "uid_email_company_1c9234"',
-            'ALTER TABLE "product" ADD "uuid" INT NOT NULL UNIQUE',
-            'CREATE UNIQUE INDEX IF NOT EXISTS "uid_product_uuid_d33c18" ON "product" ("uuid")',
+            'ALTER TABLE "email" ALTER COLUMN "is_primary" TYPE BOOL USING "is_primary"::BOOL',
             'ALTER TABLE "product" ALTER COLUMN "view_num" DROP DEFAULT',
             'ALTER TABLE "product" RENAME COLUMN "pic" TO "image"',
-            'ALTER TABLE "product" RENAME COLUMN "is_deleted" TO "is_delete"',
-            'ALTER TABLE "product" RENAME COLUMN "is_reviewed" TO "is_review"',
-            'ALTER TABLE "product" DROP COLUMN "price"',
-            'ALTER TABLE "product" DROP COLUMN "no"',
-            'ALTER TABLE "product" ALTER COLUMN "id" TYPE INT USING "id"::INT',
             'ALTER TABLE "user" ADD "avatar" VARCHAR(200) NOT NULL DEFAULT \'\'',
             'ALTER TABLE "user" ALTER COLUMN "password" TYPE VARCHAR(200) USING "password"::VARCHAR(200)',
+            'ALTER TABLE "user" ALTER COLUMN "last_login" TYPE TIMESTAMPTZ USING "last_login"::TIMESTAMPTZ',
+            'ALTER TABLE "user" ALTER COLUMN "is_superuser" TYPE BOOL USING "is_superuser"::BOOL',
+            'ALTER TABLE "user" ALTER COLUMN "is_active" TYPE BOOL USING "is_active"::BOOL',
+            'ALTER TABLE "user" ALTER COLUMN "intro" TYPE TEXT USING "intro"::TEXT',
             'ALTER TABLE "user" ALTER COLUMN "longitude" TYPE DECIMAL(12,9) USING "longitude"::DECIMAL(12,9)',
-            'DROP TABLE IF EXISTS "product_user"',
-            'DROP INDEX IF EXISTS "idx_product_name_869427"',
-            'DROP INDEX IF EXISTS "idx_email_email_4a1a33"',
-            'DROP INDEX IF EXISTS "uid_user_usernam_9987ab"',
-            'DROP INDEX IF EXISTS "uid_product_name_869427"',
-            'DROP INDEX IF EXISTS "idx_product_no_e4d701"',
+            'ALTER TABLE "product" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ',
+            'ALTER TABLE "product" ALTER COLUMN "is_reviewed" TYPE BOOL USING "is_reviewed"::BOOL',
+            'ALTER TABLE "product" ALTER COLUMN "body" TYPE TEXT USING "body"::TEXT',
+            'DROP INDEX "idx_product_name_869427"',
+            'DROP INDEX "idx_email_email_4a1a33"',
+            'DROP INDEX "idx_user_usernam_9987ab"',
+            'DROP INDEX "uid_product_name_869427"',
             'DROP TABLE IF EXISTS "email_user"',
             'DROP TABLE IF EXISTS "newmodel"',
-            'CREATE TABLE "config_category" (\n    "config_id" VARCHAR(20) NOT NULL REFERENCES "config" ("slug") ON DELETE CASCADE,\n    "category_id" INT NOT NULL REFERENCES "category" ("id") ON DELETE CASCADE\n)',
-            'DROP TABLE IF EXISTS "config_category_map"',
         }
-        downgrade_operators = set(Migrate.downgrade_operators)
-        downgrade_more_than_expected = downgrade_operators - expected_downgrade_operators
-        assert not downgrade_more_than_expected
-        downgrade_less_than_expected = expected_downgrade_operators - downgrade_operators
-        assert not downgrade_less_than_expected
+        assert not set(Migrate.upgrade_operators).symmetric_difference(expected_upgrade_operators)
+        assert not set(Migrate.downgrade_operators).symmetric_difference(
+            expected_downgrade_operators
+        )
 
     elif isinstance(Migrate.ddl, SqliteDDL):
         assert Migrate.upgrade_operators == []
@@ -1200,7 +958,7 @@ def test_sort_all_version_files(mocker):
         ],
     )
 
-    Migrate.migrate_location = Path(".")
+    Migrate.migrate_location = "."
 
     assert Migrate.get_all_version_files() == [
         "1_datetime_update.py",
@@ -1208,39 +966,3 @@ def test_sort_all_version_files(mocker):
         "10_datetime_update.py",
         "11_datetime_update.py",
     ]
-
-
-def test_sort_files_containing_non_migrations(mocker):
-    mocker.patch(
-        "os.listdir",
-        return_value=[
-            "1_datetime_update.py",
-            "11_datetime_update.py",
-            "10_datetime_update.py",
-            "2_datetime_update.py",
-            "not_a_migration.py",
-            "999.py",
-            "123foo_not_a_migration.py",
-        ],
-    )
-
-    Migrate.migrate_location = Path(".")
-
-    assert Migrate.get_all_version_files() == [
-        "1_datetime_update.py",
-        "2_datetime_update.py",
-        "10_datetime_update.py",
-        "11_datetime_update.py",
-    ]
-
-
-async def test_empty_migration(mocker, tmp_path: Path) -> None:
-    mocker.patch("os.listdir", return_value=[])
-    Migrate.app = "foo"
-    expected_content = MIGRATE_TEMPLATE.format(upgrade_sql="", downgrade_sql="")
-    Migrate.migrate_location = tmp_path
-
-    migration_file = await Migrate.migrate("update", True)
-
-    f = tmp_path / migration_file
-    assert f.read_text() == expected_content
@@ -1,18 +0,0 @@
-import subprocess  # nosec
-from pathlib import Path
-
-from aerich.version import __version__
-from tests._utils import chdir, run_shell
-
-
-def test_python_m_aerich():
-    assert __version__ in run_shell("python -m aerich --version")
-
-
-def test_poetry_add(tmp_path: Path):
-    package = Path(__file__).parent.resolve().parent
-    with chdir(tmp_path):
-        subprocess.run(["poetry", "new", "foo"])  # nosec
-        with chdir("foo"):
-            r = subprocess.run(["poetry", "add", package])  # nosec
-            assert r.returncode == 0
@ -1,213 +0,0 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import contextlib
|
|
||||||
import os
|
|
||||||
import platform
|
|
||||||
import shlex
|
|
||||||
import shutil
|
|
||||||
import subprocess
|
|
||||||
from collections.abc import Generator
|
|
||||||
from contextlib import contextmanager
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
from tests._utils import Dialect, chdir, copy_files
|
|
||||||
|
|
||||||
|
|
||||||
def run_aerich(cmd: str) -> subprocess.CompletedProcess | None:
|
|
||||||
if not cmd.startswith("poetry") and not cmd.startswith("python"):
|
|
||||||
if not cmd.startswith("aerich"):
|
|
||||||
cmd = "aerich " + cmd
|
|
||||||
if platform.system() == "Windows":
|
|
||||||
cmd = "python -m " + cmd
|
|
||||||
r = None
|
|
||||||
with contextlib.suppress(subprocess.TimeoutExpired):
|
|
||||||
r = subprocess.run(shlex.split(cmd), timeout=2)
|
|
||||||
return r
|
|
||||||
|
|
||||||
|
|
||||||
def run_shell(cmd: str) -> subprocess.CompletedProcess:
|
|
||||||
envs = dict(os.environ, PYTHONPATH=".")
|
|
||||||
return subprocess.run(shlex.split(cmd), env=envs)
|
|
||||||
|
|
||||||
|
|
||||||
def _get_empty_db() -> Path:
|
|
||||||
if (db_file := Path("db.sqlite3")).exists():
|
|
||||||
db_file.unlink()
|
|
||||||
return db_file
|
|
||||||
|
|
||||||
|
|
||||||
@contextmanager
|
|
||||||
def prepare_sqlite_project(tmp_path: Path) -> Generator[tuple[Path, str]]:
|
|
||||||
test_dir = Path(__file__).parent
|
|
||||||
asset_dir = test_dir / "assets" / "sqlite_migrate"
|
|
||||||
with chdir(tmp_path):
|
|
||||||
files = ("models.py", "settings.py", "_tests.py")
|
|
||||||
copy_files(*(asset_dir / f for f in files), target_dir=Path())
|
|
||||||
models_py, settings_py, test_py = (Path(f) for f in files)
|
|
||||||
copy_files(asset_dir / "conftest_.py", target_dir=Path("conftest.py"))
|
|
||||||
_get_empty_db()
|
|
||||||
yield models_py, models_py.read_text("utf-8")
|
|
||||||
|
|
||||||
|
|
||||||
def test_close_tortoise_connections_patch(tmp_path: Path) -> None:
|
|
||||||
if not Dialect.is_sqlite():
|
|
||||||
return
|
|
||||||
with prepare_sqlite_project(tmp_path) as (models_py, models_text):
|
|
||||||
run_aerich("aerich init -t settings.TORTOISE_ORM")
|
|
||||||
r = run_aerich("aerich init-db")
|
|
||||||
assert r is not None
|
|
||||||
|
|
||||||
|
|
||||||
def test_sqlite_migrate_alter_indexed_unique(tmp_path: Path) -> None:
|
|
||||||
if not Dialect.is_sqlite():
|
|
||||||
return
|
|
||||||
with prepare_sqlite_project(tmp_path) as (models_py, models_text):
|
|
||||||
models_py.write_text(models_text.replace("db_index=False", "db_index=True"))
|
|
||||||
run_aerich("aerich init -t settings.TORTOISE_ORM")
|
|
||||||
run_aerich("aerich init-db")
|
|
||||||
r = run_shell("pytest -s _tests.py::test_allow_duplicate")
|
|
||||||
assert r.returncode == 0
|
|
||||||
models_py.write_text(models_text.replace("db_index=False", "unique=True"))
|
|
||||||
run_aerich("aerich migrate") # migrations/models/1_
|
|
||||||
run_aerich("aerich upgrade")
|
|
||||||
r = run_shell("pytest _tests.py::test_unique_is_true")
|
|
||||||
assert r.returncode == 0
|
|
||||||
models_py.write_text(models_text.replace("db_index=False", "db_index=True"))
|
|
||||||
run_aerich("aerich migrate") # migrations/models/2_
|
|
||||||
run_aerich("aerich upgrade")
|
|
||||||
r = run_shell("pytest -s _tests.py::test_allow_duplicate")
|
|
||||||
assert r.returncode == 0
|
|
||||||
|
|
||||||
|
|
||||||
M2M_WITH_CUSTOM_THROUGH = """
|
|
||||||
groups = fields.ManyToManyField("models.Group", through="foo_group")
|
|
||||||
|
|
||||||
class Group(Model):
|
|
||||||
name = fields.CharField(max_length=60)
|
|
||||||
|
|
||||||
class FooGroup(Model):
|
|
||||||
foo = fields.ForeignKeyField("models.Foo")
|
|
||||||
group = fields.ForeignKeyField("models.Group")
|
|
||||||
is_active = fields.BooleanField(default=False)
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
table = "foo_group"
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
def test_sqlite_migrate(tmp_path: Path) -> None:
|
|
||||||
if not Dialect.is_sqlite():
|
|
||||||
return
|
|
||||||
with prepare_sqlite_project(tmp_path) as (models_py, models_text):
|
|
||||||
MODELS = models_text
|
|
||||||
run_aerich("aerich init -t settings.TORTOISE_ORM")
|
|
||||||
config_file = Path("pyproject.toml")
|
|
||||||
modify_time = config_file.stat().st_mtime
|
|
||||||
run_aerich("aerich init-db")
|
|
||||||
run_aerich("aerich init -t settings.TORTOISE_ORM")
|
|
||||||
assert modify_time == config_file.stat().st_mtime
|
|
||||||
r = run_shell("pytest _tests.py::test_allow_duplicate")
|
|
||||||
assert r.returncode == 0
|
|
||||||
# Add index
|
|
||||||
models_py.write_text(MODELS.replace("index=False", "index=True"))
|
|
||||||
run_aerich("aerich migrate") # migrations/models/1_
|
|
||||||
run_aerich("aerich upgrade")
|
|
||||||
r = run_shell("pytest -s _tests.py::test_allow_duplicate")
|
|
||||||
assert r.returncode == 0
|
|
||||||
# Drop index
|
|
||||||
models_py.write_text(MODELS)
|
|
||||||
run_aerich("aerich migrate") # migrations/models/2_
|
|
||||||
run_aerich("aerich upgrade")
|
|
||||||
r = run_shell("pytest -s _tests.py::test_allow_duplicate")
|
|
||||||
assert r.returncode == 0
|
|
||||||
# Add unique index
|
|
||||||
models_py.write_text(MODELS.replace("index=False", "index=True, unique=True"))
|
|
||||||
run_aerich("aerich migrate") # migrations/models/3_
|
|
||||||
run_aerich("aerich upgrade")
|
|
||||||
r = run_shell("pytest _tests.py::test_unique_is_true")
|
|
||||||
assert r.returncode == 0
|
|
||||||
# Drop unique index
|
|
||||||
models_py.write_text(MODELS)
|
|
||||||
run_aerich("aerich migrate") # migrations/models/4_
|
|
||||||
run_aerich("aerich upgrade")
|
|
||||||
r = run_shell("pytest _tests.py::test_allow_duplicate")
|
|
||||||
assert r.returncode == 0
|
|
||||||
# Add field with unique=True
|
|
||||||
with models_py.open("a") as f:
|
|
||||||
f.write(" age = fields.IntField(unique=True, default=0)")
|
|
||||||
run_aerich("aerich migrate") # migrations/models/5_
|
|
||||||
run_aerich("aerich upgrade")
|
|
||||||
r = run_shell("pytest _tests.py::test_add_unique_field")
|
|
||||||
assert r.returncode == 0
|
|
||||||
# Drop unique field
|
|
||||||
models_py.write_text(MODELS)
|
|
||||||
run_aerich("aerich migrate") # migrations/models/6_
|
|
||||||
run_aerich("aerich upgrade")
|
|
||||||
r = run_shell("pytest -s _tests.py::test_drop_unique_field")
|
|
||||||
assert r.returncode == 0
|
|
||||||
|
|
||||||
# Initial with indexed field and then drop it
|
|
||||||
migrations_dir = Path("migrations/models")
|
|
||||||
shutil.rmtree(migrations_dir)
|
|
||||||
db_file = _get_empty_db()
|
|
||||||
models_py.write_text(MODELS + " age = fields.IntField(db_index=True)")
|
|
||||||
run_aerich("aerich init -t settings.TORTOISE_ORM")
|
|
||||||
run_aerich("aerich init-db")
|
|
||||||
migration_file = list(migrations_dir.glob("0_*.py"))[0]
|
|
||||||
assert "CREATE INDEX" in migration_file.read_text()
|
|
||||||
r = run_shell("pytest _tests.py::test_with_age_field")
|
|
||||||
assert r.returncode == 0
|
|
||||||
models_py.write_text(MODELS)
|
|
||||||
run_aerich("aerich migrate")
|
|
||||||
run_aerich("aerich upgrade")
|
|
||||||
migration_file_1 = list(migrations_dir.glob("1_*.py"))[0]
|
|
||||||
assert "DROP INDEX" in migration_file_1.read_text()
|
|
||||||
r = run_shell("pytest _tests.py::test_without_age_field")
|
|
||||||
assert r.returncode == 0
|
|
||||||
|
|
||||||
# Generate migration file in emptry directory
|
|
||||||
db_file.unlink()
|
|
||||||
run_aerich("aerich init-db")
|
|
||||||
assert not db_file.exists()
|
|
||||||
for p in migrations_dir.glob("*"):
|
|
||||||
if p.is_dir():
|
|
||||||
shutil.rmtree(p)
|
|
||||||
else:
|
|
||||||
p.unlink()
|
|
||||||
run_aerich("aerich init-db")
|
|
||||||
assert db_file.exists()
|
|
||||||
|
|
||||||
# init without '[tool]' section in pyproject.toml
|
|
||||||
config_file = Path("pyproject.toml")
|
|
||||||
config_file.write_text('[project]\nname = "project"')
|
|
||||||
run_aerich("init -t settings.TORTOISE_ORM")
|
|
||||||
assert "[tool.aerich]" in config_file.read_text()
|
|
||||||
|
|
||||||
# add m2m with custom model for through
|
|
||||||
models_py.write_text(MODELS + M2M_WITH_CUSTOM_THROUGH)
|
|
||||||
run_aerich("aerich migrate")
|
|
||||||
run_aerich("aerich upgrade")
|
|
||||||
migration_file_1 = list(migrations_dir.glob("1_*.py"))[0]
|
|
||||||
assert "foo_group" in migration_file_1.read_text()
|
|
||||||
r = run_shell("pytest _tests.py::test_m2m_with_custom_through")
|
|
||||||
assert r.returncode == 0
|
|
||||||
|
|
||||||
# add m2m field after init-db
new = """
    groups = fields.ManyToManyField("models.Group", through="foo_group", related_name="users")

class Group(Model):
    name = fields.CharField(max_length=60)
"""
_get_empty_db()
if migrations_dir.exists():
    shutil.rmtree(migrations_dir)
models_py.write_text(MODELS)
run_aerich("aerich init-db")
models_py.write_text(MODELS + new)
run_aerich("aerich migrate")
run_aerich("aerich upgrade")
migration_file_1 = list(migrations_dir.glob("1_*.py"))[0]
assert "foo_group" in migration_file_1.read_text()
r = run_shell("pytest _tests.py::test_add_m2m_field_after_init_db")
assert r.returncode == 0
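
Note the contrast with the previous block: here through="foo_group" is a bare table name, so Tortoise generates the join model implicitly and the migration only has to create that table, whereas the custom-through variant above declares the join model (and its extra columns, such as is_active) explicitly.
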
@@ -1,164 +1,6 @@
-from aerich.utils import get_dict_diff_by_key, import_py_file
+from aerich.utils import import_py_file


-def test_import_py_file() -> None:
+def test_import_py_file():
     m = import_py_file("aerich/utils.py")
-    assert getattr(m, "import_py_file", None)
+    assert getattr(m, "import_py_file")
-
-
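
The TestDiffFields suite removed below (the "-"-prefixed lines) exercises get_dict_diff_by_key. Judging from the expected tuples, it matches the two lists of M2M field dicts by their "through" value, so reorderings do not count as changes, and yields dictdiffer-style change/remove/add entries. A behavior-compatible sketch, not necessarily aerich's actual implementation:

from dictdiffer import diff


def get_dict_diff_by_key(old_fields, new_fields, key="through"):
    # Pair old/new entries sharing the same `key` value, so a renamed field
    # with an unchanged through-table surfaces as a "change" rather than a
    # remove-plus-add; unmatched entries become plain "remove"/"add" diffs.
    index_by_value = {f[key]: i for i, f in enumerate(new_fields)}
    added = set(range(len(new_fields)))
    for field in old_fields:
        index = index_by_value.get(field[key])
        if index is None:
            yield from diff([field], [])  # ("remove", "", [(0, field)])
        else:
            added.discard(index)
            yield from diff([field], [new_fields[index]])  # "change" if renamed
    for index in sorted(added):
        yield from diff([], [new_fields[index]])  # ("add", "", [(0, field)])
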
-class TestDiffFields:
-    def test_the_same_through_order(self) -> None:
-        old = [
-            {"name": "users", "through": "users_group"},
-            {"name": "admins", "through": "admins_group"},
-        ]
-        new = [
-            {"name": "members", "through": "users_group"},
-            {"name": "admins", "through": "admins_group"},
-        ]
-        diffs = list(get_dict_diff_by_key(old, new))
-        assert type(get_dict_diff_by_key(old, new)).__name__ == "generator"
-        assert len(diffs) == 1
-        assert diffs == [("change", [0, "name"], ("users", "members"))]
-
-    def test_same_through_with_different_orders(self) -> None:
-        old = [
-            {"name": "users", "through": "users_group"},
-            {"name": "admins", "through": "admins_group"},
-        ]
-        new = [
-            {"name": "admins", "through": "admins_group"},
-            {"name": "members", "through": "users_group"},
-        ]
-        diffs = list(get_dict_diff_by_key(old, new))
-        assert len(diffs) == 1
-        assert diffs == [("change", [0, "name"], ("users", "members"))]
-
-    def test_the_same_field_name_order(self) -> None:
-        old = [
-            {"name": "users", "through": "users_group"},
-            {"name": "admins", "through": "admins_group"},
-        ]
-        new = [
-            {"name": "users", "through": "user_groups"},
-            {"name": "admins", "through": "admin_groups"},
-        ]
-        diffs = list(get_dict_diff_by_key(old, new))
-        assert len(diffs) == 4
-        assert diffs == [
-            ("remove", "", [(0, {"name": "users", "through": "users_group"})]),
-            ("remove", "", [(0, {"name": "admins", "through": "admins_group"})]),
-            ("add", "", [(0, {"name": "users", "through": "user_groups"})]),
-            ("add", "", [(0, {"name": "admins", "through": "admin_groups"})]),
-        ]
-
-    def test_same_field_name_with_different_orders(self) -> None:
-        old = [
-            {"name": "admins", "through": "admins_group"},
-            {"name": "users", "through": "users_group"},
-        ]
-        new = [
-            {"name": "users", "through": "user_groups"},
-            {"name": "admins", "through": "admin_groups"},
-        ]
-        diffs = list(get_dict_diff_by_key(old, new))
-        assert len(diffs) == 4
-        assert diffs == [
-            ("remove", "", [(0, {"name": "admins", "through": "admins_group"})]),
-            ("remove", "", [(0, {"name": "users", "through": "users_group"})]),
-            ("add", "", [(0, {"name": "users", "through": "user_groups"})]),
-            ("add", "", [(0, {"name": "admins", "through": "admin_groups"})]),
-        ]
-
-    def test_drop_one(self) -> None:
-        old = [
-            {"name": "users", "through": "users_group"},
-            {"name": "admins", "through": "admins_group"},
-        ]
-        new = [
-            {"name": "admins", "through": "admins_group"},
-        ]
-        diffs = list(get_dict_diff_by_key(old, new))
-        assert len(diffs) == 1
-        assert diffs == [("remove", "", [(0, {"name": "users", "through": "users_group"})])]
-
-    def test_add_one(self) -> None:
-        old = [
-            {"name": "admins", "through": "admins_group"},
-        ]
-        new = [
-            {"name": "users", "through": "users_group"},
-            {"name": "admins", "through": "admins_group"},
-        ]
-        diffs = list(get_dict_diff_by_key(old, new))
-        assert len(diffs) == 1
-        assert diffs == [("add", "", [(0, {"name": "users", "through": "users_group"})])]
-
-    def test_drop_some(self) -> None:
-        old = [
-            {"name": "users", "through": "users_group"},
-            {"name": "admins", "through": "admins_group"},
-            {"name": "staffs", "through": "staffs_group"},
-        ]
-        new = [
-            {"name": "admins", "through": "admins_group"},
-        ]
-        diffs = list(get_dict_diff_by_key(old, new))
-        assert len(diffs) == 2
-        assert diffs == [
-            ("remove", "", [(0, {"name": "users", "through": "users_group"})]),
-            ("remove", "", [(0, {"name": "staffs", "through": "staffs_group"})]),
-        ]
-
-    def test_add_some(self) -> None:
-        old = [
-            {"name": "staffs", "through": "staffs_group"},
-        ]
-        new = [
-            {"name": "users", "through": "users_group"},
-            {"name": "admins", "through": "admins_group"},
-            {"name": "staffs", "through": "staffs_group"},
-        ]
-        diffs = list(get_dict_diff_by_key(old, new))
-        assert len(diffs) == 2
-        assert diffs == [
-            ("add", "", [(0, {"name": "users", "through": "users_group"})]),
-            ("add", "", [(0, {"name": "admins", "through": "admins_group"})]),
-        ]
-
-    def test_some_through_unchanged(self) -> None:
-        old = [
-            {"name": "staffs", "through": "staffs_group"},
-            {"name": "admins", "through": "admins_group"},
-        ]
-        new = [
-            {"name": "users", "through": "users_group"},
-            {"name": "admins_new", "through": "admins_group"},
-            {"name": "staffs_new", "through": "staffs_group"},
-        ]
-        diffs = list(get_dict_diff_by_key(old, new))
-        assert len(diffs) == 3
-        assert diffs == [
-            ("change", [0, "name"], ("staffs", "staffs_new")),
-            ("change", [0, "name"], ("admins", "admins_new")),
-            ("add", "", [(0, {"name": "users", "through": "users_group"})]),
-        ]
-
-    def test_some_unchanged_without_drop_or_add(self) -> None:
-        old = [
-            {"name": "staffs", "through": "staffs_group"},
-            {"name": "admins", "through": "admins_group"},
-            {"name": "users", "through": "users_group"},
-        ]
-        new = [
-            {"name": "users_new", "through": "users_group"},
-            {"name": "admins_new", "through": "admins_group"},
-            {"name": "staffs_new", "through": "staffs_group"},
-        ]
-        diffs = list(get_dict_diff_by_key(old, new))
-        assert len(diffs) == 3
-        assert diffs == [
-            ("change", [0, "name"], ("staffs", "staffs_new")),
-            ("change", [0, "name"], ("admins", "admins_new")),
-            ("change", [0, "name"], ("users", "users_new")),
-        ]