80 Commits

Author SHA1 Message Date
long2ice
20aebc4413 chore: update version 2022-09-27 22:44:12 +08:00
long2ice
f8e1f9ff44 fix: initialize an empty database. (#267) 2022-09-27 22:42:54 +08:00
long2ice
ab31445fb2 fix: test 2022-09-26 18:39:48 +08:00
long2ice
28d19a4b7b - Fix syntax error with python3.8.10. (#265)
- Fix sql generate error. (#263)
2022-09-26 18:36:57 +08:00
long2ice
9da99824fe fix: postgres sql error (#263) 2022-09-23 23:33:49 +08:00
long2ice
75db7cea60 fix: test error 2022-09-23 10:35:33 +08:00
long2ice
d777c9c278 Merge remote-tracking branch 'origin/dev' into dev
# Conflicts:
#	aerich/utils.py
#	tests/test_utils.py
2022-09-23 10:30:46 +08:00
long2ice
e9b76bdd35 feat: use .py version files 2022-09-23 10:29:48 +08:00
long2ice
8b7864d886 Merge pull request #199 from ehdgua01/fix-ddl-format-and-writing-version-file
[Enhancement] Fix version file formats
2022-09-16 09:30:27 +08:00
KDH
bef45941f2 Fix testcase 2022-09-16 10:26:21 +09:00
KDH
7b472d7a84 Fix testcase 2022-09-16 10:08:34 +09:00
KDH
1f0a6dfb50 Fix typo 2022-09-16 09:58:04 +09:00
KDH
36282f123f Merge branch 'dev' into fix-ddl-format-and-writing-version-file
# Conflicts:
#	aerich/utils.py
#	tests/test_migrate.py
2022-09-16 09:54:51 +09:00
KDH
3cd4e24050 Merge branch 'dev' into fix-ddl-format-and-writing-version-file 2022-09-16 09:43:57 +09:00
long2ice
f8c2f1b551 Merge pull request #205 from GDGSNF/dev
Merge repeated `if` statements into single `if`
2022-09-16 08:40:28 +08:00
Yasser Tahiri
131d97a3d6 Merge branch 'dev' into dev 2022-09-15 23:19:59 +01:00
Yasser Tahiri
1a0371e977 Update aerich/utils.py
Co-authored-by: KDH <ehdgua01@naver.com>
2022-09-15 23:19:18 +01:00
long2ice
e5b092fd08 Merge pull request #260 from waketzheng/dev
refactor: use pathlib to read and write text
2022-09-12 11:44:02 +08:00
Waket Zheng
7a109f3c79 refactor: use pathlib to read and write text 2022-09-12 00:57:46 +08:00
Jinlong Peng
8c2ecbaef1 feat: support add/remove field with index 2022-08-26 18:04:20 +08:00
long2ice
b141363c51 Merge pull request #242 from ssilaev/dev
Hotfix for cli group function in v0.6.3
2022-07-22 08:39:02 +08:00
long2ice
9dd474d79f Merge remote-tracking branch 'origin/dev' into dev 2022-06-27 11:42:37 +08:00
long2ice
e4bb9d838e docs: update changelog 2022-06-27 11:41:48 +08:00
long2ice
029d522c79 Merge pull request #249 from tortoise/fix-decimal
Fix decimal field change
2022-06-27 11:38:03 +08:00
long2ice
d6627906c7 test: fix test_migrate 2022-06-27 11:36:09 +08:00
long2ice
3c88833154 fix: decimal field change (#246) 2022-06-27 11:29:47 +08:00
long2ice
8f68f08eba Merge pull request #248 from isaquealves/feature/load_ddl_class_per_dialect
feat: Add support for dynamically load DDL classes
2022-06-22 20:25:42 +08:00
Isaque Alves
60ba6963fd Update changelog 2022-06-22 09:22:26 -03:00
Isaque Alves
4c35c44bd2 feat: Add support for dynamically load DDL classes
Adopt a strategy of loading classes based on their names, allowing to
easily add new database support without changing Migrate class logic
2022-06-22 09:16:11 -03:00
long2ice
bdeaf5495e Merge pull request #247 from isaquealves/feature/postgresql-numeric-type-translate
refactor: Improve db inspection
2022-06-22 08:41:15 +08:00
Isaque Alves
db33059ec9 Resolve style issue 2022-06-20 15:42:21 -03:00
Isaque Alves
44b96058f8 fix(tests/test_migrate.py): Resolve issue with broken tests 2022-06-17 12:36:04 -03:00
Isaque Alves
abff753b6a refactor: Improve postgresql migrate operators tests 2022-06-17 09:45:02 -03:00
Isaque Alves
dcd8441a05 fix: add space following python style guide 2022-06-17 02:03:41 -03:00
Isaque Alves
b4a735b814 fix: Adjust changelog formatting 2022-06-17 02:02:37 -03:00
Isaque Alves
83ba13e99a Update changelog 2022-06-17 02:00:35 -03:00
Isaque Alves
d7b1c07d13 fix: Add comma to separate value in join 2022-06-17 01:51:47 -03:00
Isaque Alves
1ac16188fc refactor: Improve db inspection
- Add support to postgresql numeric type.
- Improve field configuration handling for numeric and decimal types
2022-06-17 01:38:39 -03:00
long2ice
4abc464ce0 feat: add is_flag to init-db 2022-05-24 11:20:12 +08:00
Sergey Silaev
d4430cec0d Hotfix for cli group function in v0.6.3 2022-05-10 01:20:11 +04:00
long2ice
0b01fa38d8 feat: add index inspect 2022-04-05 19:38:08 +08:00
long2ice
801dde15be feat: inspectdb support sqlite 2022-04-01 20:30:36 +08:00
long2ice
75480e2041 Merge remote-tracking branch 'origin/dev' into dev 2022-04-01 19:57:03 +08:00
long2ice
45129cef9f feat: improve inspectdb and support postgres 2022-04-01 19:56:48 +08:00
long2ice
3a0dd2355d Merge pull request #230 from ssilaev/dev
Increase max length of app column
2022-02-09 15:01:39 +08:00
Sergey Silaev
0e71bc16ae Increase max length of app column 2022-02-08 22:14:55 +03:00
long2ice
c39462820c upgrade deps 2022-01-17 22:26:13 +08:00
long2ice
f15cbaf9e0 Support migration for specified index. (#203) 2021-12-29 21:36:23 +08:00
long2ice
15131469df upgrade deps 2021-12-22 16:26:13 +08:00
long2ice
c60c1610f0 Fix pyproject.toml not existing error. (#217) 2021-12-12 22:11:51 +08:00
long2ice
63e8d06157 remove aiomysql 2021-12-08 14:43:33 +08:00
long2ice
68ef8ac676 Fix ci 2021-12-08 14:38:16 +08:00
long2ice
8b5cf6faa0 inspectdb support DATE. (#215) 2021-12-08 14:33:27 +08:00
Yasser Tahiri
40c7ef7fd6 Merge repeated if statements into single if 2021-10-21 15:43:18 +01:00
KDH
7a826df43f Fix duplicated semicolon in table creation DDL 2021-10-12 11:24:37 +09:00
KDH
b1b9cc1454 Fix M2M table template 2021-10-12 11:23:29 +09:00
long2ice
fac00d45cc Remove pydantic dependency. (#198) 2021-10-04 23:05:20 +08:00
long2ice
6f7893d376 Fix section name 2021-09-28 15:07:10 +08:00
long2ice
b1521c4cc7 update version 2021-09-27 19:55:38 +08:00
long2ice
24c1f4cb7d Change default config file from aerich.ini to pyproject.toml. (#197) 2021-09-27 11:05:20 +08:00
long2ice
661f241dac Compatible with old version in indexes 2021-08-31 17:53:17 +08:00
long2ice
01787558d6 Fix test 2021-08-31 17:41:13 +08:00
long2ice
699b0321a4 Support indexes change. (#193) 2021-08-31 17:36:25 +08:00
long2ice
4a83021892 Update FUNDING.yml 2021-08-26 20:39:31 +08:00
long2ice
af63221875 Fix no module found error. (#188) (#189) 2021-08-16 11:14:43 +08:00
long2ice
359525716c update README.md 2021-08-12 15:42:54 +08:00
long2ice
7d3eb2e151 Merge pull request #181 from Vovetta/dev
Fix: migrate doesn't use source_field in unique_together
2021-08-04 09:42:18 +08:00
Vovetta
d8abf79449 Updated changelog and version 2021-08-03 10:38:31 -07:00
Vovetta
aa9f40ae27 Fix: migrate doesn't use source_field in unique_together 2021-08-03 10:36:06 -07:00
long2ice
79b7ae343a update README.md 2021-08-03 16:25:06 +08:00
long2ice
6f5a9ab78c Add Command class. (#148) (#141) (#123) (#106) 2021-08-03 16:18:07 +08:00
long2ice
1e5a83c281 update deps 2021-07-26 17:44:18 +08:00
long2ice
180420843d update README.md 2021-07-26 15:27:49 +08:00
long2ice
58f66b91cf Fix redundant semicolons 2021-07-23 17:07:10 +08:00
long2ice
064d7ff675 Fix ci 2021-07-22 15:32:07 +08:00
long2ice
2da794d823 Fix db_constraint when fk changed. (#179) 2021-07-22 14:37:49 +08:00
long2ice
77005f3793 Fix MySQL 5.X rename column. 2021-07-09 10:53:13 +08:00
long2ice
5a873b8b69 Merge pull request #177 from yusukefs/add-default-src-folder-config
Add default value for src_folder config
2021-07-08 17:27:29 +08:00
Yusuke Sakai
3989b7c674 Update version and changelog 2021-07-08 18:01:59 +09:00
Yusuke Sakai
694b05356f Add default src_folder config value 2021-07-08 17:35:44 +09:00
30 changed files with 1589 additions and 1049 deletions

.github/FUNDING.yml

@@ -1 +1 @@
-custom: ["https://sponsor.long2ice.cn"]
+custom: ["https://sponsor.long2ice.io"]

.github/workflows/ci.yml

@@ -2,10 +2,10 @@ name: ci
 on:
   push:
     branches-ignore:
-      - master
+      - main
   pull_request:
     branches-ignore:
-      - master
+      - main
 jobs:
   ci:
     runs-on: ubuntu-latest
@@ -26,9 +26,9 @@ jobs:
         with:
           python-version: '3.x'
       - name: Install and configure Poetry
-        uses: snok/install-poetry@v1.1.1
-        with:
-          virtualenvs-create: false
+        run: |
+          pip install -U pip poetry
+          poetry config virtualenvs.create false
       - name: CI
         env:
           MYSQL_PASS: root

.github/workflows/pypi.yml

@@ -12,9 +12,9 @@ jobs:
         with:
           python-version: '3.x'
       - name: Install and configure Poetry
-        uses: snok/install-poetry@v1.1.1
-        with:
-          virtualenvs-create: false
+        run: |
+          pip install -U pip poetry
+          poetry config virtualenvs.create false
       - name: Build dists
         run: make build
      - name: Pypi Publish

CHANGELOG.md

@@ -1,7 +1,78 @@
 # ChangeLog
+
+## 0.7
+
+### 0.7.1
+
+- Fix syntax error with python3.8.10. (#265)
+- Fix sql generate error. (#263)
+- Fix initializing an empty database. (#267)
+
+### 0.7.1rc1
+
+- Fix postgres sql error. (#263)
+
+### 0.7.0
+
+**Aerich now uses `.py` files to record versions.**
+
+Upgrade note:
+
+1. Drop the `aerich` table
+2. Delete the `migrations/models` folder
+3. Run `aerich init-db`
+
+- Improve `inspectdb`, adding support for the `postgresql::numeric` data type
+- Add support for dynamically loading DDL classes, making it easy to add new
+  database support without changing the `Migrate` class logic
+- Fix decimal field change. (#246)
+- Support add/remove field with index.
+
+## 0.6
+
+### 0.6.3
+
+- Improve `inspectdb` and support `postgres` & `sqlite`.
+
+### 0.6.2
+
+- Support migration for specified index. (#203)
+
+### 0.6.1
+
+- Fix `pyproject.toml` not existing error. (#217)
+
+### 0.6.0
+
+- Change default config file from `aerich.ini` to `pyproject.toml`. (#197)
+
+  **Upgrade note:**
+
+  1. Run `aerich init -t config.TORTOISE_ORM`.
+  2. Remove `aerich.ini`.
+
+- Remove `pydantic` dependency. (#198)
+- `inspectdb` supports `DATE`. (#215)
+
 ## 0.5
+
+### 0.5.8
+
+- Support `indexes` change. (#193)
+
+### 0.5.7
+
+- Fix no module found error. (#188) (#189)
+
+### 0.5.6
+
+- Add `Command` class. (#148) (#141) (#123) (#106)
+- Fix: migrate doesn't use source_field in unique_together. (#181)
+
+### 0.5.5
+
+- Fix KeyError: 'src_folder' after upgrading aerich to 0.5.4. (#176)
+- Fix MySQL 5.X rename column.
+- Fix `db_constraint` when fk changed. (#179)
+
 ### 0.5.4
 - Fix incorrect index creation order. (#151)

Makefile

@@ -12,16 +12,15 @@ up:
 	@poetry update
 
 deps:
-	@poetry install -E asyncpg -E asyncmy -E aiomysql
+	@poetry install -E asyncpg -E asyncmy
 
 style: deps
-	isort -src $(checkfiles)
-	black $(black_opts) $(checkfiles)
+	@isort -src $(checkfiles)
+	@black $(black_opts) $(checkfiles)
 
 check: deps
-	black --check $(black_opts) $(checkfiles) || (echo "Please run 'make style' to auto-fix style issues" && false)
-	flake8 $(checkfiles)
-	bandit -x tests -r $(checkfiles)
+	@black --check $(black_opts) $(checkfiles) || (echo "Please run 'make style' to auto-fix style issues" && false)
+	@pflake8 $(checkfiles)
 
 test: deps
 	$(py_warn) TEST_DB=sqlite://:memory: py.test

README.md

@@ -7,7 +7,7 @@
 ## Introduction
 
-Aerich is a database migrations tool for Tortoise-ORM, which is like alembic for SQLAlchemy, or like Django ORM with
+Aerich is a database migrations tool for TortoiseORM, which is like alembic for SQLAlchemy, or like Django ORM with
 it\'s own migration solution.
 
 ## Install
@@ -15,7 +15,7 @@ it\'s own migration solution.
 Just install from pypi:
 
 ```shell
-> pip install aerich
+pip install aerich
 ```
 
 ## Quick Start
@@ -27,11 +27,8 @@ Usage: aerich [OPTIONS] COMMAND [ARGS]...
 Options:
   -V, --version      Show the version and exit.
-  -c, --config TEXT  Config file.  [default: aerich.ini]
+  -c, --config TEXT  Config file.  [default: pyproject.toml]
   --app TEXT         Tortoise-ORM app name.
-  -n, --name TEXT    Name of section in .ini file to use for aerich config.
-                     [default: aerich]
   -h, --help         Show this message and exit.
 
 Commands:
@@ -70,10 +67,9 @@ Usage: aerich init [OPTIONS]
   Init config file and generate root migrate location.
 
 Options:
   -t, --tortoise-orm TEXT  Tortoise-ORM config module dict variable, like
                            settings.TORTOISE_ORM.  [required]
   --location TEXT          Migrate store location.  [default: ./migrations]
   -s, --src_folder TEXT    Folder of the source, relative to the project root.
   -h, --help               Show this message and exit.
@@ -85,7 +81,7 @@ Initialize the config file and migrations location:
 > aerich init -t tests.backends.mysql.TORTOISE_ORM
 Success create migrate location ./migrations
-Success generate config file aerich.ini
+Success write config to pyproject.toml
 ```
 
 ### Init db
@@ -105,11 +101,11 @@ e.g. `aerich --app other_models init-db`.
 ```shell
 > aerich migrate --name drop_column
-Success migrate 1_202029051520102929_drop_column.sql
+Success migrate 1_202029051520102929_drop_column.py
 ```
 
 Format of migrate filename is
-`{version_num}_{datetime}_{name|update}.sql`.
+`{version_num}_{datetime}_{name|update}.py`.
 
 If `aerich` guesses you are renaming a column, it will ask `Rename {old_column} to {new_column} [True]`. You can choose
 `True` to rename column without column drop, or choose `False` to drop the column then create. Note that the latter may
@@ -120,7 +116,7 @@ lose data.
 ```shell
 > aerich upgrade
-Success upgrade 1_202029051520102929_drop_column.sql
+Success upgrade 1_202029051520102929_drop_column.py
 ```
 
 Now your db is migrated to latest.
@@ -146,7 +142,7 @@ Options:
 ```shell
 > aerich downgrade
-Success downgrade 1_202029051520102929_drop_column.sql
+Success downgrade 1_202029051520102929_drop_column.py
 ```
 
 Now your db is rolled back to the specified version.
@@ -156,7 +152,7 @@ Now your db is rolled back to the specified version.
 ```shell
 > aerich history
-1_202029051520102929_drop_column.sql
+1_202029051520102929_drop_column.py
 ```
 
 ### Show heads to be migrated
@@ -164,12 +160,12 @@ Now your db is rolled back to the specified version.
 ```shell
 > aerich heads
-1_202029051520102929_drop_column.sql
+1_202029051520102929_drop_column.py
 ```
 
 ### Inspect db tables to TortoiseORM model
 
-Currently `inspectdb` only supports MySQL.
+Currently `inspectdb` supports MySQL, Postgres & SQLite.
 
 ```shell
 Usage: aerich inspectdb [OPTIONS]
@@ -193,7 +189,44 @@ Inspect a specified table in the default app and redirect to `models.py`:
 aerich inspectdb -t user > models.py
 ```
 
-Note that this command is limited and cannot infer some fields, such as `IntEnumField`, `ForeignKeyField`, and others.
+For example, your table is:
+
+```sql
+CREATE TABLE `test`
+(
+    `id`       int NOT NULL AUTO_INCREMENT,
+    `decimal`  decimal(10, 2) NOT NULL,
+    `date`     date DEFAULT NULL,
+    `datetime` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+    `time`     time DEFAULT NULL,
+    `float`    float DEFAULT NULL,
+    `string`   varchar(200) COLLATE utf8mb4_general_ci DEFAULT NULL,
+    `tinyint`  tinyint DEFAULT NULL,
+    PRIMARY KEY (`id`),
+    KEY `asyncmy_string_index` (`string`)
+) ENGINE = InnoDB
+  DEFAULT CHARSET = utf8mb4
+  COLLATE = utf8mb4_general_ci
+```
+
+Now run `aerich inspectdb -t test` to see the generated model:
+
+```python
+from tortoise import Model, fields
+
+
+class Test(Model):
+    date = fields.DateField(null=True, )
+    datetime = fields.DatetimeField(auto_now=True, )
+    decimal = fields.DecimalField(max_digits=10, decimal_places=2, )
+    float = fields.FloatField(null=True, )
+    id = fields.IntField(pk=True, )
+    string = fields.CharField(max_length=200, null=True, )
+    time = fields.TimeField(null=True, )
+    tinyint = fields.BooleanField(null=True, )
+```
+
+Note that this command is limited and can't infer some fields, such as `IntEnumField`, `ForeignKeyField`, and others.
 
 ### Multiple databases
@@ -212,6 +245,29 @@ tortoise_orm = {
 
 You only need to specify `aerich.models` in one app, and must specify `--app` when running `aerich migrate` and so on.
 
+## Restore `aerich` workflow
+
+In some cases, such as breaking changes from an upgrade of `aerich`, you can't run `aerich migrate` or `aerich upgrade`;
+you can take the following steps:
+
+1. drop the `aerich` table.
+2. delete the `migrations/{app}` directory.
+3. rerun `aerich init-db`.
+
+Note that these actions are safe; you can also do this to reset your migrations if you have too many migration files.
+
+## Use `aerich` in application
+
+You can use `aerich` outside the CLI by using the `Command` class.
+
+```python
+from aerich import Command
+
+command = Command(tortoise_config=config, app='models')
+await command.init()
+await command.migrate('test')
+```
+
 ## License
 
 This project is licensed under the
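The "Multiple databases" hunk above truncates the `tortoise_orm` dict it refers to. For context, a hedged sketch of what such a two-app config typically looks like; the connection URLs and app names here are illustrative, not taken from this diff:

```python
# illustrative only: a Tortoise-ORM config with two apps, so that
# `aerich --app models_second migrate` can target the second database
tortoise_orm = {
    "connections": {
        "default": "mysql://root:123456@127.0.0.1:3306/test",
        "second": "sqlite://db.sqlite3",
    },
    "apps": {
        "models": {
            # aerich.models must be registered in exactly one app
            "models": ["app.models", "aerich.models"],
            "default_connection": "default",
        },
        "models_second": {
            "models": ["app.models_second"],
            "default_connection": "second",
        },
    },
}
```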

aerich/__init__.py

@@ -1 +1,144 @@
-__version__ = "0.5.4"
+import os
from pathlib import Path
from typing import List
from tortoise import Tortoise, generate_schema_for_client
from tortoise.exceptions import OperationalError
from tortoise.transactions import in_transaction
from tortoise.utils import get_schema_sql
from aerich.exceptions import DowngradeError
from aerich.inspectdb.mysql import InspectMySQL
from aerich.inspectdb.postgres import InspectPostgres
from aerich.inspectdb.sqlite import InspectSQLite
from aerich.migrate import MIGRATE_TEMPLATE, Migrate
from aerich.models import Aerich
from aerich.utils import (
get_app_connection,
get_app_connection_name,
get_models_describe,
import_py_file,
)
class Command:
def __init__(
self,
tortoise_config: dict,
app: str = "models",
location: str = "./migrations",
):
self.tortoise_config = tortoise_config
self.app = app
self.location = location
Migrate.app = app
async def init(self):
await Migrate.init(self.tortoise_config, self.app, self.location)
async def upgrade(self):
migrated = []
for version_file in Migrate.get_all_version_files():
try:
exists = await Aerich.exists(version=version_file, app=self.app)
except OperationalError:
exists = False
if not exists:
async with in_transaction(
get_app_connection_name(self.tortoise_config, self.app)
) as conn:
file_path = Path(Migrate.migrate_location, version_file)
m = import_py_file(file_path)
upgrade = getattr(m, "upgrade")
await conn.execute_script(await upgrade(conn))
await Aerich.create(
version=version_file,
app=self.app,
content=get_models_describe(self.app),
)
migrated.append(version_file)
return migrated
async def downgrade(self, version: int, delete: bool):
ret = []
if version == -1:
specified_version = await Migrate.get_last_version()
else:
specified_version = await Aerich.filter(
app=self.app, version__startswith=f"{version}_"
).first()
if not specified_version:
raise DowngradeError("No specified version found")
if version == -1:
versions = [specified_version]
else:
versions = await Aerich.filter(app=self.app, pk__gte=specified_version.pk)
for version in versions:
file = version.version
async with in_transaction(
get_app_connection_name(self.tortoise_config, self.app)
) as conn:
file_path = Path(Migrate.migrate_location, file)
m = import_py_file(file_path)
downgrade = getattr(m, "downgrade")
downgrade_sql = await downgrade(conn)
if not downgrade_sql.strip():
raise DowngradeError("No downgrade items found")
await conn.execute_script(downgrade_sql)
await version.delete()
if delete:
os.unlink(file_path)
ret.append(file)
return ret
async def heads(self):
ret = []
versions = Migrate.get_all_version_files()
for version in versions:
if not await Aerich.exists(version=version, app=self.app):
ret.append(version)
return ret
async def history(self):
versions = Migrate.get_all_version_files()
return [version for version in versions]
async def inspectdb(self, tables: List[str] = None) -> str:
connection = get_app_connection(self.tortoise_config, self.app)
dialect = connection.schema_generator.DIALECT
if dialect == "mysql":
cls = InspectMySQL
elif dialect == "postgres":
cls = InspectPostgres
elif dialect == "sqlite":
cls = InspectSQLite
else:
raise NotImplementedError(f"{dialect} is not supported")
inspect = cls(connection, tables)
return await inspect.inspect()
async def migrate(self, name: str = "update"):
return await Migrate.migrate(name)
async def init_db(self, safe: bool):
location = self.location
app = self.app
dirname = Path(location, app)
dirname.mkdir(parents=True)
await Tortoise.init(config=self.tortoise_config)
connection = get_app_connection(self.tortoise_config, app)
await generate_schema_for_client(connection, safe)
schema = get_schema_sql(connection, safe)
version = await Migrate.generate_version()
await Aerich.create(
version=version,
app=app,
content=get_models_describe(app),
)
version_file = Path(dirname, version)
content = MIGRATE_TEMPLATE.format(upgrade_sql=schema, downgrade_sql="")
with open(version_file, "w", encoding="utf-8") as f:
f.write(content)
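The `Command` class above is the programmatic entry point the CLI now delegates to. A hedged sketch of driving a full cycle with it; the config dict, app name, and migration name are illustrative:

```python
import asyncio

from aerich import Command

# illustrative config; point the db_url and models at your own project
config = {
    "connections": {"default": "sqlite://db.sqlite3"},
    "apps": {"models": {"models": ["app.models", "aerich.models"]}},
}


async def main():
    command = Command(tortoise_config=config, app="models")
    await command.init()
    await command.migrate("add_age_column")  # writes N_<datetime>_add_age_column.py
    migrated = await command.upgrade()       # returns the list of applied version files
    print(migrated)
    # -1 targets only the latest version; delete=False keeps the .py file on disk
    rolled_back = await command.downgrade(version=-1, delete=False)
    print(rolled_back)


asyncio.run(main())
```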

aerich/cli.py

@@ -1,34 +1,24 @@
 import asyncio
 import os
-from configparser import ConfigParser
 from functools import wraps
 from pathlib import Path
 from typing import List
 
 import click
+import tomlkit
 from click import Context, UsageError
-from tortoise import Tortoise, generate_schema_for_client
-from tortoise.exceptions import OperationalError
-from tortoise.transactions import in_transaction
-from tortoise.utils import get_schema_sql
+from tomlkit.exceptions import NonExistentKey
+from tortoise import Tortoise
 
-from aerich.inspectdb import InspectDb
-from aerich.migrate import Migrate
-from aerich.utils import (
-    add_src_path,
-    get_app_connection,
-    get_app_connection_name,
-    get_models_describe,
-    get_tortoise_config,
-    get_version_content_from_file,
-    write_version_file,
-)
-
-from . import __version__
-from .enums import Color
-from .models import Aerich
-
-parser = ConfigParser()
+from aerich import Command
+from aerich.enums import Color
+from aerich.exceptions import DowngradeError
+from aerich.utils import add_src_path, get_tortoise_config
+from aerich.version import __version__
+
+CONFIG_DEFAULT_VALUES = {
+    "src_folder": ".",
+}
@@ -36,11 +26,11 @@ def coro(f):
     def wrapper(*args, **kwargs):
         loop = asyncio.get_event_loop()
-        # Close db connections at the end of all all but the cli group function
+        # Close db connections at the end of all but the cli group function
         try:
             loop.run_until_complete(f(*args, **kwargs))
         finally:
-            if f.__name__ != "cli":
+            if f.__name__ not in ["cli", "init_db", "init"]:
                 loop.run_until_complete(Tortoise.close_connections())
 
     return wrapper
@@ -49,46 +39,42 @@ def coro(f):
 @click.group(context_settings={"help_option_names": ["-h", "--help"]})
 @click.version_option(__version__, "-V", "--version")
 @click.option(
-    "-c", "--config", default="aerich.ini", show_default=True, help="Config file.",
+    "-c",
+    "--config",
+    default="pyproject.toml",
+    show_default=True,
+    help="Config file.",
 )
 @click.option("--app", required=False, help="Tortoise-ORM app name.")
-@click.option(
-    "-n",
-    "--name",
-    default="aerich",
-    show_default=True,
-    help="Name of section in .ini file to use for aerich config.",
-)
 @click.pass_context
 @coro
-async def cli(ctx: Context, config, app, name):
+async def cli(ctx: Context, config, app):
     ctx.ensure_object(dict)
     ctx.obj["config_file"] = config
-    ctx.obj["name"] = name
 
     invoked_subcommand = ctx.invoked_subcommand
     if invoked_subcommand != "init":
-        if not Path(config).exists():
+        config_path = Path(config)
+        if not config_path.exists():
             raise UsageError("You must exec init first", ctx=ctx)
-        parser.read(config)
-
-        location = parser[name]["location"]
-        tortoise_orm = parser[name]["tortoise_orm"]
-        src_folder = parser[name]["src_folder"]
-
-        # Add specified source folder to path
+        content = config_path.read_text()
+        doc = tomlkit.parse(content)
+        try:
+            tool = doc["tool"]["aerich"]
+            location = tool["location"]
+            tortoise_orm = tool["tortoise_orm"]
+            src_folder = tool.get("src_folder", CONFIG_DEFAULT_VALUES["src_folder"])
+        except NonExistentKey:
+            raise UsageError("You need run aerich init again when upgrade to 0.6.0+")
         add_src_path(src_folder)
         tortoise_config = get_tortoise_config(ctx, tortoise_orm)
         app = app or list(tortoise_config.get("apps").keys())[0]
-        ctx.obj["config"] = tortoise_config
-        ctx.obj["location"] = location
-        ctx.obj["app"] = app
-        Migrate.app = app
+        command = Command(tortoise_config=tortoise_config, app=app, location=location)
+        ctx.obj["command"] = command
         if invoked_subcommand != "init-db":
             if not Path(location, app).exists():
                 raise UsageError("You must exec init-db first", ctx=ctx)
-            await Migrate.init(tortoise_config, app, location)
+            await command.init()
 
 
 @cli.command(help="Generate migrate changes file.")
@@ -96,7 +82,8 @@ async def cli(ctx: Context, config, app, name):
 @click.pass_context
 @coro
 async def migrate(ctx: Context, name):
-    ret = await Migrate.migrate(name)
+    command = ctx.obj["command"]
+    ret = await command.migrate(name)
     if not ret:
         return click.secho("No changes detected", fg=Color.yellow)
     click.secho(f"Success migrate {ret}", fg=Color.green)
@@ -106,28 +93,13 @@ async def migrate(ctx: Context, name):
 @click.pass_context
 @coro
 async def upgrade(ctx: Context):
-    config = ctx.obj["config"]
-    app = ctx.obj["app"]
-    migrated = False
-    for version_file in Migrate.get_all_version_files():
-        try:
-            exists = await Aerich.exists(version=version_file, app=app)
-        except OperationalError:
-            exists = False
-        if not exists:
-            async with in_transaction(get_app_connection_name(config, app)) as conn:
-                file_path = Path(Migrate.migrate_location, version_file)
-                content = get_version_content_from_file(file_path)
-                upgrade_query_list = content.get("upgrade")
-                for upgrade_query in upgrade_query_list:
-                    await conn.execute_script(upgrade_query)
-                await Aerich.create(
-                    version=version_file, app=app, content=get_models_describe(app),
-                )
-                click.secho(f"Success upgrade {version_file}", fg=Color.green)
-                migrated = True
+    command = ctx.obj["command"]
+    migrated = await command.upgrade()
     if not migrated:
         click.secho("No upgrade items found", fg=Color.yellow)
+    else:
+        for version_file in migrated:
+            click.secho(f"Success upgrade {version_file}", fg=Color.green)
 
 
 @cli.command(help="Downgrade to specified version.")
@@ -153,59 +125,37 @@ async def upgrade(ctx: Context):
 )
 @coro
 async def downgrade(ctx: Context, version: int, delete: bool):
-    app = ctx.obj["app"]
-    config = ctx.obj["config"]
-    if version == -1:
-        specified_version = await Migrate.get_last_version()
-    else:
-        specified_version = await Aerich.filter(app=app, version__startswith=f"{version}_").first()
-    if not specified_version:
-        return click.secho("No specified version found", fg=Color.yellow)
-    if version == -1:
-        versions = [specified_version]
-    else:
-        versions = await Aerich.filter(app=app, pk__gte=specified_version.pk)
-    for version in versions:
-        file = version.version
-        async with in_transaction(get_app_connection_name(config, app)) as conn:
-            file_path = Path(Migrate.migrate_location, file)
-            content = get_version_content_from_file(file_path)
-            downgrade_query_list = content.get("downgrade")
-            if not downgrade_query_list:
-                click.secho("No downgrade items found", fg=Color.yellow)
-                return
-            for downgrade_query in downgrade_query_list:
-                await conn.execute_query(downgrade_query)
-            await version.delete()
-            if delete:
-                os.unlink(file_path)
-            click.secho(f"Success downgrade {file}", fg=Color.green)
+    command = ctx.obj["command"]
+    try:
+        files = await command.downgrade(version, delete)
+    except DowngradeError as e:
+        return click.secho(str(e), fg=Color.yellow)
+    for file in files:
+        click.secho(f"Success downgrade {file}", fg=Color.green)
 
 
 @cli.command(help="Show current available heads in migrate location.")
 @click.pass_context
 @coro
 async def heads(ctx: Context):
-    app = ctx.obj["app"]
-    versions = Migrate.get_all_version_files()
-    is_heads = False
-    for version in versions:
-        if not await Aerich.exists(version=version, app=app):
-            click.secho(version, fg=Color.green)
-            is_heads = True
-    if not is_heads:
-        click.secho("No available heads,try migrate first", fg=Color.green)
+    command = ctx.obj["command"]
+    head_list = await command.heads()
+    if not head_list:
+        return click.secho("No available heads, try migrate first", fg=Color.green)
+    for version in head_list:
+        click.secho(version, fg=Color.green)
 
 
 @cli.command(help="List all migrate items.")
 @click.pass_context
 @coro
 async def history(ctx: Context):
-    versions = Migrate.get_all_version_files()
+    command = ctx.obj["command"]
+    versions = await command.history()
+    if not versions:
+        return click.secho("No history, try migrate", fg=Color.green)
     for version in versions:
         click.secho(version, fg=Color.green)
-    if not versions:
-        click.secho("No history,try migrate", fg=Color.green)
 
 
 @cli.command(help="Init config file and generate root migrate location.")
@@ -216,12 +166,15 @@ async def history(ctx: Context):
     help="Tortoise-ORM config module dict variable, like settings.TORTOISE_ORM.",
 )
 @click.option(
-    "--location", default="./migrations", show_default=True, help="Migrate store location.",
+    "--location",
+    default="./migrations",
+    show_default=True,
+    help="Migrate store location.",
 )
 @click.option(
     "-s",
     "--src_folder",
-    default=".",
+    default=CONFIG_DEFAULT_VALUES["src_folder"],
     show_default=False,
     help="Folder of the source, relative to the project root.",
 )
@@ -229,9 +182,6 @@ async def history(ctx: Context):
 @coro
 async def init(ctx: Context, tortoise_orm, location, src_folder):
     config_file = ctx.obj["config_file"]
-    name = ctx.obj["name"]
-    if Path(config_file).exists():
-        return click.secho("Configuration file already created", fg=Color.yellow)
 
     if os.path.isabs(src_folder):
         src_folder = os.path.relpath(os.getcwd(), src_folder)
@@ -242,75 +192,66 @@ async def init(ctx: Context, tortoise_orm, location, src_folder):
     # check that we can find the configuration, if not we can fail before the config file gets created
     add_src_path(src_folder)
     get_tortoise_config(ctx, tortoise_orm)
+    config_path = Path(config_file)
+    if config_path.exists():
+        content = config_path.read_text()
+        doc = tomlkit.parse(content)
+    else:
+        doc = tomlkit.parse("[tool.aerich]")
+    table = tomlkit.table()
+    table["tortoise_orm"] = tortoise_orm
+    table["location"] = location
+    table["src_folder"] = src_folder
+    doc["tool"]["aerich"] = table
 
-    parser.add_section(name)
-    parser.set(name, "tortoise_orm", tortoise_orm)
-    parser.set(name, "location", location)
-    parser.set(name, "src_folder", src_folder)
-
-    with open(config_file, "w", encoding="utf-8") as f:
-        parser.write(f)
+    config_path.write_text(tomlkit.dumps(doc))
 
     Path(location).mkdir(parents=True, exist_ok=True)
 
     click.secho(f"Success create migrate location {location}", fg=Color.green)
-    click.secho(f"Success generate config file {config_file}", fg=Color.green)
+    click.secho(f"Success write config to {config_file}", fg=Color.green)
 
 
 @cli.command(help="Generate schema and generate app migrate location.")
 @click.option(
+    "-s",
     "--safe",
     type=bool,
+    is_flag=True,
     default=True,
     help="When set to true, creates the table only when it does not already exist.",
     show_default=True,
 )
 @click.pass_context
 @coro
-async def init_db(ctx: Context, safe):
-    config = ctx.obj["config"]
-    location = ctx.obj["location"]
-    app = ctx.obj["app"]
-
-    dirname = Path(location, app)
+async def init_db(ctx: Context, safe: bool):
+    command = ctx.obj["command"]
+    app = command.app
+    dirname = Path(command.location, app)
     try:
-        dirname.mkdir(parents=True)
+        await command.init_db(safe)
         click.secho(f"Success create app migrate location {dirname}", fg=Color.green)
+        click.secho(f'Success generate schema for app "{app}"', fg=Color.green)
     except FileExistsError:
         return click.secho(
             f"Inited {app} already, or delete {dirname} and try again.", fg=Color.yellow
        )
 
-    await Tortoise.init(config=config)
-    connection = get_app_connection(config, app)
-
-    await generate_schema_for_client(connection, safe)
-
-    schema = get_schema_sql(connection, safe)
-
-    version = await Migrate.generate_version()
-    await Aerich.create(
-        version=version, app=app, content=get_models_describe(app),
-    )
-    content = {
-        "upgrade": [schema],
-    }
-    write_version_file(Path(dirname, version), content)
-    click.secho(f'Success generate schema for app "{app}"', fg=Color.green)
-
 
 @cli.command(help="Introspects the database tables to standard output as TortoiseORM model.")
 @click.option(
-    "-t", "--table", help="Which tables to inspect.", multiple=True, required=False,
+    "-t",
+    "--table",
+    help="Which tables to inspect.",
+    multiple=True,
+    required=False,
 )
 @click.pass_context
 @coro
 async def inspectdb(ctx: Context, table: List[str]):
-    config = ctx.obj["config"]
-    app = ctx.obj["app"]
-    connection = get_app_connection(config, app)
-    inspect = InspectDb(connection, table)
-    await inspect.inspect()
+    command = ctx.obj["command"]
+    ret = await command.inspectdb(table)
+    click.secho(ret)
 
 
 def main():

aerich/coder.py (new file)

@@ -0,0 +1,31 @@
import base64
import json
import pickle # nosec: B301,B403
from tortoise.indexes import Index
class JsonEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, Index):
return {
"type": "index",
"val": base64.b64encode(pickle.dumps(obj)).decode(), # nosec: B301
}
else:
return super().default(obj)
def object_hook(obj):
_type = obj.get("type")
if not _type:
return obj
return pickle.loads(base64.b64decode(obj["val"])) # nosec: B301
def encoder(obj: dict):
return json.dumps(obj, cls=JsonEncoder)
def decoder(obj: str):
return json.loads(obj, object_hook=object_hook)
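The new coder module exists so that `Index` objects inside the models-describe dict survive the JSON round trip into the `aerich` table. A minimal usage sketch of the helpers above:

```python
from tortoise.indexes import Index

from aerich.coder import decoder, encoder

payload = {"indexes": [Index(fields=("name",))], "note": "plain values pass through"}
text = encoder(payload)   # Index becomes {"type": "index", "val": "<base64 pickle>"}
restored = decoder(text)  # object_hook unpickles it back into an Index instance
assert isinstance(restored["indexes"][0], Index)
```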

aerich/ddl/__init__.py

@@ -23,7 +23,12 @@ class BaseDDL:
     _DROP_INDEX_TEMPLATE = 'ALTER TABLE "{table_name}" DROP INDEX "{index_name}"'
     _ADD_FK_TEMPLATE = 'ALTER TABLE "{table_name}" ADD CONSTRAINT "{fk_name}" FOREIGN KEY ("{db_column}") REFERENCES "{table}" ("{field}") ON DELETE {on_delete}'
     _DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP FOREIGN KEY "{fk_name}"'
-    _M2M_TABLE_TEMPLATE = 'CREATE TABLE "{table_name}" ("{backward_key}" {backward_type} NOT NULL REFERENCES "{backward_table}" ("{backward_field}") ON DELETE CASCADE,"{forward_key}" {forward_type} NOT NULL REFERENCES "{forward_table}" ("{forward_field}") ON DELETE {on_delete}){extra}{comment}'
+    _M2M_TABLE_TEMPLATE = (
+        'CREATE TABLE "{table_name}" (\n'
+        '    "{backward_key}" {backward_type} NOT NULL REFERENCES "{backward_table}" ("{backward_field}") ON DELETE CASCADE,\n'
+        '    "{forward_key}" {forward_type} NOT NULL REFERENCES "{forward_table}" ("{forward_field}") ON DELETE {on_delete}\n'
+        "){extra}{comment}"
+    )
     _MODIFY_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" MODIFY COLUMN {column}'
     _CHANGE_COLUMN_TEMPLATE = (
         'ALTER TABLE "{table_name}" CHANGE {old_column_name} {new_column_name} {new_column_type}'
@@ -115,7 +120,9 @@ class BaseDDL:
             nullable="NOT NULL" if not field_describe.get("nullable") else "",
             unique="UNIQUE" if field_describe.get("unique") else "",
             comment=self.schema_generator._column_comment_generator(
-                table=db_table, column=db_column, comment=field_describe.get("description"),
+                table=db_table,
+                column=db_column,
+                comment=field_describe.get("description"),
             )
             if description
             else "",
@@ -178,7 +185,7 @@ class BaseDDL:
                 "idx" if not unique else "uid", model, field_names
             ),
             table_name=model._meta.db_table,
-            column_names=", ".join([self.schema_generator.quote(f) for f in field_names]),
+            column_names=", ".join(self.schema_generator.quote(f) for f in field_names),
         )
 
     def drop_index(self, model: "Type[Model]", field_names: List[str], unique=False):
@@ -189,6 +196,12 @@ class BaseDDL:
             table_name=model._meta.db_table,
         )
 
+    def drop_index_by_name(self, model: "Type[Model]", index_name: str):
+        return self._DROP_INDEX_TEMPLATE.format(
+            index_name=index_name,
+            table_name=model._meta.db_table,
+        )
+
     def add_fk(self, model: "Type[Model]", field_describe: dict, reference_table_describe: dict):
         db_table = model._meta.db_table

aerich/ddl/mysql.py

@@ -22,6 +22,11 @@ class MysqlDDL(BaseDDL):
     _DROP_INDEX_TEMPLATE = "ALTER TABLE `{table_name}` DROP INDEX `{index_name}`"
     _ADD_FK_TEMPLATE = "ALTER TABLE `{table_name}` ADD CONSTRAINT `{fk_name}` FOREIGN KEY (`{db_column}`) REFERENCES `{table}` (`{field}`) ON DELETE {on_delete}"
     _DROP_FK_TEMPLATE = "ALTER TABLE `{table_name}` DROP FOREIGN KEY `{fk_name}`"
-    _M2M_TABLE_TEMPLATE = "CREATE TABLE `{table_name}` (`{backward_key}` {backward_type} NOT NULL REFERENCES `{backward_table}` (`{backward_field}`) ON DELETE CASCADE,`{forward_key}` {forward_type} NOT NULL REFERENCES `{forward_table}` (`{forward_field}`) ON DELETE CASCADE){extra}{comment}"
+    _M2M_TABLE_TEMPLATE = (
+        "CREATE TABLE `{table_name}` (\n"
+        "    `{backward_key}` {backward_type} NOT NULL REFERENCES `{backward_table}` (`{backward_field}`) ON DELETE CASCADE,\n"
+        "    `{forward_key}` {forward_type} NOT NULL REFERENCES `{forward_table}` (`{forward_field}`) ON DELETE CASCADE\n"
+        "){extra}{comment}"
+    )
     _MODIFY_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` MODIFY COLUMN {column}"
     _RENAME_TABLE_TEMPLATE = "ALTER TABLE `{old_table_name}` RENAME TO `{new_table_name}`"

aerich/exceptions.py

@@ -2,3 +2,9 @@ class NotSupportError(Exception):
     """
     raise when features not support
     """
+
+
+class DowngradeError(Exception):
+    """
+    raise when downgrade error
+    """

aerich/inspectdb.py (deleted)

@@ -1,86 +0,0 @@
import sys
from typing import List, Optional
from ddlparse import DdlParse
from tortoise import BaseDBAsyncClient
class InspectDb:
_table_template = "class {table}(Model):\n"
_field_template_mapping = {
"INT": " {field} = fields.IntField({pk}{unique}{comment})",
"SMALLINT": " {field} = fields.IntField({pk}{unique}{comment})",
"TINYINT": " {field} = fields.BooleanField({null}{default}{comment})",
"VARCHAR": " {field} = fields.CharField({pk}{unique}{length}{null}{default}{comment})",
"LONGTEXT": " {field} = fields.TextField({null}{default}{comment})",
"TEXT": " {field} = fields.TextField({null}{default}{comment})",
"DATETIME": " {field} = fields.DatetimeField({null}{default}{comment})",
"FLOAT": " {field} = fields.FloatField({null}{default}{comment})",
}
def __init__(self, conn: BaseDBAsyncClient, tables: Optional[List[str]] = None):
self.conn = conn
self.tables = tables
self.DIALECT = conn.schema_generator.DIALECT
async def show_create_tables(self):
if self.DIALECT == "mysql":
if not self.tables:
sql_tables = f"SELECT table_name FROM information_schema.tables WHERE table_schema = '{self.conn.database}';" # nosec: B608
ret = await self.conn.execute_query(sql_tables)
self.tables = map(lambda x: x["TABLE_NAME"], ret[1])
for table in self.tables:
sql_show_create_table = f"SHOW CREATE TABLE {table}"
ret = await self.conn.execute_query(sql_show_create_table)
yield ret[1][0]["Create Table"]
else:
raise NotImplementedError("Currently only support MySQL")
async def inspect(self):
ddl_list = self.show_create_tables()
result = "from tortoise import Model, fields\n\n\n"
tables = []
async for ddl in ddl_list:
parser = DdlParse(ddl, DdlParse.DATABASE.mysql)
table = parser.parse()
name = table.name.title()
columns = table.columns
fields = []
model = self._table_template.format(table=name)
for column_name, column in columns.items():
comment = default = length = unique = null = pk = ""
if column.primary_key:
pk = "pk=True, "
if column.unique:
unique = "unique=True, "
if column.data_type == "VARCHAR":
length = f"max_length={column.length}, "
if not column.not_null:
null = "null=True, "
if column.default is not None:
if column.data_type == "TINYINT":
default = f"default={'True' if column.default == '1' else 'False'}, "
elif column.data_type == "DATETIME":
if "CURRENT_TIMESTAMP" in column.default:
if "ON UPDATE CURRENT_TIMESTAMP" in ddl:
default = "auto_now_add=True, "
else:
default = "auto_now=True, "
else:
default = f"default={column.default}, "
if column.comment:
comment = f"description='{column.comment}', "
field = self._field_template_mapping[column.data_type].format(
field=column_name,
pk=pk,
unique=unique,
length=length,
null=null,
default=default,
comment=comment,
)
fields.append(field)
tables.append(model + "\n".join(fields))
sys.stdout.write(result + "\n\n\n".join(tables))

aerich/inspectdb/__init__.py (new file)

@@ -0,0 +1,168 @@
from typing import Any, List, Optional
from pydantic import BaseModel
from tortoise import BaseDBAsyncClient
class Column(BaseModel):
name: str
data_type: str
null: bool
default: Any
comment: Optional[str]
pk: bool
unique: bool
index: bool
length: Optional[int]
extra: Optional[str]
decimal_places: Optional[int]
max_digits: Optional[int]
def translate(self) -> dict:
comment = default = length = index = null = pk = ""
if self.pk:
pk = "pk=True, "
else:
if self.unique:
index = "unique=True, "
else:
if self.index:
index = "index=True, "
if self.data_type in ["varchar", "VARCHAR"]:
length = f"max_length={self.length}, "
if self.data_type in ["decimal", "numeric"]:
length_parts = []
if self.max_digits:
length_parts.append(f"max_digits={self.max_digits}")
if self.decimal_places:
length_parts.append(f"decimal_places={self.decimal_places}")
length = ", ".join(length_parts)
if self.null:
null = "null=True, "
if self.default is not None:
if self.data_type in ["tinyint", "INT"]:
default = f"default={'True' if self.default == '1' else 'False'}, "
elif self.data_type == "bool":
default = f"default={'True' if self.default == 'true' else 'False'}, "
elif self.data_type in ["datetime", "timestamptz", "TIMESTAMP"]:
if "CURRENT_TIMESTAMP" == self.default:
if "DEFAULT_GENERATED on update CURRENT_TIMESTAMP" == self.extra:
default = "auto_now=True, "
else:
default = "auto_now_add=True, "
else:
if "::" in self.default:
default = f"default={self.default.split('::')[0]}, "
elif self.default.endswith("()"):
default = ""
else:
default = f"default={self.default}, "
if self.comment:
comment = f"description='{self.comment}', "
return {
"name": self.name,
"pk": pk,
"index": index,
"null": null,
"default": default,
"length": length,
"comment": comment,
}
class Inspect:
_table_template = "class {table}(Model):\n"
def __init__(self, conn: BaseDBAsyncClient, tables: Optional[List[str]] = None):
self.conn = conn
try:
self.database = conn.database
except AttributeError:
pass
self.tables = tables
@property
def field_map(self) -> dict:
raise NotImplementedError
async def inspect(self) -> str:
if not self.tables:
self.tables = await self.get_all_tables()
result = "from tortoise import Model, fields\n\n\n"
tables = []
for table in self.tables:
columns = await self.get_columns(table)
fields = []
model = self._table_template.format(table=table.title().replace("_", ""))
for column in columns:
field = self.field_map[column.data_type](**column.translate())
fields.append(" " + field)
tables.append(model + "\n".join(fields))
return result + "\n\n\n".join(tables)
async def get_columns(self, table: str) -> List[Column]:
raise NotImplementedError
async def get_all_tables(self) -> List[str]:
raise NotImplementedError
@classmethod
def decimal_field(cls, **kwargs) -> str:
return "{name} = fields.DecimalField({pk}{index}{length}{null}{default}{comment})".format(
**kwargs
)
@classmethod
def time_field(cls, **kwargs) -> str:
return "{name} = fields.TimeField({null}{default}{comment})".format(**kwargs)
@classmethod
def date_field(cls, **kwargs) -> str:
return "{name} = fields.DateField({null}{default}{comment})".format(**kwargs)
@classmethod
def float_field(cls, **kwargs) -> str:
return "{name} = fields.FloatField({null}{default}{comment})".format(**kwargs)
@classmethod
def datetime_field(cls, **kwargs) -> str:
return "{name} = fields.DatetimeField({null}{default}{comment})".format(**kwargs)
@classmethod
def text_field(cls, **kwargs) -> str:
return "{name} = fields.TextField({null}{default}{comment})".format(**kwargs)
@classmethod
def char_field(cls, **kwargs) -> str:
return "{name} = fields.CharField({pk}{index}{length}{null}{default}{comment})".format(
**kwargs
)
@classmethod
def int_field(cls, **kwargs) -> str:
return "{name} = fields.IntField({pk}{index}{comment})".format(**kwargs)
@classmethod
def smallint_field(cls, **kwargs) -> str:
return "{name} = fields.SmallIntField({pk}{index}{comment})".format(**kwargs)
@classmethod
def bigint_field(cls, **kwargs) -> str:
return "{name} = fields.BigIntField({pk}{index}{default}{comment})".format(**kwargs)
@classmethod
def bool_field(cls, **kwargs) -> str:
return "{name} = fields.BooleanField({null}{default}{comment})".format(**kwargs)
@classmethod
def uuid_field(cls, **kwargs) -> str:
return "{name} = fields.UUIDField({pk}{index}{default}{comment})".format(**kwargs)
@classmethod
def json_field(cls, **kwargs) -> str:
return "{name} = fields.JSONField({null}{default}{comment})".format(**kwargs)
@classmethod
def binary_field(cls, **kwargs) -> str:
return "{name} = fields.BinaryField({null}{default}{comment})".format(**kwargs)

aerich/inspectdb/mysql.py (new file)

@@ -0,0 +1,69 @@
from typing import List
from aerich.inspectdb import Column, Inspect
class InspectMySQL(Inspect):
@property
def field_map(self) -> dict:
return {
"int": self.int_field,
"smallint": self.smallint_field,
"tinyint": self.bool_field,
"bigint": self.bigint_field,
"varchar": self.char_field,
"longtext": self.text_field,
"text": self.text_field,
"datetime": self.datetime_field,
"float": self.float_field,
"date": self.date_field,
"time": self.time_field,
"decimal": self.decimal_field,
"json": self.json_field,
"longblob": self.binary_field,
}
async def get_all_tables(self) -> List[str]:
sql = "select TABLE_NAME from information_schema.TABLES where TABLE_SCHEMA=%s"
ret = await self.conn.execute_query_dict(sql, [self.database])
return list(map(lambda x: x["TABLE_NAME"], ret))
async def get_columns(self, table: str) -> List[Column]:
columns = []
sql = """select c.*, s.NON_UNIQUE, s.INDEX_NAME
from information_schema.COLUMNS c
left join information_schema.STATISTICS s on c.TABLE_NAME = s.TABLE_NAME
and c.TABLE_SCHEMA = s.TABLE_SCHEMA
and c.COLUMN_NAME = s.COLUMN_NAME
where c.TABLE_SCHEMA = %s
and c.TABLE_NAME = %s"""
ret = await self.conn.execute_query_dict(sql, [self.database, table])
for row in ret:
non_unique = row["NON_UNIQUE"]
if non_unique is None:
unique = False
else:
unique = not non_unique
index_name = row["INDEX_NAME"]
if index_name is None:
index = False
else:
index = row["INDEX_NAME"] != "PRIMARY"
columns.append(
Column(
name=row["COLUMN_NAME"],
data_type=row["DATA_TYPE"],
null=row["IS_NULLABLE"] == "YES",
default=row["COLUMN_DEFAULT"],
pk=row["COLUMN_KEY"] == "PRI",
comment=row["COLUMN_COMMENT"],
unique=unique or row["COLUMN_KEY"] == "UNI",
extra=row["EXTRA"],
index=index,
length=row["CHARACTER_MAXIMUM_LENGTH"],
max_digits=row["NUMERIC_PRECISION"],
decimal_places=row["NUMERIC_SCALE"],
)
)
return columns

aerich/inspectdb/postgres.py (new file)

@@ -0,0 +1,76 @@
from typing import List, Optional
from tortoise import BaseDBAsyncClient
from aerich.inspectdb import Column, Inspect
class InspectPostgres(Inspect):
def __init__(self, conn: BaseDBAsyncClient, tables: Optional[List[str]] = None):
super().__init__(conn, tables)
self.schema = self.conn.server_settings.get("schema") or "public"
@property
def field_map(self) -> dict:
return {
"int4": self.int_field,
"int8": self.int_field,
"smallint": self.smallint_field,
"varchar": self.char_field,
"text": self.text_field,
"bigint": self.bigint_field,
"timestamptz": self.datetime_field,
"float4": self.float_field,
"float8": self.float_field,
"date": self.date_field,
"time": self.time_field,
"decimal": self.decimal_field,
"numeric": self.decimal_field,
"uuid": self.uuid_field,
"jsonb": self.json_field,
"bytea": self.binary_field,
"bool": self.bool_field,
"timestamp": self.datetime_field,
}
async def get_all_tables(self) -> List[str]:
sql = "select TABLE_NAME from information_schema.TABLES where table_catalog=$1 and table_schema=$2"
ret = await self.conn.execute_query_dict(sql, [self.database, self.schema])
return list(map(lambda x: x["table_name"], ret))
async def get_columns(self, table: str) -> List[Column]:
columns = []
sql = f"""select c.column_name,
col_description('public.{table}'::regclass, ordinal_position) as column_comment,
t.constraint_type as column_key,
udt_name as data_type,
is_nullable,
column_default,
character_maximum_length,
numeric_precision,
numeric_scale
from information_schema.constraint_column_usage const
join information_schema.table_constraints t
using (table_catalog, table_schema, table_name, constraint_catalog, constraint_schema, constraint_name)
right join information_schema.columns c using (column_name, table_catalog, table_schema, table_name)
where c.table_catalog = $1
and c.table_name = $2
and c.table_schema = $3"""
ret = await self.conn.execute_query_dict(sql, [self.database, table, self.schema])
for row in ret:
columns.append(
Column(
name=row["column_name"],
data_type=row["data_type"],
null=row["is_nullable"] == "YES",
default=row["column_default"],
length=row["character_maximum_length"],
max_digits=row["numeric_precision"],
decimal_places=row["numeric_scale"],
comment=row["column_comment"],
pk=row["column_key"] == "PRIMARY KEY",
unique=False, # can't get this simply
index=False, # can't get this simply
)
)
return columns

aerich/inspectdb/sqlite.py (new file)

@@ -0,0 +1,61 @@
from typing import List
from aerich.inspectdb import Column, Inspect
class InspectSQLite(Inspect):
@property
def field_map(self) -> dict:
return {
"INTEGER": self.int_field,
"INT": self.bool_field,
"SMALLINT": self.smallint_field,
"VARCHAR": self.char_field,
"TEXT": self.text_field,
"TIMESTAMP": self.datetime_field,
"REAL": self.float_field,
"BIGINT": self.bigint_field,
"DATE": self.date_field,
"TIME": self.time_field,
"JSON": self.json_field,
"BLOB": self.binary_field,
}
async def get_columns(self, table: str) -> List[Column]:
columns = []
sql = f"PRAGMA table_info({table})"
ret = await self.conn.execute_query_dict(sql)
columns_index = await self._get_columns_index(table)
for row in ret:
try:
length = row["type"].split("(")[1].split(")")[0]
except IndexError:
length = None
columns.append(
Column(
name=row["name"],
data_type=row["type"].split("(")[0],
null=row["notnull"] == 0,
default=row["dflt_value"],
length=length,
pk=row["pk"] == 1,
unique=columns_index.get(row["name"]) == "unique",
index=columns_index.get(row["name"]) == "index",
)
)
return columns
async def _get_columns_index(self, table: str):
sql = f"PRAGMA index_list ({table})"
indexes = await self.conn.execute_query_dict(sql)
ret = {}
for index in indexes:
sql = f"PRAGMA index_info({index['name']})"
index_info = (await self.conn.execute_query_dict(sql))[0]
ret[index_info["name"]] = "unique" if index["unique"] else "index"
return ret
async def get_all_tables(self) -> List[str]:
sql = "select tbl_name from sqlite_master where type='table' and name!='sqlite_sequence'"
ret = await self.conn.execute_query_dict(sql)
return list(map(lambda x: x["tbl_name"], ret))

aerich/migrate.py

@@ -1,21 +1,32 @@
+import importlib
 import os
 from datetime import datetime
+from hashlib import md5
 from pathlib import Path
-from typing import Dict, List, Optional, Tuple, Type
+from typing import Dict, List, Optional, Tuple, Type, Union

 import click
 from dictdiffer import diff
 from tortoise import BaseDBAsyncClient, Model, Tortoise
 from tortoise.exceptions import OperationalError
+from tortoise.indexes import Index

 from aerich.ddl import BaseDDL
 from aerich.models import MAX_VERSION_LENGTH, Aerich
-from aerich.utils import (
-    get_app_connection,
-    get_models_describe,
-    is_default_function,
-    write_version_file,
-)
+from aerich.utils import get_app_connection, get_models_describe, is_default_function
+
+MIGRATE_TEMPLATE = """from tortoise import BaseDBAsyncClient
+
+
+async def upgrade(db: BaseDBAsyncClient) -> str:
+    return \"\"\"
+        {upgrade_sql}\"\"\"
+
+
+async def downgrade(db: BaseDBAsyncClient) -> str:
+    return \"\"\"
+        {downgrade_sql}\"\"\"
+"""


 class Migrate:
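With MIGRATE_TEMPLATE, a version file is now an importable Python module rather than a parsed .sql file; a generated file would look roughly like this (the file name and SQL statements are illustrative):

# e.g. migrations/models/1_20220927000000_update.py (hypothetical)
from tortoise import BaseDBAsyncClient


async def upgrade(db: BaseDBAsyncClient) -> str:
    return """
        ALTER TABLE `user` ADD `age` INT NOT NULL DEFAULT 0;"""


async def downgrade(db: BaseDBAsyncClient) -> str:
    return """
        ALTER TABLE `user` DROP COLUMN `age`;"""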
@@ -32,14 +43,14 @@ class Migrate:
     ddl: BaseDDL
     _last_version_content: Optional[dict] = None
     app: str
-    migrate_location: str
+    migrate_location: Path
     dialect: str
     _db_version: Optional[str] = None

     @classmethod
     def get_all_version_files(cls) -> List[str]:
         return sorted(
-            filter(lambda x: x.endswith("sql"), os.listdir(cls.migrate_location)),
+            filter(lambda x: x.endswith("py"), os.listdir(cls.migrate_location)),
             key=lambda x: int(x.split("_")[0]),
         )
@@ -61,6 +72,11 @@ class Migrate:
         ret = await connection.execute_query(sql)
         cls._db_version = ret[1][0].get("version")

+    @classmethod
+    async def load_ddl_class(cls):
+        ddl_dialect_module = importlib.import_module(f"aerich.ddl.{cls.dialect}")
+        return getattr(ddl_dialect_module, f"{cls.dialect.capitalize()}DDL")
+
     @classmethod
     async def init(cls, config: dict, app: str, location: str):
         await Tortoise.init(config=config)
@@ -72,18 +88,8 @@ class Migrate:
         connection = get_app_connection(config, app)

         cls.dialect = connection.schema_generator.DIALECT
-        if cls.dialect == "mysql":
-            from aerich.ddl.mysql import MysqlDDL
-
-            cls.ddl = MysqlDDL(connection)
-        elif cls.dialect == "sqlite":
-            from aerich.ddl.sqlite import SqliteDDL
-
-            cls.ddl = SqliteDDL(connection)
-        elif cls.dialect == "postgres":
-            from aerich.ddl.postgres import PostgresDDL
-
-            cls.ddl = PostgresDDL(connection)
+        cls.ddl_class = await cls.load_ddl_class()
+        cls.ddl = cls.ddl_class(connection)
         await cls._get_db_version(connection)

     @classmethod
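The new lookup derives the class from the dialect name, so supporting another database only requires adding an aerich.ddl.<dialect> module with a matching class name; a standalone sketch of the same convention:

import importlib


def load_ddl_class(dialect: str):
    # "postgres" -> module aerich.ddl.postgres, class PostgresDDL
    ddl_dialect_module = importlib.import_module(f"aerich.ddl.{dialect}")
    return getattr(ddl_dialect_module, f"{dialect.capitalize()}DDL")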
@@ -99,24 +105,28 @@ class Migrate:
         now = datetime.now().strftime("%Y%m%d%H%M%S").replace("/", "")
         last_version_num = await cls._get_last_version_num()
         if last_version_num is None:
-            return f"0_{now}_init.sql"
-        version = f"{last_version_num + 1}_{now}_{name}.sql"
+            return f"0_{now}_init.py"
+        version = f"{last_version_num + 1}_{now}_{name}.py"
         if len(version) > MAX_VERSION_LENGTH:
             raise ValueError(f"Version name exceeds maximum length ({MAX_VERSION_LENGTH})")
         return version

     @classmethod
-    async def _generate_diff_sql(cls, name):
+    async def _generate_diff_py(cls, name):
         version = await cls.generate_version(name)
         # delete if same version exists
         for version_file in cls.get_all_version_files():
             if version_file.startswith(version.split("_")[0]):
                 os.unlink(Path(cls.migrate_location, version_file))
-        content = {
-            "upgrade": list(dict.fromkeys(cls.upgrade_operators)),
-            "downgrade": list(dict.fromkeys(cls.downgrade_operators)),
-        }
-        write_version_file(Path(cls.migrate_location, version), content)
+
+        version_file = Path(cls.migrate_location, version)
+        content = MIGRATE_TEMPLATE.format(
+            upgrade_sql=";\n        ".join(cls.upgrade_operators) + ";",
+            downgrade_sql=";\n        ".join(cls.downgrade_operators) + ";",
+        )
+        with open(version_file, "w", encoding="utf-8") as f:
+            f.write(content)
         return version

     @classmethod
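The template fill is a plain string join: each operator becomes one semicolon-terminated statement, indented with eight spaces to sit under the triple-quoted return. In isolation (the operators are illustrative):

upgrade_operators = [
    "ALTER TABLE `user` ADD `age` INT NOT NULL",
    "ALTER TABLE `user` DROP COLUMN `avatar`",
]
upgrade_sql = ";\n        ".join(upgrade_operators) + ";"
print(upgrade_sql)
# ALTER TABLE `user` ADD `age` INT NOT NULL;
#         ALTER TABLE `user` DROP COLUMN `avatar`;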
@@ -135,28 +145,40 @@ class Migrate:
         if not cls.upgrade_operators:
             return ""

-        return await cls._generate_diff_sql(name)
+        return await cls._generate_diff_py(name)

     @classmethod
-    def _add_operator(cls, operator: str, upgrade=True, fk_m2m=False):
+    def _add_operator(cls, operator: str, upgrade=True, fk_m2m_index=False):
         """
         add operator,differentiate fk because fk is order limit
         :param operator:
         :param upgrade:
-        :param fk_m2m:
+        :param fk_m2m_index:
         :return:
         """
         if upgrade:
-            if fk_m2m:
+            if fk_m2m_index:
                 cls._upgrade_fk_m2m_index_operators.append(operator)
             else:
                 cls.upgrade_operators.append(operator)
         else:
-            if fk_m2m:
+            if fk_m2m_index:
                 cls._downgrade_fk_m2m_index_operators.append(operator)
             else:
                 cls.downgrade_operators.append(operator)

+    @classmethod
+    def _handle_indexes(cls, model: Type[Model], indexes: List[Union[Tuple[str], Index]]):
+        ret = []
+        for index in indexes:
+            if isinstance(index, Index):
+                index.__hash__ = lambda self: md5(  # nosec: B303
+                    self.index_name(cls.ddl.schema_generator, model).encode()
+                    + self.__class__.__name__.encode()
+                ).hexdigest()
+            ret.append(index)
+        return ret
+
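Hashing by generated index name is what lets the set arithmetic in diff_models detect added and removed indexes. A reduced illustration of the idea (ToyIndex is a stand-in for this sketch, not aerich or tortoise code):

from hashlib import md5


class ToyIndex:
    def __init__(self, name: str):
        self.name = name

    def __hash__(self):
        # two indexes that would generate the same name hash equally
        return hash(md5(self.name.encode() + b"ToyIndex").hexdigest())  # nosec: B303

    def __eq__(self, other):
        return hash(self) == hash(other)


old = {ToyIndex("idx_a"), ToyIndex("idx_b")}
new = {ToyIndex("idx_b"), ToyIndex("idx_c")}
print(sorted(i.name for i in new - old))  # ['idx_c']: only the genuinely new index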
     @classmethod
     def diff_models(cls, old_models: Dict[str, dict], new_models: Dict[str, dict], upgrade=True):
         """
@@ -192,7 +214,18 @@ class Migrate:
                 new_unique_together = set(
                     map(lambda x: tuple(x), new_model_describe.get("unique_together"))
                 )
+                old_indexes = set(
+                    map(
+                        lambda x: x if isinstance(x, Index) else tuple(x),
+                        cls._handle_indexes(model, old_model_describe.get("indexes", [])),
+                    )
+                )
+                new_indexes = set(
+                    map(
+                        lambda x: x if isinstance(x, Index) else tuple(x),
+                        cls._handle_indexes(model, new_model_describe.get("indexes", [])),
+                    )
+                )
                 old_pk_field = old_model_describe.get("pk_field")
                 new_pk_field = new_model_describe.get("pk_field")
                 # pk field
@@ -205,6 +238,8 @@ class Migrate:
                 old_m2m_fields = old_model_describe.get("m2m_fields")
                 new_m2m_fields = new_model_describe.get("m2m_fields")
                 for action, option, change in diff(old_m2m_fields, new_m2m_fields):
+                    if change[0][0] == "db_constraint":
+                        continue
                     table = change[0][1].get("through")
                     if action == "add":
                         add = False
@@ -222,7 +257,7 @@ class Migrate:
                                 new_models.get(change[0][1].get("model_name")),
                             ),
                             upgrade,
-                            fk_m2m=True,
+                            fk_m2m_index=True,
                         )
                     elif action == "remove":
                         add = False
@@ -233,14 +268,19 @@ class Migrate:
                             cls._downgrade_m2m.append(table)
                             add = True
                         if add:
-                            cls._add_operator(cls.drop_m2m(table), upgrade, fk_m2m=True)
+                            cls._add_operator(cls.drop_m2m(table), upgrade, True)
                 # add unique_together
                 for index in new_unique_together.difference(old_unique_together):
                     cls._add_operator(cls._add_index(model, index, True), upgrade, True)
                 # remove unique_together
                 for index in old_unique_together.difference(new_unique_together):
                     cls._add_operator(cls._drop_index(model, index, True), upgrade, True)
+                # add indexes
+                for index in new_indexes.difference(old_indexes):
+                    cls._add_operator(cls._add_index(model, index, False), upgrade, True)
+                # remove indexes
+                for index in old_indexes.difference(new_indexes):
+                    cls._add_operator(cls._drop_index(model, index, False), upgrade, True)

                 old_data_fields = old_model_describe.get("data_fields")
                 new_data_fields = new_model_describe.get("data_fields")
@@ -262,7 +302,11 @@ class Migrate:
                         # rename field
                         if (
                             changes[0]
-                            == ("change", "name", (old_data_field_name, new_data_field_name),)
+                            == (
+                                "change",
+                                "name",
+                                (old_data_field_name, new_data_field_name),
+                            )
                             and changes[1]
                             == (
                                 "change",
@@ -293,36 +337,62 @@ class Migrate:
                                 and cls._db_version.startswith("5.")
                             ):
                                 cls._add_operator(
-                                    cls._modify_field(model, new_data_field), upgrade,
+                                    cls._change_field(
+                                        model, old_data_field, new_data_field
+                                    ),
+                                    upgrade,
                                 )
                             else:
                                 cls._add_operator(
-                                    cls._rename_field(model, *changes[1][2]), upgrade,
+                                    cls._rename_field(model, *changes[1][2]),
+                                    upgrade,
                                 )
                     if not is_rename:
                         cls._add_operator(
-                            cls._add_field(model, new_data_field,), upgrade,
+                            cls._add_field(
+                                model,
+                                new_data_field,
+                            ),
+                            upgrade,
                         )
+                        if new_data_field["indexed"]:
+                            cls._add_operator(
+                                cls._add_index(
+                                    model, {new_data_field["db_column"]}, new_data_field["unique"]
+                                ),
+                                upgrade,
+                                True,
+                            )
                 # remove fields
                 for old_data_field_name in set(old_data_fields_name).difference(
                     set(new_data_fields_name)
                 ):
-                    # don't remove field if is rename
+                    # don't remove field if is renamed
                     if (upgrade and old_data_field_name in cls._rename_old) or (
                         not upgrade and old_data_field_name in cls._rename_new
                     ):
                         continue
+                    old_data_field = next(
+                        filter(lambda x: x.get("name") == old_data_field_name, old_data_fields)
+                    )
+                    db_column = old_data_field["db_column"]
                     cls._add_operator(
                         cls._remove_field(
                             model,
-                            next(
-                                filter(
-                                    lambda x: x.get("name") == old_data_field_name, old_data_fields
-                                )
-                            ).get("db_column"),
+                            db_column,
                         ),
                         upgrade,
                     )
+                    if old_data_field["indexed"]:
+                        cls._add_operator(
+                            cls._drop_index(
+                                model,
+                                {db_column},
+                            ),
+                            upgrade,
+                            True,
+                        )

                 old_fk_fields = old_model_describe.get("fk_fields")
                 new_fk_fields = new_model_describe.get("fk_fields")
@@ -336,11 +406,14 @@ class Migrate:
                     fk_field = next(
                         filter(lambda x: x.get("name") == new_fk_field_name, new_fk_fields)
                     )
-                    cls._add_operator(
-                        cls._add_fk(model, fk_field, new_models.get(fk_field.get("python_type"))),
-                        upgrade,
-                        fk_m2m=True,
-                    )
+                    if fk_field.get("db_constraint"):
+                        cls._add_operator(
+                            cls._add_fk(
+                                model, fk_field, new_models.get(fk_field.get("python_type"))
+                            ),
+                            upgrade,
+                            fk_m2m_index=True,
+                        )
                 # drop fk
                 for old_fk_field_name in set(old_fk_fields_name).difference(
                     set(new_fk_fields_name)
@@ -348,13 +421,14 @@ class Migrate:
                     old_fk_field = next(
                         filter(lambda x: x.get("name") == old_fk_field_name, old_fk_fields)
                     )
-                    cls._add_operator(
-                        cls._drop_fk(
-                            model, old_fk_field, old_models.get(old_fk_field.get("python_type"))
-                        ),
-                        upgrade,
-                        fk_m2m=True,
-                    )
+                    if old_fk_field.get("db_constraint"):
+                        cls._add_operator(
+                            cls._drop_fk(
+                                model, old_fk_field, old_models.get(old_fk_field.get("python_type"))
+                            ),
+                            upgrade,
+                            fk_m2m_index=True,
+                        )
                 # change fields
                 for field_name in set(new_data_fields_name).intersection(set(old_data_fields_name)):
                     old_data_field = next(
@@ -378,8 +452,14 @@ class Migrate:
                                     cls._drop_index(model, (field_name,), unique), upgrade, True
                                 )
                             elif option == "db_field_types.":
-                                # continue since repeated with others
-                                continue
+                                if new_data_field.get("field_type") == "DecimalField":
+                                    # modify column
+                                    cls._add_operator(
+                                        cls._modify_field(model, new_data_field),
+                                        upgrade,
+                                    )
+                                else:
+                                    continue
                             elif option == "default":
                                 if not (
                                     is_default_function(old_new[0]) or is_default_function(old_new[1])
@@ -397,7 +477,8 @@ class Migrate:
                             else:
                                 # modify column
                                 cls._add_operator(
-                                    cls._modify_field(model, new_data_field), upgrade,
+                                    cls._modify_field(model, new_data_field),
+                                    upgrade,
                                 )

         for old_model in old_models:
@@ -428,19 +509,28 @@ class Migrate:
     def _resolve_fk_fields_name(cls, model: Type[Model], fields_name: Tuple[str]):
         ret = []
         for field_name in fields_name:
-            if field_name in model._meta.fk_fields:
+            field = model._meta.fields_map[field_name]
+            if field.source_field:
+                ret.append(field.source_field)
+            elif field_name in model._meta.fk_fields:
                 ret.append(field_name + "_id")
             else:
                 ret.append(field_name)
         return ret

     @classmethod
-    def _drop_index(cls, model: Type[Model], fields_name: Tuple[str], unique=False):
+    def _drop_index(cls, model: Type[Model], fields_name: Union[Tuple[str], Index], unique=False):
+        if isinstance(fields_name, Index):
+            return cls.ddl.drop_index_by_name(
+                model, fields_name.index_name(cls.ddl.schema_generator, model)
+            )
         fields_name = cls._resolve_fk_fields_name(model, fields_name)
         return cls.ddl.drop_index(model, fields_name, unique)

     @classmethod
-    def _add_index(cls, model: Type[Model], fields_name: Tuple[str], unique=False):
+    def _add_index(cls, model: Type[Model], fields_name: Union[Tuple[str], Index], unique=False):
+        if isinstance(fields_name, Index):
+            return fields_name.get_sql(cls.ddl.schema_generator, model, False)
         fields_name = cls._resolve_fk_fields_name(model, fields_name)
         return cls.ddl.add_index(model, fields_name, unique)
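With the Union[Tuple[str], Index] signatures, Meta.indexes may mix bare field tuples with tortoise Index objects; a sketch of a model exercising both forms (the model itself is illustrative):

from tortoise import Model, fields
from tortoise.indexes import Index


class Product(Model):
    name = fields.CharField(max_length=50)
    type = fields.IntField()

    class Meta:
        # the tuple form goes through _resolve_fk_fields_name; the Index form
        # is rendered via Index.get_sql()/index_name() as shown above
        indexes = [("name", "type"), Index(fields=("name",), name="idx_product_name")]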


@@ -1,12 +1,15 @@
 from tortoise import Model, fields

+from aerich.coder import decoder, encoder
+
 MAX_VERSION_LENGTH = 255
+MAX_APP_LENGTH = 100


 class Aerich(Model):
     version = fields.CharField(max_length=MAX_VERSION_LENGTH)
-    app = fields.CharField(max_length=20)
-    content = fields.JSONField()
+    app = fields.CharField(max_length=MAX_APP_LENGTH)
+    content = fields.JSONField(encoder=encoder, decoder=decoder)

     class Meta:
         ordering = ["-id"]
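aerich.coder's encoder/decoder are not shown in this diff; the mechanism is Tortoise's standard JSONField serialization hooks. A rough sketch under that assumption (json.dumps with a str fallback is illustrative, not necessarily what aerich.coder does):

import json

from tortoise import Model, fields


def encoder(obj) -> str:
    # serialize values plain JSON would reject, e.g. by falling back to str()
    return json.dumps(obj, default=str)


def decoder(raw: str):
    return json.loads(raw)


class Aerich(Model):
    version = fields.CharField(max_length=255)
    app = fields.CharField(max_length=100)
    content = fields.JSONField(encoder=encoder, decoder=decoder)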


@@ -1,4 +1,4 @@
-import importlib
+import importlib.util
 import os
 import re
 import sys
@@ -11,7 +11,7 @@ from tortoise import BaseDBAsyncClient, Tortoise
 def add_src_path(path: str) -> str:
     """
-    add a folder to the paths so we can import from there
+    add a folder to the paths, so we can import from there
     :param path: path to add
     :return: absolute path
     """
@@ -36,7 +36,8 @@ def get_app_connection_name(config, app_name: str) -> str:
     if app:
         return app.get("default_connection", "default")
     raise BadOptionUsage(
-        option_name="--app", message=f'Can\'t get app named "{app_name}"',
+        option_name="--app",
+        message=f'Can\'t get app named "{app_name}"',
     )
@@ -76,58 +77,6 @@ def get_tortoise_config(ctx: Context, tortoise_orm: str) -> dict:
     return config


-_UPGRADE = "-- upgrade --\n"
-_DOWNGRADE = "-- downgrade --\n"
-
-
-def get_version_content_from_file(version_file: str) -> Dict:
-    """
-    get version content
-    :param version_file:
-    :return:
-    """
-    with open(version_file, "r", encoding="utf-8") as f:
-        content = f.read()
-        first = content.index(_UPGRADE)
-        try:
-            second = content.index(_DOWNGRADE)
-        except ValueError:
-            second = len(content) - 1
-        upgrade_content = content[first + len(_UPGRADE) : second].strip()  # noqa:E203
-        downgrade_content = content[second + len(_DOWNGRADE) :].strip()  # noqa:E203
-        ret = {
-            "upgrade": list(filter(lambda x: x or False, upgrade_content.split(";\n"))),
-            "downgrade": list(filter(lambda x: x or False, downgrade_content.split(";\n"))),
-        }
-        return ret
-
-
-def write_version_file(version_file: Path, content: Dict):
-    """
-    write version file
-    :param version_file:
-    :param content:
-    :return:
-    """
-    with open(version_file, "w", encoding="utf-8") as f:
-        f.write(_UPGRADE)
-        upgrade = content.get("upgrade")
-        if len(upgrade) > 1:
-            f.write(";\n".join(upgrade) + ";\n")
-        else:
-            f.write(f"{upgrade[0]}")
-            if not upgrade[0].endswith(";"):
-                f.write(";")
-            f.write("\n")
-        downgrade = content.get("downgrade")
-        if downgrade:
-            f.write(_DOWNGRADE)
-            if len(downgrade) > 1:
-                f.write(";\n".join(downgrade) + ";\n")
-            else:
-                f.write(f"{downgrade[0]};\n")
-
-
 def get_models_describe(app: str) -> Dict:
     """
     get app models describe
@@ -143,3 +92,11 @@ def get_models_describe(app: str) -> Dict:
 def is_default_function(string: str):
     return re.match(r"^<function.+>$", str(string or ""))
+
+
+def import_py_file(file: Path):
+    module_name, file_ext = os.path.splitext(os.path.split(file)[-1])
+    spec = importlib.util.spec_from_file_location(module_name, file)
+    module = importlib.util.module_from_spec(spec)
+    spec.loader.exec_module(module)
+    return module
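Together with MIGRATE_TEMPLATE, this is how a .py version file is consumed: load it as a module, then call its upgrade/downgrade coroutines. A minimal sketch (the path and connection handling are illustrative):

from pathlib import Path


async def run_upgrade(db):  # db: a connected BaseDBAsyncClient
    version_file = Path("migrations/models/0_20220927000000_init.py")  # hypothetical
    module = import_py_file(version_file)
    sql = await module.upgrade(db)
    await db.execute_script(sql)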

aerich/version.py (new file)

@@ -0,0 +1 @@
__version__ = "0.7.1"

poetry.lock (generated; diff suppressed because it is too large)


@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "aerich"
-version = "0.5.4"
+version = "0.7.1"
 description = "A database migrations tool for Tortoise ORM."
 authors = ["long2ice <long2ice@gmail.com>"]
 license = "Apache-2.0"
@@ -18,27 +18,32 @@ include = ["CHANGELOG.md", "LICENSE", "README.md"]
 python = "^3.7"
 tortoise-orm = "*"
 click = "*"
-pydantic = "*"
-aiomysql = { version = "*", optional = true }
 asyncpg = { version = "*", optional = true }
-ddlparse = "*"
+asyncmy = { version = "*", optional = true }
+pydantic = "*"
 dictdiffer = "*"
+tomlkit = "*"

 [tool.poetry.dev-dependencies]
 flake8 = "*"
 isort = "*"
-black = "19.10b0"
+black = "*"
 pytest = "*"
 pytest-xdist = "*"
 pytest-asyncio = "*"
 bandit = "*"
 pytest-mock = "*"
 cryptography = "*"
+pyproject-flake8 = "*"

 [tool.poetry.extras]
 asyncmy = ["asyncmy"]
 asyncpg = ["asyncpg"]
-aiomysql = ["aiomysql"]
+
+[tool.aerich]
+tortoise_orm = "conftest.tortoise_orm"
+location = "./migrations"
+src_folder = "./."

 [build-system]
 requires = ["poetry>=0.12"]
@@ -46,3 +51,17 @@ build-backend = "poetry.masonry.api"
 [tool.poetry.scripts]
 aerich = "aerich.cli:main"
+
+[tool.black]
+line-length = 100
+target-version = ['py36', 'py37', 'py38', 'py39']
+
+[tool.pytest.ini_options]
+asyncio_mode = 'auto'
+
+[tool.mypy]
+pretty = true
+ignore_missing_imports = true
+
+[tool.flake8]
+ignore = 'E501,W503,E203'


@@ -1,2 +0,0 @@
-[flake8]
-ignore = E501,W503


@@ -29,6 +29,7 @@ class User(Model):
     is_active = fields.BooleanField(default=True, description="Is Active")
     is_superuser = fields.BooleanField(default=False, description="Is SuperUser")
     intro = fields.TextField(default="")
+    longitude = fields.DecimalField(max_digits=10, decimal_places=8)


 class Email(Model):
@@ -56,13 +57,16 @@ class Product(Model):
     view_num = fields.IntField(description="View Num", default=0)
     sort = fields.IntField()
     is_reviewed = fields.BooleanField(description="Is Reviewed")
-    type = fields.IntEnumField(ProductType, description="Product Type")
+    type = fields.IntEnumField(
+        ProductType, description="Product Type", source_field="type_db_alias"
+    )
     pic = fields.CharField(max_length=200)
     body = fields.TextField()
     created_at = fields.DatetimeField(auto_now_add=True)

     class Meta:
         unique_together = (("name", "type"),)
+        indexes = (("name", "type"),)


 class Config(Model):
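source_field decouples the Python attribute from the column name, which is why the expected DDL in the tests below references type_db_alias while the model keeps type. In isolation:

from tortoise import Model, fields


class Product(Model):
    # Python attribute `type`; database column `type_db_alias`
    type = fields.IntField(source_field="type_db_alias")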


@@ -50,7 +50,9 @@ class Product(Model):
     view_num = fields.IntField(description="View Num")
     sort = fields.IntField()
     is_reviewed = fields.BooleanField(description="Is Reviewed")
-    type = fields.IntEnumField(ProductType, description="Product Type")
+    type = fields.IntEnumField(
+        ProductType, description="Product Type", source_field="type_db_alias"
+    )
     image = fields.CharField(max_length=200)
     body = fields.TextField()
     created_at = fields.DatetimeField(auto_now_add=True)


@@ -29,6 +29,7 @@ class User(Model):
     is_superuser = fields.BooleanField(default=False, description="Is SuperUser")
     avatar = fields.CharField(max_length=200, default="")
     intro = fields.TextField(default="")
+    longitude = fields.DecimalField(max_digits=12, decimal_places=9)


 class Email(Model):
@@ -50,7 +51,9 @@ class Product(Model):
     view_num = fields.IntField(description="View Num")
     sort = fields.IntField()
     is_reviewed = fields.BooleanField(description="Is Reviewed")
-    type = fields.IntEnumField(ProductType, description="Product Type")
+    type = fields.IntEnumField(
+        ProductType, description="Product Type", source_field="type_db_alias"
+    )
     image = fields.CharField(max_length=200)
     body = fields.TextField()
     created_at = fields.DatetimeField(auto_now_add=True)


@@ -72,18 +72,16 @@ def test_modify_column():
     ret1 = Migrate.ddl.modify_column(User, User._meta.fields_map.get("is_active").describe(False))
     if isinstance(Migrate.ddl, MysqlDDL):
         assert ret0 == "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200)"
+        assert (
+            ret1
+            == "ALTER TABLE `user` MODIFY COLUMN `is_active` BOOL NOT NULL COMMENT 'Is Active' DEFAULT 1"
+        )
     elif isinstance(Migrate.ddl, PostgresDDL):
         assert (
             ret0
             == 'ALTER TABLE "category" ALTER COLUMN "name" TYPE VARCHAR(200) USING "name"::VARCHAR(200)'
         )
-
-    if isinstance(Migrate.ddl, MysqlDDL):
-        assert (
-            ret1
-            == "ALTER TABLE `user` MODIFY COLUMN `is_active` BOOL NOT NULL COMMENT 'Is Active' DEFAULT 1"
-        )
-    elif isinstance(Migrate.ddl, PostgresDDL):
         assert (
             ret1 == 'ALTER TABLE "user" ALTER COLUMN "is_active" TYPE BOOL USING "is_active"::BOOL'
         )


@@ -17,6 +17,7 @@ old_models_describe = {
         "description": None,
         "docstring": None,
         "unique_together": [],
+        "indexes": [],
         "pk_field": {
             "name": "id",
             "field_type": "IntField",
@@ -151,6 +152,7 @@ old_models_describe = {
         "description": None,
         "docstring": None,
         "unique_together": [],
+        "indexes": [],
         "pk_field": {
             "name": "id",
             "field_type": "IntField",
@@ -242,6 +244,7 @@ old_models_describe = {
         "description": None,
         "docstring": None,
         "unique_together": [],
+        "indexes": [],
         "pk_field": {
             "name": "id",
             "field_type": "IntField",
@@ -334,6 +337,7 @@ old_models_describe = {
         "description": None,
         "docstring": None,
         "unique_together": [],
+        "indexes": [],
         "pk_field": {
             "name": "id",
             "field_type": "IntField",
@@ -413,7 +417,7 @@ old_models_describe = {
             {
                 "name": "type",
                 "field_type": "IntEnumFieldInstance",
-                "db_column": "type",
+                "db_column": "type_db_alias",
                 "python_type": "int",
                 "generated": False,
                 "nullable": False,
@@ -512,6 +516,7 @@ old_models_describe = {
         "description": None,
         "docstring": None,
         "unique_together": [],
+        "indexes": [],
         "pk_field": {
             "name": "id",
             "field_type": "IntField",
@@ -639,6 +644,21 @@ old_models_describe = {
                 "constraints": {},
                 "db_field_types": {"": "TEXT", "mysql": "LONGTEXT"},
             },
+            {
+                "name": "longitude",
+                "unique": False,
+                "default": None,
+                "indexed": False,
+                "nullable": False,
+                "db_column": "longitude",
+                "docstring": None,
+                "generated": False,
+                "field_type": "DecimalField",
+                "constraints": {},
+                "description": None,
+                "python_type": "decimal.Decimal",
+                "db_field_types": {"": "DECIMAL(12,9)", "sqlite": "VARCHAR(40)"},
+            },
         ],
         "fk_fields": [],
         "backward_fk_fields": [
@@ -681,6 +701,7 @@ old_models_describe = {
         "description": None,
         "docstring": None,
         "unique_together": [],
+        "indexes": [],
         "pk_field": {
             "name": "id",
             "field_type": "IntField",
@@ -781,104 +802,148 @@ def test_migrate(mocker: MockerFixture):
     Migrate.diff_models(models_describe, old_models_describe, False)
     Migrate._merge_operators()
     if isinstance(Migrate.ddl, MysqlDDL):
-        assert sorted(Migrate.upgrade_operators) == sorted(
-            [
-                "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200)",
-                "ALTER TABLE `category` MODIFY COLUMN `slug` VARCHAR(100) NOT NULL",
-                "ALTER TABLE `config` ADD `user_id` INT NOT NULL COMMENT 'User'",
-                "ALTER TABLE `config` ADD CONSTRAINT `fk_config_user_17daa970` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE",
-                "ALTER TABLE `config` ALTER COLUMN `status` DROP DEFAULT",
-                "ALTER TABLE `email` ADD `address` VARCHAR(200) NOT NULL",
-                "ALTER TABLE `email` DROP COLUMN `user_id`",
-                "ALTER TABLE `configs` RENAME TO `config`",
-                "ALTER TABLE `product` RENAME COLUMN `image` TO `pic`",
-                "ALTER TABLE `email` RENAME COLUMN `id` TO `email_id`",
-                "ALTER TABLE `email` DROP FOREIGN KEY `fk_email_user_5b58673d`",
-                "ALTER TABLE `email` ADD INDEX `idx_email_email_4a1a33` (`email`)",
-                "ALTER TABLE `product` ADD UNIQUE INDEX `uid_product_name_f14935` (`name`, `type`)",
-                "ALTER TABLE `product` ALTER COLUMN `view_num` SET DEFAULT 0",
-                "ALTER TABLE `user` DROP COLUMN `avatar`",
-                "ALTER TABLE `user` MODIFY COLUMN `password` VARCHAR(100) NOT NULL",
-                "CREATE TABLE IF NOT EXISTS `newmodel` (\n    `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT,\n    `name` VARCHAR(50) NOT NULL\n) CHARACTER SET utf8mb4;",
-                "ALTER TABLE `user` ADD UNIQUE INDEX `uid_user_usernam_9987ab` (`username`)",
-                "CREATE TABLE `email_user` (`email_id` INT NOT NULL REFERENCES `email` (`email_id`) ON DELETE CASCADE,`user_id` INT NOT NULL REFERENCES `user` (`id`) ON DELETE CASCADE) CHARACTER SET utf8mb4",
-            ]
-        )
-        assert sorted(Migrate.downgrade_operators) == sorted(
-            [
-                "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200) NOT NULL",
-                "ALTER TABLE `category` MODIFY COLUMN `slug` VARCHAR(200) NOT NULL",
-                "ALTER TABLE `config` DROP COLUMN `user_id`",
-                "ALTER TABLE `config` DROP FOREIGN KEY `fk_config_user_17daa970`",
-                "ALTER TABLE `config` ALTER COLUMN `status` SET DEFAULT 1",
-                "ALTER TABLE `email` ADD `user_id` INT NOT NULL",
-                "ALTER TABLE `email` DROP COLUMN `address`",
-                "ALTER TABLE `config` RENAME TO `configs`",
-                "ALTER TABLE `product` RENAME COLUMN `pic` TO `image`",
-                "ALTER TABLE `email` RENAME COLUMN `email_id` TO `id`",
-                "ALTER TABLE `email` ADD CONSTRAINT `fk_email_user_5b58673d` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE",
-                "ALTER TABLE `email` DROP INDEX `idx_email_email_4a1a33`",
-                "ALTER TABLE `product` DROP INDEX `uid_product_name_f14935`",
-                "ALTER TABLE `product` ALTER COLUMN `view_num` DROP DEFAULT",
-                "ALTER TABLE `user` ADD `avatar` VARCHAR(200) NOT NULL DEFAULT ''",
-                "ALTER TABLE `user` DROP INDEX `idx_user_usernam_9987ab`",
-                "ALTER TABLE `user` MODIFY COLUMN `password` VARCHAR(200) NOT NULL",
-                "DROP TABLE IF EXISTS `email_user`",
-                "DROP TABLE IF EXISTS `newmodel`",
-            ]
-        )
+        expected_upgrade_operators = {
+            "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200)",
+            "ALTER TABLE `category` MODIFY COLUMN `slug` VARCHAR(100) NOT NULL",
+            "ALTER TABLE `config` ADD `user_id` INT NOT NULL COMMENT 'User'",
+            "ALTER TABLE `config` ADD CONSTRAINT `fk_config_user_17daa970` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE",
+            "ALTER TABLE `config` ALTER COLUMN `status` DROP DEFAULT",
+            "ALTER TABLE `config` MODIFY COLUMN `value` JSON NOT NULL",
+            "ALTER TABLE `email` ADD `address` VARCHAR(200) NOT NULL",
+            "ALTER TABLE `email` DROP COLUMN `user_id`",
+            "ALTER TABLE `configs` RENAME TO `config`",
+            "ALTER TABLE `product` RENAME COLUMN `image` TO `pic`",
+            "ALTER TABLE `email` RENAME COLUMN `id` TO `email_id`",
+            "ALTER TABLE `product` ADD INDEX `idx_product_name_869427` (`name`, `type_db_alias`)",
+            "ALTER TABLE `email` ADD INDEX `idx_email_email_4a1a33` (`email`)",
+            "ALTER TABLE `product` ADD UNIQUE INDEX `uid_product_name_869427` (`name`, `type_db_alias`)",
+            "ALTER TABLE `product` ALTER COLUMN `view_num` SET DEFAULT 0",
+            "ALTER TABLE `product` MODIFY COLUMN `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6)",
+            "ALTER TABLE `product` MODIFY COLUMN `is_reviewed` BOOL NOT NULL COMMENT 'Is Reviewed'",
+            "ALTER TABLE `user` DROP COLUMN `avatar`",
+            "ALTER TABLE `user` MODIFY COLUMN `password` VARCHAR(100) NOT NULL",
+            "ALTER TABLE `user` MODIFY COLUMN `intro` LONGTEXT NOT NULL",
+            "ALTER TABLE `user` MODIFY COLUMN `last_login` DATETIME(6) NOT NULL COMMENT 'Last Login'",
+            "ALTER TABLE `user` MODIFY COLUMN `is_active` BOOL NOT NULL COMMENT 'Is Active' DEFAULT 1",
+            "ALTER TABLE `user` MODIFY COLUMN `is_superuser` BOOL NOT NULL COMMENT 'Is SuperUser' DEFAULT 0",
+            "ALTER TABLE `user` MODIFY COLUMN `longitude` DECIMAL(10,8) NOT NULL",
+            "ALTER TABLE `user` ADD UNIQUE INDEX `uid_user_usernam_9987ab` (`username`)",
+            "CREATE TABLE `email_user` (\n    `email_id` INT NOT NULL REFERENCES `email` (`email_id`) ON DELETE CASCADE,\n    `user_id` INT NOT NULL REFERENCES `user` (`id`) ON DELETE CASCADE\n) CHARACTER SET utf8mb4",
+            "CREATE TABLE IF NOT EXISTS `newmodel` (\n    `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT,\n    `name` VARCHAR(50) NOT NULL\n) CHARACTER SET utf8mb4;",
+            "ALTER TABLE `category` MODIFY COLUMN `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6)",
+            "ALTER TABLE `product` MODIFY COLUMN `body` LONGTEXT NOT NULL",
+            "ALTER TABLE `email` MODIFY COLUMN `is_primary` BOOL NOT NULL DEFAULT 0",
+        }
+        expected_downgrade_operators = {
+            "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200) NOT NULL",
+            "ALTER TABLE `category` MODIFY COLUMN `slug` VARCHAR(200) NOT NULL",
+            "ALTER TABLE `config` DROP COLUMN `user_id`",
+            "ALTER TABLE `config` DROP FOREIGN KEY `fk_config_user_17daa970`",
+            "ALTER TABLE `config` ALTER COLUMN `status` SET DEFAULT 1",
+            "ALTER TABLE `email` ADD `user_id` INT NOT NULL",
+            "ALTER TABLE `email` DROP COLUMN `address`",
+            "ALTER TABLE `config` RENAME TO `configs`",
+            "ALTER TABLE `product` RENAME COLUMN `pic` TO `image`",
+            "ALTER TABLE `email` RENAME COLUMN `email_id` TO `id`",
+            "ALTER TABLE `product` DROP INDEX `idx_product_name_869427`",
+            "ALTER TABLE `email` DROP INDEX `idx_email_email_4a1a33`",
+            "ALTER TABLE `product` DROP INDEX `uid_product_name_869427`",
+            "ALTER TABLE `product` ALTER COLUMN `view_num` DROP DEFAULT",
+            "ALTER TABLE `user` ADD `avatar` VARCHAR(200) NOT NULL DEFAULT ''",
+            "ALTER TABLE `user` DROP INDEX `idx_user_usernam_9987ab`",
+            "ALTER TABLE `user` MODIFY COLUMN `password` VARCHAR(200) NOT NULL",
+            "DROP TABLE IF EXISTS `email_user`",
+            "DROP TABLE IF EXISTS `newmodel`",
+            "ALTER TABLE `user` MODIFY COLUMN `intro` LONGTEXT NOT NULL",
+            "ALTER TABLE `config` MODIFY COLUMN `value` TEXT NOT NULL",
+            "ALTER TABLE `category` MODIFY COLUMN `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6)",
+            "ALTER TABLE `product` MODIFY COLUMN `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6)",
+            "ALTER TABLE `product` MODIFY COLUMN `is_reviewed` BOOL NOT NULL COMMENT 'Is Reviewed'",
+            "ALTER TABLE `user` MODIFY COLUMN `last_login` DATETIME(6) NOT NULL COMMENT 'Last Login'",
+            "ALTER TABLE `user` MODIFY COLUMN `is_active` BOOL NOT NULL COMMENT 'Is Active' DEFAULT 1",
+            "ALTER TABLE `user` MODIFY COLUMN `is_superuser` BOOL NOT NULL COMMENT 'Is SuperUser' DEFAULT 0",
+            "ALTER TABLE `user` MODIFY COLUMN `longitude` DECIMAL(12,9) NOT NULL",
+            "ALTER TABLE `product` MODIFY COLUMN `body` LONGTEXT NOT NULL",
+            "ALTER TABLE `email` MODIFY COLUMN `is_primary` BOOL NOT NULL DEFAULT 0",
+        }
+        assert not set(Migrate.upgrade_operators).symmetric_difference(expected_upgrade_operators)
+        assert not set(Migrate.downgrade_operators).symmetric_difference(
+            expected_downgrade_operators
+        )
     elif isinstance(Migrate.ddl, PostgresDDL):
-        assert sorted(Migrate.upgrade_operators) == sorted(
-            [
-                'ALTER TABLE "category" ALTER COLUMN "name" DROP NOT NULL',
-                'ALTER TABLE "category" ALTER COLUMN "slug" TYPE VARCHAR(100) USING "slug"::VARCHAR(100)',
-                'ALTER TABLE "config" ADD "user_id" INT NOT NULL',
-                'ALTER TABLE "config" ADD CONSTRAINT "fk_config_user_17daa970" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE',
-                'ALTER TABLE "config" ALTER COLUMN "status" DROP DEFAULT',
-                'ALTER TABLE "email" ADD "address" VARCHAR(200) NOT NULL',
-                'ALTER TABLE "email" DROP COLUMN "user_id"',
-                'ALTER TABLE "product" RENAME COLUMN "image" TO "pic"',
-                'ALTER TABLE "email" RENAME COLUMN "id" TO "email_id"',
-                'ALTER TABLE "configs" RENAME TO "config"',
-                'ALTER TABLE "email" DROP CONSTRAINT "fk_email_user_5b58673d"',
-                'CREATE INDEX "idx_email_email_4a1a33" ON "email" ("email")',
-                'CREATE UNIQUE INDEX "uid_product_name_f14935" ON "product" ("name", "type")',
-                'ALTER TABLE "product" ALTER COLUMN "view_num" SET DEFAULT 0',
-                'ALTER TABLE "user" DROP COLUMN "avatar"',
-                'ALTER TABLE "user" ALTER COLUMN "password" TYPE VARCHAR(100) USING "password"::VARCHAR(100)',
-                'CREATE TABLE IF NOT EXISTS "newmodel" (\n    "id" SERIAL NOT NULL PRIMARY KEY,\n    "name" VARCHAR(50) NOT NULL\n);\nCOMMENT ON COLUMN "config"."user_id" IS \'User\';',
-                'CREATE UNIQUE INDEX "uid_user_usernam_9987ab" ON "user" ("username")',
-                'CREATE TABLE "email_user" ("email_id" INT NOT NULL REFERENCES "email" ("email_id") ON DELETE CASCADE,"user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE)',
-            ]
-        )
-        assert sorted(Migrate.downgrade_operators) == sorted(
-            [
-                'ALTER TABLE "category" ALTER COLUMN "name" SET NOT NULL',
-                'ALTER TABLE "category" ALTER COLUMN "slug" TYPE VARCHAR(200) USING "slug"::VARCHAR(200)',
-                'ALTER TABLE "user" ALTER COLUMN "password" TYPE VARCHAR(200) USING "password"::VARCHAR(200)',
-                'ALTER TABLE "config" DROP COLUMN "user_id"',
-                'ALTER TABLE "config" DROP CONSTRAINT "fk_config_user_17daa970"',
-                'ALTER TABLE "config" ALTER COLUMN "status" SET DEFAULT 1',
-                'ALTER TABLE "email" ADD "user_id" INT NOT NULL',
-                'ALTER TABLE "email" DROP COLUMN "address"',
-                'ALTER TABLE "config" RENAME TO "configs"',
-                'ALTER TABLE "product" RENAME COLUMN "pic" TO "image"',
-                'ALTER TABLE "email" RENAME COLUMN "email_id" TO "id"',
-                'ALTER TABLE "email" ADD CONSTRAINT "fk_email_user_5b58673d" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE',
-                'DROP INDEX "idx_email_email_4a1a33"',
-                'ALTER TABLE "product" ALTER COLUMN "view_num" DROP DEFAULT',
-                'ALTER TABLE "user" ADD "avatar" VARCHAR(200) NOT NULL DEFAULT \'\'',
-                'DROP INDEX "idx_user_usernam_9987ab"',
-                'DROP INDEX "uid_product_name_f14935"',
-                'DROP TABLE IF EXISTS "email_user"',
-                'DROP TABLE IF EXISTS "newmodel"',
-            ]
-        )
+        expected_upgrade_operators = {
+            'ALTER TABLE "category" ALTER COLUMN "name" DROP NOT NULL',
+            'ALTER TABLE "category" ALTER COLUMN "slug" TYPE VARCHAR(100) USING "slug"::VARCHAR(100)',
+            'ALTER TABLE "category" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ',
+            'ALTER TABLE "config" ADD "user_id" INT NOT NULL',
+            'ALTER TABLE "config" ADD CONSTRAINT "fk_config_user_17daa970" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE',
+            'ALTER TABLE "config" ALTER COLUMN "status" DROP DEFAULT',
+            'ALTER TABLE "config" ALTER COLUMN "value" TYPE JSONB USING "value"::JSONB',
+            'ALTER TABLE "configs" RENAME TO "config"',
+            'ALTER TABLE "email" ADD "address" VARCHAR(200) NOT NULL',
+            'ALTER TABLE "email" DROP COLUMN "user_id"',
+            'ALTER TABLE "email" RENAME COLUMN "id" TO "email_id"',
+            'ALTER TABLE "email" ALTER COLUMN "is_primary" TYPE BOOL USING "is_primary"::BOOL',
+            'ALTER TABLE "product" ALTER COLUMN "view_num" SET DEFAULT 0',
+            'ALTER TABLE "product" RENAME COLUMN "image" TO "pic"',
+            'ALTER TABLE "product" ALTER COLUMN "is_reviewed" TYPE BOOL USING "is_reviewed"::BOOL',
+            'ALTER TABLE "product" ALTER COLUMN "body" TYPE TEXT USING "body"::TEXT',
+            'ALTER TABLE "product" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ',
+            'ALTER TABLE "user" ALTER COLUMN "password" TYPE VARCHAR(100) USING "password"::VARCHAR(100)',
+            'ALTER TABLE "user" DROP COLUMN "avatar"',
+            'ALTER TABLE "user" ALTER COLUMN "is_superuser" TYPE BOOL USING "is_superuser"::BOOL',
+            'ALTER TABLE "user" ALTER COLUMN "last_login" TYPE TIMESTAMPTZ USING "last_login"::TIMESTAMPTZ',
+            'ALTER TABLE "user" ALTER COLUMN "intro" TYPE TEXT USING "intro"::TEXT',
+            'ALTER TABLE "user" ALTER COLUMN "is_active" TYPE BOOL USING "is_active"::BOOL',
+            'ALTER TABLE "user" ALTER COLUMN "longitude" TYPE DECIMAL(10,8) USING "longitude"::DECIMAL(10,8)',
+            'CREATE INDEX "idx_product_name_869427" ON "product" ("name", "type_db_alias")',
+            'CREATE INDEX "idx_email_email_4a1a33" ON "email" ("email")',
+            'CREATE TABLE "email_user" (\n    "email_id" INT NOT NULL REFERENCES "email" ("email_id") ON DELETE CASCADE,\n    "user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE\n)',
+            'CREATE TABLE IF NOT EXISTS "newmodel" (\n    "id" SERIAL NOT NULL PRIMARY KEY,\n    "name" VARCHAR(50) NOT NULL\n);\nCOMMENT ON COLUMN "config"."user_id" IS \'User\';',
+            'CREATE UNIQUE INDEX "uid_product_name_869427" ON "product" ("name", "type_db_alias")',
+            'CREATE UNIQUE INDEX "uid_user_usernam_9987ab" ON "user" ("username")',
+        }
+        expected_downgrade_operators = {
+            'ALTER TABLE "category" ALTER COLUMN "name" SET NOT NULL',
+            'ALTER TABLE "category" ALTER COLUMN "slug" TYPE VARCHAR(200) USING "slug"::VARCHAR(200)',
+            'ALTER TABLE "category" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ',
+            'ALTER TABLE "config" ALTER COLUMN "status" SET DEFAULT 1',
+            'ALTER TABLE "config" DROP COLUMN "user_id"',
+            'ALTER TABLE "config" DROP CONSTRAINT "fk_config_user_17daa970"',
+            'ALTER TABLE "config" RENAME TO "configs"',
+            'ALTER TABLE "config" ALTER COLUMN "value" TYPE JSONB USING "value"::JSONB',
+            'ALTER TABLE "email" ADD "user_id" INT NOT NULL',
+            'ALTER TABLE "email" DROP COLUMN "address"',
+            'ALTER TABLE "email" RENAME COLUMN "email_id" TO "id"',
+            'ALTER TABLE "email" ALTER COLUMN "is_primary" TYPE BOOL USING "is_primary"::BOOL',
+            'ALTER TABLE "product" ALTER COLUMN "view_num" DROP DEFAULT',
+            'ALTER TABLE "product" RENAME COLUMN "pic" TO "image"',
+            'ALTER TABLE "user" ADD "avatar" VARCHAR(200) NOT NULL DEFAULT \'\'',
+            'ALTER TABLE "user" ALTER COLUMN "password" TYPE VARCHAR(200) USING "password"::VARCHAR(200)',
+            'ALTER TABLE "user" ALTER COLUMN "last_login" TYPE TIMESTAMPTZ USING "last_login"::TIMESTAMPTZ',
+            'ALTER TABLE "user" ALTER COLUMN "is_superuser" TYPE BOOL USING "is_superuser"::BOOL',
+            'ALTER TABLE "user" ALTER COLUMN "is_active" TYPE BOOL USING "is_active"::BOOL',
+            'ALTER TABLE "user" ALTER COLUMN "intro" TYPE TEXT USING "intro"::TEXT',
+            'ALTER TABLE "user" ALTER COLUMN "longitude" TYPE DECIMAL(12,9) USING "longitude"::DECIMAL(12,9)',
+            'ALTER TABLE "product" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ',
+            'ALTER TABLE "product" ALTER COLUMN "is_reviewed" TYPE BOOL USING "is_reviewed"::BOOL',
+            'ALTER TABLE "product" ALTER COLUMN "body" TYPE TEXT USING "body"::TEXT',
+            'DROP INDEX "idx_product_name_869427"',
+            'DROP INDEX "idx_email_email_4a1a33"',
+            'DROP INDEX "idx_user_usernam_9987ab"',
+            'DROP INDEX "uid_product_name_869427"',
+            'DROP TABLE IF EXISTS "email_user"',
+            'DROP TABLE IF EXISTS "newmodel"',
+        }
+        assert not set(Migrate.upgrade_operators).symmetric_difference(expected_upgrade_operators)
+        assert not set(Migrate.downgrade_operators).symmetric_difference(
+            expected_downgrade_operators
+        )
     elif isinstance(Migrate.ddl, SqliteDDL):
         assert Migrate.upgrade_operators == []
         assert Migrate.downgrade_operators == []
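The sorted-list equality was replaced with a set comparison; symmetric_difference reports items unique to either side, so the assert fails if anything is missing or unexpected, regardless of order. In isolation:

expected = {"ALTER TABLE `user` DROP COLUMN `avatar`", "DROP TABLE IF EXISTS `newmodel`"}
actual = ["DROP TABLE IF EXISTS `newmodel`", "ALTER TABLE `user` DROP COLUMN `avatar`"]
assert not set(actual).symmetric_difference(expected)  # order-insensitive, duplicate-free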
@@ -886,18 +951,18 @@ def test_sort_all_version_files(mocker):
     mocker.patch(
         "os.listdir",
         return_value=[
-            "1_datetime_update.sql",
-            "11_datetime_update.sql",
-            "10_datetime_update.sql",
-            "2_datetime_update.sql",
+            "1_datetime_update.py",
+            "11_datetime_update.py",
+            "10_datetime_update.py",
+            "2_datetime_update.py",
         ],
     )
     Migrate.migrate_location = "."
     assert Migrate.get_all_version_files() == [
-        "1_datetime_update.sql",
-        "2_datetime_update.sql",
-        "10_datetime_update.sql",
-        "11_datetime_update.sql",
+        "1_datetime_update.py",
+        "2_datetime_update.py",
+        "10_datetime_update.py",
+        "11_datetime_update.py",
     ]
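The fixture exercises get_all_version_files' numeric sort key; plain lexicographic sorting would misplace 10 and 11 before 2:

files = ["1_datetime_update.py", "11_datetime_update.py", "10_datetime_update.py", "2_datetime_update.py"]
print(sorted(files))  # ['1_...', '10_...', '11_...', '2_...'] -> wrong order
print(sorted(files, key=lambda f: int(f.split("_")[0])))  # 1, 2, 10, 11 -> correct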

tests/test_utils.py (new file)

@@ -0,0 +1,6 @@
from aerich.utils import import_py_file


def test_import_py_file():
    m = import_py_file("aerich/utils.py")
    assert getattr(m, "import_py_file")