53 Commits

Author SHA1 Message Date
long2ice
be41a1332a update tortoise-orm version 2021-02-04 20:53:04 +08:00
long2ice
09661c1d46 Fix unique_together 2021-02-04 14:39:07 +08:00
long2ice
abfa60133f Fix drop table 2021-02-04 14:23:46 +08:00
long2ice
048e428eac update tortoise-orm 2021-02-03 22:52:01 +08:00
long2ice
38a3df9b5a add support m2m 2021-02-03 22:22:22 +08:00
long2ice
0d94b22b3f Remove unused functions 2021-02-03 18:06:43 +08:00
long2ice
f1f0074255 Support rename field 2021-02-03 17:56:30 +08:00
long2ice
e3a14a2f60 Fix postgres index 2021-02-03 16:34:07 +08:00
long2ice
608ff8f071 update conftest.py 2021-02-03 15:49:40 +08:00
long2ice
d3a1342293 update README.md 2021-02-03 15:48:06 +08:00
long2ice
01e3de9522 basically completed 2021-02-03 15:43:04 +08:00
long2ice
c6c398fdf0 update 2021-02-02 22:52:50 +08:00
long2ice
c60bdd290e add fk and drop fk 2021-02-02 20:35:05 +08:00
long2ice
f443dc68db WIP 2021-02-01 16:54:35 +08:00
long2ice
36f84702b7 update 2021-02-01 14:00:12 +08:00
long2ice
b4cc2de0e3 v0.5 refactoring 2021-01-31 23:10:30 +08:00
long2ice
4780b90c1c add close_connections to fix stuck 2021-01-29 22:58:12 +08:00
long2ice
cd176c1fd6 Merge pull request #111 from lqmanh/bugfixes/fix-tortoise-orm-0.16.19
Fix Aerich b/c of a new feature in Tortoise ORM v0.16.19
2021-01-04 14:59:11 +08:00
long2ice
c2819fc8dc update CHANGELOG.md 2020-12-29 19:13:37 +08:00
long2ice
530e7cfce5 Fixed unnecessary import. (#113) 2020-12-29 19:12:36 +08:00
Lương Quang Mạnh
47824a100b Fix Aerich b/c of Tortoise ORM v0.16.19 2020-12-26 10:31:10 +07:00
long2ice
78a15f9f19 Merge pull request #108 from lqmanh/features/make-parent-dirs-as-needed
Make parent directories as needed
2020-12-25 22:10:56 +08:00
long2ice
5ae8b9e85f complete InspectDb 2020-12-25 21:44:26 +08:00
long2ice
55a6d4bbc7 add InspectDb and show_create_tables 2020-12-24 23:32:58 +08:00
long2ice
c5535f16e1 TODO: Add inspectdb command 2020-12-23 23:38:45 +08:00
long2ice
840cd71e44 Replace migrations separator to sql standard comment 2020-12-23 23:30:35 +08:00
Lương Quang Mạnh
e0d52b1210 Fix make style 2020-12-21 15:36:29 +07:00
Lương Quang Mạnh
4dc45f723a Make parent directories as needed 2020-12-21 15:13:26 +07:00
long2ice
d2e0a68351 Fix packaging error. (#92) 2020-12-02 23:03:15 +08:00
long2ice
ee6cc20c7d Fix empty items 2020-11-30 11:14:09 +08:00
long2ice
4e917495a0 Fix upgrade in new db. (#96) 2020-11-30 11:02:48 +08:00
long2ice
bfa66f6dd4 update changelog 2020-11-29 11:15:43 +08:00
long2ice
f00715d4c4 Merge pull request #97 from TrDex/pathlib-for-path-resolving
Use `pathlib` for path resolving
2020-11-29 11:02:44 +08:00
Mykola Solodukha
6e3105690a Use pathlib for path resolving 2020-11-28 19:23:34 +02:00
long2ice
c707f7ecb2 bug fix 2020-11-28 14:31:41 +08:00
long2ice
0bbc471e00 Fix sqlite stuck. (#90) 2020-11-26 23:38:57 +08:00
long2ice
fb6cc62047 update README and CHANGELOG 2020-11-23 16:44:16 +08:00
long2ice
e9ceaf471f Merge pull request #87 from ALexALed/remove-default-detections-for-callable
Remove callable detection for defaults
2020-11-23 16:41:30 +08:00
alexaled
85fc3b2aa2 Remove callable detection for defaults 2020-11-23 10:35:40 +02:00
long2ice
a677d506a9 Fix ci error 2020-11-19 10:41:52 +08:00
long2ice
9879004fee Add rename column support MySQL5 2020-11-19 10:11:52 +08:00
long2ice
5760fe2040 Merge pull request #83 from SakuraSound/fix-migrate-unlink
Catch OSError (if read-only file system)
2020-11-18 15:40:29 +08:00
Joir-dan Gumbs
b229c30558 Catch OSError (if read-only file system) 2020-11-17 23:28:00 -08:00
long2ice
5d2f1604c3 update github action poetry 2020-11-17 10:57:56 +08:00
long2ice
499c4e1c02 Fix black 2020-11-17 10:50:57 +08:00
long2ice
1463ee30bc update deps 2020-11-17 10:43:27 +08:00
long2ice
3b801932f5 Merge remote-tracking branch 'origin/dev' into dev 2020-11-17 10:36:14 +08:00
long2ice
c2eb4dc9e3 update poetry in github actions 2020-11-17 10:35:51 +08:00
long2ice
5927febd0c Delete .DS_Store 2020-11-17 10:10:32 +08:00
long2ice
a1c10ff330 exclude .DS_store 2020-11-17 10:09:37 +08:00
long2ice
f2013c931a Fix test error 2020-11-16 22:32:19 +08:00
long2ice
b21b954d32 Use .sql instead of .json to store version file. (#79) 2020-11-16 22:25:01 +08:00
long2ice
f5588a35c5 update deps 2020-11-12 21:27:58 +08:00
24 changed files with 1936 additions and 877 deletions

.DS_Store vendored (binary file; content not shown)


@@ -11,11 +11,14 @@ jobs:
- uses: actions/setup-python@v2
with:
python-version: '3.x'
- uses: dschep/install-poetry-action@v1.3
- name: Install and configure Poetry
uses: snok/install-poetry@v1.1.1
with:
virtualenvs-create: false
- name: Build dists
run: make build
- name: Pypi Publish
uses: pypa/gh-action-pypi-publish@master
with:
user: __token__
password: ${{ secrets.pypi_password }}
password: ${{ secrets.pypi_password }}


@@ -1,5 +1,5 @@
name: test
on: [push, pull_request]
on: [ push, pull_request ]
jobs:
testall:
runs-on: ubuntu-latest
@@ -19,7 +19,10 @@ jobs:
- uses: actions/setup-python@v2
with:
python-version: '3.x'
- uses: dschep/install-poetry-action@v1.3
- name: Install and configure Poetry
uses: snok/install-poetry@v1.1.1
with:
virtualenvs-create: false
- name: CI
env:
MYSQL_PASS: root
@@ -28,4 +31,4 @@ jobs:
POSTGRES_PASS: 123456
POSTGRES_HOST: 127.0.0.1
POSTGRES_PORT: 5432
run: make ci
run: make ci

.gitignore vendored

@@ -144,4 +144,5 @@ cython_debug/
migrations
aerich.ini
src
.vscode
.vscode
.DS_Store


@@ -1,5 +1,39 @@
# ChangeLog
## 0.5
### 0.5.0
- Refactor core code; the previous limitations no longer apply.
## 0.4
### 0.4.4
- Fix unnecessary import. (#113)
### 0.4.3
- Replace migrations separator to sql standard comment.
- Add `inspectdb` command.
### 0.4.2
- Use `pathlib` for path resolving. (#89)
- Fix upgrade in new db. (#96)
- Fix packaging error. (#92)
### 0.4.1
- Bug fix. (#91 #93)
### 0.4.0
- Use `.sql` instead of `.json` to store version file.
- Add `rename` column support MySQL5.
- Remove callable detection for defaults. (#87)
- Fix `sqlite` stuck. (#90)
## 0.3
### 0.3.3
@@ -22,10 +56,10 @@
- Refactor migrate logic; this version is not compatible with previous versions.
- `old_models.py` is no longer needed; model state is stored in the database.
- Upgrade steps:
1. Upgrade aerich version.
2. Drop aerich table in database.
3. Delete `migrations/{app}` folder and rerun `aerich init-db`.
4. Update model and `aerich migrate` normally.
1. Upgrade aerich version.
2. Drop aerich table in database.
3. Delete `migrations/{app}` folder and rerun `aerich init-db`.
4. Update model and `aerich migrate` normally.
## 0.2


@@ -3,8 +3,10 @@ black_opts = -l 100 -t py38
py_warn = PYTHONDEVMODE=1
MYSQL_HOST ?= "127.0.0.1"
MYSQL_PORT ?= 3306
MYSQL_PASS ?= "123456"
POSTGRES_HOST ?= "127.0.0.1"
POSTGRES_PORT ?= 5432
POSTGRES_PASS ?= "123456"
help:
@echo "Aerich development makefile"
@@ -43,7 +45,7 @@ test_mysql:
$(py_warn) TEST_DB="mysql://root:$(MYSQL_PASS)@$(MYSQL_HOST):$(MYSQL_PORT)/test_\{\}" pytest -vv -s
test_postgres:
$(py_warn) TEST_DB="postgres://postgres:$(POSTGRES_PASS)@$(POSTGRES_HOST):$(POSTGRES_PORT)/test_\{\}" pytest
$(py_warn) TEST_DB="postgres://postgres:$(POSTGRES_PASS)@$(POSTGRES_HOST):$(POSTGRES_PORT)/test_\{\}" pytest -vv -s
testall: deps test_sqlite test_postgres test_mysql


@@ -7,10 +7,12 @@
## Introduction
Aerich is a database migrations tool for Tortoise-ORM, which is like alembic for SQLAlchemy, or Django ORM with its
own migrations solution.
Aerich is a database migrations tool for Tortoise-ORM, which is like alembic for SQLAlchemy, or Django ORM with its own
migrations solution.
**Important: You can only use absolute imports in your `models.py` to make `aerich` work.**
~~**Important: You can only use absolute imports in your `models.py` to make `aerich` work.**~~
From version `v0.5.0`, this limitation no longer applies.
## Install
@@ -40,14 +42,14 @@ Commands:
history List all migrate items.
init Init config file and generate root migrate location.
init-db Generate schema and generate app migrate location.
inspectdb Introspects the database tables to standard output as...
migrate Generate migrate changes file.
upgrade Upgrade to latest version.
```
## Usage
You need to add `aerich.models` to your `Tortoise-ORM` config first,
for example:
You need to add `aerich.models` to your `Tortoise-ORM` config first, for example:
```python
TORTOISE_ORM = {
@@ -103,22 +105,22 @@ If your Tortoise-ORM app is not default `models`, you must specify
```shell
> aerich migrate --name drop_column
Success migrate 1_202029051520102929_drop_column.json
Success migrate 1_202029051520102929_drop_column.sql
```
The migrate filename format is
`{version_num}_{datetime}_{name|update}.json`.
`{version_num}_{datetime}_{name|update}.sql`.
If `aerich` guesses you are renaming a column, it will ask `Rename {old_column} to {new_column} [True]`; you can choose `True` to rename the column without dropping it, or choose `False` to drop the column and then create a new one.
If you use `MySQL`, note that only MySQL 8.0+ supports the `RENAME ... TO` syntax.
If `aerich` guesses you are renaming a column, it will ask `Rename {old_column} to {new_column} [True]`; you can
choose `True` to rename the column without dropping it, or choose `False` to drop the column and then create a new
one. Note that the latter may lose data.
### Upgrade to latest version
```shell
> aerich upgrade
Success upgrade 1_202029051520102929_drop_column.json
Success upgrade 1_202029051520102929_drop_column.sql
```
Now your db is migrated to the latest version.
@@ -134,13 +136,17 @@ Usage: aerich downgrade [OPTIONS]
Options:
-v, --version INTEGER Specified version, default to last. [default: -1]
-d, --delete Delete version files at the same time. [default:
False]
--yes Confirm the action without prompting.
-h, --help Show this message and exit.
```
```shell
> aerich downgrade
Success downgrade 1_202029051520102929_drop_column.json
Success downgrade 1_202029051520102929_drop_column.sql
```
Now your db is rolled back to the specified version.
@@ -150,7 +156,7 @@ Now your db rollback to specified version.
```shell
> aerich history
1_202029051520102929_drop_column.json
1_202029051520102929_drop_column.sql
```
### Show heads to be migrated
@@ -158,9 +164,38 @@ Now your db rollback to specified version.
```shell
> aerich heads
1_202029051520102929_drop_column.json
1_202029051520102929_drop_column.sql
```
### Inspect db tables to TortoiseORM model
Currently, only MySQL is supported.
```shell
Usage: aerich inspectdb [OPTIONS]
Introspects the database tables to standard output as TortoiseORM model.
Options:
-t, --table TEXT Which tables to inspect.
-h, --help Show this message and exit.
```
Inspect all tables and print to console:
```shell
aerich --app models inspectdb
```
Inspect a specified table in the default app and redirect the output to `models.py`:
```shell
aerich inspectdb -t user > models.py
```
Note that this command has limitations: some field types, such as `IntEnumField`
and `ForeignKeyField`, cannot be reconstructed.
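As a rough illustration of the output (a sketch, not from a real run — the `user` table and its columns are hypothetical), the field templates in `aerich/inspectdb.py` shown further below would render something like:

```python
from tortoise import Model, fields


class User(Model):
    # The trailing ", " inside the parentheses comes straight from the
    # template placeholders ({pk}, {null}, ...) in InspectDb.
    id = fields.IntField(pk=True, )
    name = fields.CharField(max_length=200, null=True, )
    created_at = fields.DatetimeField(auto_now_add=True, )
```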
### Multiple databases
```python
@@ -171,7 +206,7 @@ tortoise_orm = {
},
"apps": {
"models": {"models": ["tests.models", "aerich.models"], "default_connection": "default"},
"models_second": {"models": ["tests.models_second"], "default_connection": "second",},
"models_second": {"models": ["tests.models_second"], "default_connection": "second", },
},
}
```
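Putting this hunk together with the truncated `TORTOISE_ORM` block earlier, a complete two-database config would look roughly like the following (the connection URLs are placeholders):

```python
tortoise_orm = {
    "connections": {
        "default": "mysql://root:123456@127.0.0.1:3306/test",
        "second": "sqlite://:memory:",
    },
    "apps": {
        "models": {
            # aerich.models must be listed so aerich can record applied versions
            "models": ["tests.models", "aerich.models"],
            "default_connection": "default",
        },
        "models_second": {
            "models": ["tests.models_second"],
            "default_connection": "second",
        },
    },
}
```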


@@ -1 +1 @@
__version__ = "0.3.3"
__version__ = "0.5.0"


@@ -1,9 +1,10 @@
import asyncio
import json
import os
import sys
from configparser import ConfigParser
from functools import wraps
from pathlib import Path
from typing import List
import click
from click import Context, UsageError
@@ -12,8 +13,16 @@ from tortoise.exceptions import OperationalError
from tortoise.transactions import in_transaction
from tortoise.utils import get_schema_sql
from aerich.inspectdb import InspectDb
from aerich.migrate import Migrate
from aerich.utils import get_app_connection, get_app_connection_name, get_tortoise_config
from aerich.utils import (
get_app_connection,
get_app_connection_name,
get_models_describe,
get_tortoise_config,
get_version_content_from_file,
write_version_file,
)
from . import __version__
from .enums import Color
@@ -26,11 +35,7 @@ def coro(f):
@wraps(f)
def wrapper(*args, **kwargs):
loop = asyncio.get_event_loop()
ctx = args[0]
loop.run_until_complete(f(*args, **kwargs))
app = ctx.obj.get("app")
if app:
Migrate.remove_old_model_file(app, ctx.obj["location"])
return wrapper
@@ -61,7 +66,7 @@ async def cli(ctx: Context, config, app, name):
invoked_subcommand = ctx.invoked_subcommand
if invoked_subcommand != "init":
if not os.path.exists(config):
if not Path(config).exists():
raise UsageError("You must exec init first", ctx=ctx)
parser.read(config)
@@ -75,7 +80,9 @@ async def cli(ctx: Context, config, app, name):
ctx.obj["app"] = app
Migrate.app = app
if invoked_subcommand != "init-db":
await Migrate.init_with_old_models(tortoise_config, app, location)
if not Path(location, app).exists():
raise UsageError("You must exec init-db first", ctx=ctx)
await Migrate.init(tortoise_config, app, location)
@cli.command(help="Generate migrate changes file.")
@@ -95,7 +102,6 @@ async def migrate(ctx: Context, name):
async def upgrade(ctx: Context):
config = ctx.obj["config"]
app = ctx.obj["app"]
location = ctx.obj["location"]
migrated = False
for version_file in Migrate.get_all_version_files():
try:
@@ -104,21 +110,20 @@ async def upgrade(ctx: Context):
exists = False
if not exists:
async with in_transaction(get_app_connection_name(config, app)) as conn:
file_path = os.path.join(Migrate.migrate_location, version_file)
with open(file_path, "r", encoding="utf-8") as f:
content = json.load(f)
upgrade_query_list = content.get("upgrade")
for upgrade_query in upgrade_query_list:
await conn.execute_script(upgrade_query)
file_path = Path(Migrate.migrate_location, version_file)
content = get_version_content_from_file(file_path)
upgrade_query_list = content.get("upgrade")
for upgrade_query in upgrade_query_list:
await conn.execute_script(upgrade_query)
await Aerich.create(
version=version_file,
app=app,
content=Migrate.get_models_content(config, app, location),
content=get_models_describe(app),
)
click.secho(f"Success upgrade {version_file}", fg=Color.green)
migrated = True
if not migrated:
click.secho("No migrate items", fg=Color.yellow)
click.secho("No upgrade items found", fg=Color.yellow)
@cli.command(help="Downgrade to specified version.")
@@ -130,12 +135,20 @@ async def upgrade(ctx: Context):
show_default=True,
help="Specified version, default to last.",
)
@click.option(
"-d",
"--delete",
is_flag=True,
default=False,
show_default=True,
help="Delete version files at the same time.",
)
@click.pass_context
@click.confirmation_option(
prompt="Downgrade is dangerous, which maybe lose your data, are you sure?",
)
@coro
async def downgrade(ctx: Context, version: int):
async def downgrade(ctx: Context, version: int, delete: bool):
app = ctx.obj["app"]
config = ctx.obj["config"]
if version == -1:
@@ -151,16 +164,17 @@ async def downgrade(ctx: Context, version: int):
for version in versions:
file = version.version
async with in_transaction(get_app_connection_name(config, app)) as conn:
file_path = os.path.join(Migrate.migrate_location, file)
with open(file_path, "r", encoding="utf-8") as f:
content = json.load(f)
downgrade_query_list = content.get("downgrade")
if not downgrade_query_list:
return click.secho("No downgrade item found", fg=Color.yellow)
for downgrade_query in downgrade_query_list:
await conn.execute_query(downgrade_query)
await version.delete()
os.unlink(file_path)
file_path = Path(Migrate.migrate_location, file)
content = get_version_content_from_file(file_path)
downgrade_query_list = content.get("downgrade")
if not downgrade_query_list:
click.secho("No downgrade items found", fg=Color.yellow)
return
for downgrade_query in downgrade_query_list:
await conn.execute_query(downgrade_query)
await version.delete()
if delete:
os.unlink(file_path)
click.secho(f"Success downgrade {file}", fg=Color.green)
@@ -212,7 +226,7 @@ async def init(
):
config_file = ctx.obj["config_file"]
name = ctx.obj["name"]
if os.path.exists(config_file):
if Path(config_file).exists():
return click.secho("You have inited", fg=Color.yellow)
parser.add_section(name)
@@ -222,8 +236,7 @@ async def init(
with open(config_file, "w", encoding="utf-8") as f:
parser.write(f)
if not os.path.isdir(location):
os.mkdir(location)
Path(location).mkdir(parents=True, exist_ok=True)
click.secho(f"Success create migrate location {location}", fg=Color.green)
click.secho(f"Success generate config file {config_file}", fg=Color.green)
@@ -244,12 +257,14 @@ async def init_db(ctx: Context, safe):
location = ctx.obj["location"]
app = ctx.obj["app"]
dirname = os.path.join(location, app)
if not os.path.isdir(dirname):
os.mkdir(dirname)
dirname = Path(location, app)
try:
dirname.mkdir(parents=True)
click.secho(f"Success create app migrate location {dirname}", fg=Color.green)
else:
return click.secho(f"Inited {app} already", fg=Color.yellow)
except FileExistsError:
return click.secho(
f"Inited {app} already, or delete {dirname} and try again.", fg=Color.yellow
)
await Tortoise.init(config=config)
connection = get_app_connection(config, app)
@@ -261,14 +276,32 @@ async def init_db(ctx: Context, safe):
await Aerich.create(
version=version,
app=app,
content=Migrate.get_models_content(config, app, location),
content=get_models_describe(app),
)
with open(os.path.join(dirname, version), "w", encoding="utf-8") as f:
content = {
"upgrade": [schema],
}
json.dump(content, f, ensure_ascii=False, indent=2)
return click.secho(f'Success generate schema for app "{app}"', fg=Color.green)
content = {
"upgrade": [schema],
}
write_version_file(Path(dirname, version), content)
click.secho(f'Success generate schema for app "{app}"', fg=Color.green)
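`write_version_file` and `get_version_content_from_file` are imported from `aerich.utils` above but not shown in this diff. Given the commit "Replace migrations separator to sql standard comment", a plausible minimal writer looks like this (the exact separator text and layout are assumptions):

```python
from pathlib import Path


def write_version_file(path: Path, content: dict) -> None:
    # Assumed layout: upgrade/downgrade sections delimited by standard
    # SQL comments, so the version file is itself plain SQL.
    with open(path, "w", encoding="utf-8") as f:
        f.write("-- upgrade --\n")
        f.write("\n".join(content.get("upgrade", [])) + "\n")
        if content.get("downgrade"):
            f.write("-- downgrade --\n")
            f.write("\n".join(content["downgrade"]) + "\n")
```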
@cli.command(help="Introspects the database tables to standard output as TortoiseORM model.")
@click.option(
"-t",
"--table",
help="Which tables to inspect.",
multiple=True,
required=False,
)
@click.pass_context
@coro
async def inspectdb(ctx: Context, table: List[str]):
config = ctx.obj["config"]
app = ctx.obj["app"]
connection = get_app_connection(config, app)
inspect = InspectDb(connection, table)
await inspect.inspect()
def main():


@@ -1,8 +1,8 @@
from enum import Enum
from typing import List, Type
from tortoise import BaseDBAsyncClient, ForeignKeyFieldInstance, ManyToManyFieldInstance, Model
from tortoise import BaseDBAsyncClient, Model
from tortoise.backends.base.schema_generator import BaseSchemaGenerator
from tortoise.fields import CASCADE, Field, JSONField, TextField, UUIDField
class BaseDDL:
@@ -11,17 +11,21 @@ class BaseDDL:
_DROP_TABLE_TEMPLATE = 'DROP TABLE IF EXISTS "{table_name}"'
_ADD_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" ADD {column}'
_DROP_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" DROP COLUMN "{column_name}"'
_ALTER_DEFAULT_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" {default}'
_RENAME_COLUMN_TEMPLATE = (
'ALTER TABLE "{table_name}" RENAME COLUMN "{old_column_name}" TO "{new_column_name}"'
)
_ADD_INDEX_TEMPLATE = (
'ALTER TABLE "{table_name}" ADD {unique} INDEX "{index_name}" ({column_names})'
'ALTER TABLE "{table_name}" ADD {unique}INDEX "{index_name}" ({column_names})'
)
_DROP_INDEX_TEMPLATE = 'ALTER TABLE "{table_name}" DROP INDEX "{index_name}"'
_ADD_FK_TEMPLATE = 'ALTER TABLE "{table_name}" ADD CONSTRAINT "{fk_name}" FOREIGN KEY ("{db_column}") REFERENCES "{table}" ("{field}") ON DELETE {on_delete}'
_DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP FOREIGN KEY "{fk_name}"'
_M2M_TABLE_TEMPLATE = 'CREATE TABLE "{table_name}" ("{backward_key}" {backward_type} NOT NULL REFERENCES "{backward_table}" ("{backward_field}") ON DELETE CASCADE,"{forward_key}" {forward_type} NOT NULL REFERENCES "{forward_table}" ("{forward_field}") ON DELETE {on_delete}){extra}{comment};'
_M2M_TABLE_TEMPLATE = 'CREATE TABLE "{table_name}" ("{backward_key}" {backward_type} NOT NULL REFERENCES "{backward_table}" ("{backward_field}") ON DELETE CASCADE,"{forward_key}" {forward_type} NOT NULL REFERENCES "{forward_table}" ("{forward_field}") ON DELETE {on_delete}){extra}{comment}'
_MODIFY_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" MODIFY COLUMN {column}'
_CHANGE_COLUMN_TEMPLATE = (
'ALTER TABLE "{table_name}" CHANGE {old_column_name} {new_column_name} {new_column_type}'
)
def __init__(self, client: "BaseDBAsyncClient"):
self.client = client
@@ -30,43 +34,50 @@ class BaseDDL:
def create_table(self, model: "Type[Model]"):
return self.schema_generator._get_table_sql(model, True)["table_creation_string"]
def drop_table(self, model: "Type[Model]"):
return self._DROP_TABLE_TEMPLATE.format(table_name=model._meta.db_table)
def drop_table(self, table_name: str):
return self._DROP_TABLE_TEMPLATE.format(table_name=table_name)
def create_m2m_table(self, model: "Type[Model]", field: ManyToManyFieldInstance):
def create_m2m(
self, model: "Type[Model]", field_describe: dict, reference_table_describe: dict
):
through = field_describe.get("through")
description = field_describe.get("description")
reference_id = reference_table_describe.get("pk_field").get("db_column")
db_field_types = reference_table_describe.get("pk_field").get("db_field_types")
return self._M2M_TABLE_TEMPLATE.format(
table_name=field.through,
table_name=through,
backward_table=model._meta.db_table,
forward_table=field.related_model._meta.db_table,
forward_table=reference_table_describe.get("table"),
backward_field=model._meta.db_pk_column,
forward_field=field.related_model._meta.db_pk_column,
backward_key=field.backward_key,
forward_field=reference_id,
backward_key=field_describe.get("backward_key"),
backward_type=model._meta.pk.get_for_dialect(self.DIALECT, "SQL_TYPE"),
forward_key=field.forward_key,
forward_type=field.related_model._meta.pk.get_for_dialect(self.DIALECT, "SQL_TYPE"),
on_delete=CASCADE,
extra=self.schema_generator._table_generate_extra(table=field.through),
forward_key=field_describe.get("forward_key"),
forward_type=db_field_types.get(self.DIALECT) or db_field_types.get(""),
on_delete=field_describe.get("on_delete"),
extra=self.schema_generator._table_generate_extra(table=through),
comment=self.schema_generator._table_comment_generator(
table=field.through, comment=field.description
table=through, comment=description
)
if field.description
if description
else "",
)
def drop_m2m(self, field: ManyToManyFieldInstance):
return self._DROP_TABLE_TEMPLATE.format(table_name=field.through)
def drop_m2m(self, table_name: str):
return self._DROP_TABLE_TEMPLATE.format(table_name=table_name)
def _get_default(self, model: "Type[Model]", field_object: Field):
def _get_default(self, model: "Type[Model]", field_describe: dict):
db_table = model._meta.db_table
default = field_object.default
db_column = field_object.model_field_name
auto_now_add = getattr(field_object, "auto_now_add", False)
auto_now = getattr(field_object, "auto_now", False)
default = field_describe.get("default")
if isinstance(default, Enum):
default = default.value
db_column = field_describe.get("db_column")
auto_now_add = field_describe.get("auto_now_add", False)
auto_now = field_describe.get("auto_now", False)
if default is not None or auto_now_add:
if callable(default) or isinstance(field_object, (UUIDField, TextField, JSONField)):
if field_describe.get("field_type") in ["UUIDField", "TextField", "JSONField"]:
default = ""
else:
default = field_object.to_db_value(default, model)
try:
default = self.schema_generator._column_default_generator(
db_table,
@@ -78,28 +89,33 @@ class BaseDDL:
except NotImplementedError:
default = ""
else:
default = ""
default = None
return default
def add_column(self, model: "Type[Model]", field_object: Field):
def add_column(self, model: "Type[Model]", field_describe: dict, is_pk: bool = False):
db_table = model._meta.db_table
description = field_describe.get("description")
db_column = field_describe.get("db_column")
db_field_types = field_describe.get("db_field_types")
default = self._get_default(model, field_describe)
if default is None:
default = ""
return self._ADD_COLUMN_TEMPLATE.format(
table_name=db_table,
column=self.schema_generator._create_string(
db_column=field_object.model_field_name,
field_type=field_object.get_for_dialect(self.DIALECT, "SQL_TYPE"),
nullable="NOT NULL" if not field_object.null else "",
unique="UNIQUE" if field_object.unique else "",
db_column=db_column,
field_type=db_field_types.get(self.DIALECT, db_field_types.get("")),
nullable="NOT NULL" if not field_describe.get("nullable") else "",
unique="UNIQUE" if field_describe.get("unique") else "",
comment=self.schema_generator._column_comment_generator(
table=db_table,
column=field_object.model_field_name,
comment=field_object.description,
column=db_column,
comment=field_describe.get("description"),
)
if field_object.description
if description
else "",
is_primary_key=field_object.pk,
default=self._get_default(model, field_object),
is_primary_key=is_pk,
default=default,
),
)
@@ -108,24 +124,28 @@ class BaseDDL:
table_name=model._meta.db_table, column_name=column_name
)
def modify_column(self, model: "Type[Model]", field_object: Field):
def modify_column(self, model: "Type[Model]", field_describe: dict, is_pk: bool = False):
db_table = model._meta.db_table
db_field_types = field_describe.get("db_field_types")
default = self._get_default(model, field_describe)
if default is None:
default = ""
return self._MODIFY_COLUMN_TEMPLATE.format(
table_name=db_table,
column=self.schema_generator._create_string(
db_column=field_object.model_field_name,
field_type=field_object.get_for_dialect(self.DIALECT, "SQL_TYPE"),
nullable="NOT NULL" if not field_object.null else "",
db_column=field_describe.get("db_column"),
field_type=db_field_types.get(self.DIALECT) or db_field_types.get(""),
nullable="NOT NULL" if not field_describe.get("nullable") else "",
unique="",
comment=self.schema_generator._column_comment_generator(
table=db_table,
column=field_object.model_field_name,
comment=field_object.description,
column=field_describe.get("db_column"),
comment=field_describe.get("description"),
)
if field_object.description
if field_describe.get("description")
else "",
is_primary_key=field_object.pk,
default=self._get_default(model, field_object),
is_primary_key=is_pk,
default=default,
),
)
@@ -136,9 +156,19 @@ class BaseDDL:
new_column_name=new_column_name,
)
def change_column(
self, model: "Type[Model]", old_column_name: str, new_column_name: str, new_column_type: str
):
return self._CHANGE_COLUMN_TEMPLATE.format(
table_name=model._meta.db_table,
old_column_name=old_column_name,
new_column_name=new_column_name,
new_column_type=new_column_type,
)
def add_index(self, model: "Type[Model]", field_names: List[str], unique=False):
return self._ADD_INDEX_TEMPLATE.format(
unique="UNIQUE" if unique else "",
unique="UNIQUE " if unique else "",
index_name=self.schema_generator._generate_index_name(
"idx" if not unique else "uid", model, field_names
),
@@ -154,48 +184,49 @@ class BaseDDL:
table_name=model._meta.db_table,
)
def add_fk(self, model: "Type[Model]", field: ForeignKeyFieldInstance):
def add_fk(self, model: "Type[Model]", field_describe: dict, reference_table_describe: dict):
db_table = model._meta.db_table
to_field_name = field.to_field_instance.source_field
if not to_field_name:
to_field_name = field.to_field_instance.model_field_name
db_column = field.source_field or field.model_field_name + "_id"
db_column = field_describe.get("raw_field")
reference_id = reference_table_describe.get("pk_field").get("db_column")
fk_name = self.schema_generator._generate_fk_name(
from_table=db_table,
from_field=db_column,
to_table=field.related_model._meta.db_table,
to_field=to_field_name,
to_table=reference_table_describe.get("table"),
to_field=reference_table_describe.get("pk_field").get("db_column"),
)
return self._ADD_FK_TEMPLATE.format(
table_name=db_table,
fk_name=fk_name,
db_column=db_column,
table=field.related_model._meta.db_table,
field=to_field_name,
on_delete=field.on_delete,
table=reference_table_describe.get("table"),
field=reference_id,
on_delete=field_describe.get("on_delete"),
)
def drop_fk(self, model: "Type[Model]", field: ForeignKeyFieldInstance):
to_field_name = field.to_field_instance.source_field
if not to_field_name:
to_field_name = field.to_field_instance.model_field_name
def drop_fk(self, model: "Type[Model]", field_describe: dict, reference_table_describe: dict):
db_table = model._meta.db_table
return self._DROP_FK_TEMPLATE.format(
table_name=db_table,
fk_name=self.schema_generator._generate_fk_name(
from_table=db_table,
from_field=field.source_field or field.model_field_name + "_id",
to_table=field.related_model._meta.db_table,
to_field=to_field_name,
from_field=field_describe.get("raw_field"),
to_table=reference_table_describe.get("table"),
to_field=reference_table_describe.get("pk_field").get("db_column"),
),
)
def alter_column_default(self, model: "Type[Model]", field_object: Field):
pass
def alter_column_default(self, model: "Type[Model]", field_describe: dict):
db_table = model._meta.db_table
default = self._get_default(model, field_describe)
return self._ALTER_DEFAULT_TEMPLATE.format(
table_name=db_table,
column=field_describe.get("db_column"),
default="SET" + default if default is not None else "DROP DEFAULT",
)
def alter_column_null(self, model: "Type[Model]", field_object: Field):
pass
def alter_column_null(self, model: "Type[Model]", field_describe: dict):
raise NotImplementedError
def set_comment(self, model: "Type[Model]", field_object: Field):
pass
def set_comment(self, model: "Type[Model]", field_describe: dict):
raise NotImplementedError
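Note the `{unique}` placeholder change in `_ADD_INDEX_TEMPLATE` above: the space moved from the template (`ADD {unique} INDEX`) into the value (`"UNIQUE "`), which avoids a double space for non-unique indexes. A quick check (table and index names are illustrative):

```python
_ADD_INDEX_TEMPLATE = (
    'ALTER TABLE "{table_name}" ADD {unique}INDEX "{index_name}" ({column_names})'
)

for unique in (True, False):
    print(
        _ADD_INDEX_TEMPLATE.format(
            table_name="user",
            unique="UNIQUE " if unique else "",  # trailing space lives in the value
            index_name="uid_user_name" if unique else "idx_user_name",
            column_names='"name"',
        )
    )
# ALTER TABLE "user" ADD UNIQUE INDEX "uid_user_name" ("name")
# ALTER TABLE "user" ADD INDEX "idx_user_name" ("name")
```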


@@ -1,6 +1,10 @@
from typing import Type
from tortoise import Model
from tortoise.backends.mysql.schema_generator import MySQLSchemaGenerator
from aerich.ddl import BaseDDL
from aerich.exceptions import NotSupportError
class MysqlDDL(BaseDDL):
@@ -8,15 +12,25 @@ class MysqlDDL(BaseDDL):
DIALECT = MySQLSchemaGenerator.DIALECT
_DROP_TABLE_TEMPLATE = "DROP TABLE IF EXISTS `{table_name}`"
_ADD_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` ADD {column}"
_ALTER_DEFAULT_TEMPLATE = "ALTER TABLE `{table_name}` ALTER COLUMN `{column}` {default}"
_CHANGE_COLUMN_TEMPLATE = (
"ALTER TABLE `{table_name}` CHANGE {old_column_name} {new_column_name} {new_column_type}"
)
_DROP_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` DROP COLUMN `{column_name}`"
_RENAME_COLUMN_TEMPLATE = (
"ALTER TABLE `{table_name}` RENAME COLUMN `{old_column_name}` TO `{new_column_name}`"
)
_ADD_INDEX_TEMPLATE = (
"ALTER TABLE `{table_name}` ADD {unique} INDEX `{index_name}` ({column_names})"
"ALTER TABLE `{table_name}` ADD {unique}INDEX `{index_name}` ({column_names})"
)
_DROP_INDEX_TEMPLATE = "ALTER TABLE `{table_name}` DROP INDEX `{index_name}`"
_ADD_FK_TEMPLATE = "ALTER TABLE `{table_name}` ADD CONSTRAINT `{fk_name}` FOREIGN KEY (`{db_column}`) REFERENCES `{table}` (`{field}`) ON DELETE {on_delete}"
_DROP_FK_TEMPLATE = "ALTER TABLE `{table_name}` DROP FOREIGN KEY `{fk_name}`"
_M2M_TABLE_TEMPLATE = "CREATE TABLE `{table_name}` (`{backward_key}` {backward_type} NOT NULL REFERENCES `{backward_table}` (`{backward_field}`) ON DELETE CASCADE,`{forward_key}` {forward_type} NOT NULL REFERENCES `{forward_table}` (`{forward_field}`) ON DELETE CASCADE){extra}{comment};"
_M2M_TABLE_TEMPLATE = "CREATE TABLE `{table_name}` (`{backward_key}` {backward_type} NOT NULL REFERENCES `{backward_table}` (`{backward_field}`) ON DELETE CASCADE,`{forward_key}` {forward_type} NOT NULL REFERENCES `{forward_table}` (`{forward_field}`) ON DELETE CASCADE){extra}{comment}"
_MODIFY_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` MODIFY COLUMN {column}"
def alter_column_null(self, model: "Type[Model]", field_describe: dict):
raise NotSupportError("Alter column null is unsupported in MySQL.")
def set_comment(self, model: "Type[Model]", field_describe: dict):
raise NotSupportError("Alter column comment is unsupported in MySQL.")


@@ -1,8 +1,7 @@
from typing import List, Type
from typing import Type
from tortoise import Model
from tortoise.backends.asyncpg.schema_generator import AsyncpgSchemaGenerator
from tortoise.fields import Field
from aerich.ddl import BaseDDL
@@ -10,66 +9,36 @@ from aerich.ddl import BaseDDL
class PostgresDDL(BaseDDL):
schema_generator_cls = AsyncpgSchemaGenerator
DIALECT = AsyncpgSchemaGenerator.DIALECT
_ADD_INDEX_TEMPLATE = 'CREATE INDEX "{index_name}" ON "{table_name}" ({column_names})'
_ADD_UNIQUE_TEMPLATE = (
'ALTER TABLE "{table_name}" ADD CONSTRAINT "{index_name}" UNIQUE ({column_names})'
)
_ADD_INDEX_TEMPLATE = 'CREATE {unique}INDEX "{index_name}" ON "{table_name}" ({column_names})'
_DROP_INDEX_TEMPLATE = 'DROP INDEX "{index_name}"'
_DROP_UNIQUE_TEMPLATE = 'ALTER TABLE "{table_name}" DROP CONSTRAINT "{index_name}"'
_ALTER_DEFAULT_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" {default}'
_ALTER_NULL_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" {set_drop} NOT NULL'
_MODIFY_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" TYPE {datatype}'
_SET_COMMENT_TEMPLATE = 'COMMENT ON COLUMN "{table_name}"."{column}" IS {comment}'
_DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP CONSTRAINT "{fk_name}"'
def alter_column_default(self, model: "Type[Model]", field_object: Field):
db_table = model._meta.db_table
default = self._get_default(model, field_object)
return self._ALTER_DEFAULT_TEMPLATE.format(
table_name=db_table,
column=field_object.model_field_name,
default="SET" + default if default else "DROP DEFAULT",
)
def alter_column_null(self, model: "Type[Model]", field_object: Field):
def alter_column_null(self, model: "Type[Model]", field_describe: dict):
db_table = model._meta.db_table
return self._ALTER_NULL_TEMPLATE.format(
table_name=db_table,
column=field_object.model_field_name,
set_drop="DROP" if field_object.null else "SET",
column=field_describe.get("db_column"),
set_drop="DROP" if field_describe.get("nullable") else "SET",
)
def modify_column(self, model: "Type[Model]", field_object: Field):
def modify_column(self, model: "Type[Model]", field_describe: dict, is_pk: bool = False):
db_table = model._meta.db_table
db_field_types = field_describe.get("db_field_types")
return self._MODIFY_COLUMN_TEMPLATE.format(
table_name=db_table,
column=field_object.model_field_name,
datatype=field_object.get_for_dialect(self.DIALECT, "SQL_TYPE"),
column=field_describe.get("db_column"),
datatype=db_field_types.get(self.DIALECT) or db_field_types.get(""),
)
def add_index(self, model: "Type[Model]", field_names: List[str], unique=False):
template = self._ADD_UNIQUE_TEMPLATE if unique else self._ADD_INDEX_TEMPLATE
return template.format(
index_name=self.schema_generator._generate_index_name(
"uid" if unique else "idx", model, field_names
),
table_name=model._meta.db_table,
column_names=", ".join([self.schema_generator.quote(f) for f in field_names]),
)
def drop_index(self, model: "Type[Model]", field_names: List[str], unique=False):
template = self._DROP_UNIQUE_TEMPLATE if unique else self._DROP_INDEX_TEMPLATE
return template.format(
index_name=self.schema_generator._generate_index_name(
"uid" if unique else "idx", model, field_names
),
table_name=model._meta.db_table,
)
def set_comment(self, model: "Type[Model]", field_object: Field):
def set_comment(self, model: "Type[Model]", field_describe: dict):
db_table = model._meta.db_table
return self._SET_COMMENT_TEMPLATE.format(
table_name=db_table,
column=field_object.model_field_name,
comment="'{}'".format(field_object.description) if field_object.description else "NULL",
column=field_describe.get("db_column") or field_describe.get("raw_field"),
comment="'{}'".format(field_describe.get("description"))
if field_describe.get("description")
else "NULL",
)


@@ -2,7 +2,6 @@ from typing import Type
from tortoise import Model
from tortoise.backends.sqlite.schema_generator import SqliteSchemaGenerator
from tortoise.fields import Field
from aerich.ddl import BaseDDL
from aerich.exceptions import NotSupportError
@@ -15,5 +14,14 @@ class SqliteDDL(BaseDDL):
def drop_column(self, model: "Type[Model]", column_name: str):
raise NotSupportError("Drop column is unsupported in SQLite.")
def modify_column(self, model: "Type[Model]", field_object: Field):
def modify_column(self, model: "Type[Model]", field_object: dict, is_pk: bool = True):
raise NotSupportError("Modify column is unsupported in SQLite.")
def alter_column_default(self, model: "Type[Model]", field_describe: dict):
raise NotSupportError("Alter column default is unsupported in SQLite.")
def alter_column_null(self, model: "Type[Model]", field_describe: dict):
raise NotSupportError("Alter column null is unsupported in SQLite.")
def set_comment(self, model: "Type[Model]", field_describe: dict):
raise NotSupportError("Alter column comment is unsupported in SQLite.")

aerich/inspectdb.py (new file)

@@ -0,0 +1,85 @@
import sys
from typing import List, Optional
from ddlparse import DdlParse
from tortoise import BaseDBAsyncClient
class InspectDb:
_table_template = "class {table}(Model):\n"
_field_template_mapping = {
"INT": " {field} = fields.IntField({pk}{unique}{comment})",
"SMALLINT": " {field} = fields.IntField({pk}{unique}{comment})",
"TINYINT": " {field} = fields.BooleanField({null}{default}{comment})",
"VARCHAR": " {field} = fields.CharField({pk}{unique}{length}{null}{default}{comment})",
"LONGTEXT": " {field} = fields.TextField({null}{default}{comment})",
"TEXT": " {field} = fields.TextField({null}{default}{comment})",
"DATETIME": " {field} = fields.DatetimeField({null}{default}{comment})",
}
def __init__(self, conn: BaseDBAsyncClient, tables: Optional[List[str]] = None):
self.conn = conn
self.tables = tables
self.DIALECT = conn.schema_generator.DIALECT
async def show_create_tables(self):
if self.DIALECT == "mysql":
if not self.tables:
sql_tables = f"SELECT table_name FROM information_schema.tables WHERE table_schema = '{self.conn.database}';" # nosec: B608
ret = await self.conn.execute_query(sql_tables)
self.tables = map(lambda x: x["TABLE_NAME"], ret[1])
for table in self.tables:
sql_show_create_table = f"SHOW CREATE TABLE {table}"
ret = await self.conn.execute_query(sql_show_create_table)
yield ret[1][0]["Create Table"]
else:
raise NotImplementedError("Currently only support MySQL")
async def inspect(self):
ddl_list = self.show_create_tables()
result = "from tortoise import Model, fields\n\n\n"
tables = []
async for ddl in ddl_list:
parser = DdlParse(ddl, DdlParse.DATABASE.mysql)
table = parser.parse()
name = table.name.title()
columns = table.columns
fields = []
model = self._table_template.format(table=name)
for column_name, column in columns.items():
comment = default = length = unique = null = pk = ""
if column.primary_key:
pk = "pk=True, "
if column.unique:
unique = "unique=True, "
if column.data_type == "VARCHAR":
length = f"max_length={column.length}, "
if not column.not_null:
null = "null=True, "
if column.default is not None:
if column.data_type == "TINYINT":
default = f"default={'True' if column.default == '1' else 'False'}, "
elif column.data_type == "DATETIME":
if "CURRENT_TIMESTAMP" in column.default:
if "ON UPDATE CURRENT_TIMESTAMP" in ddl:
default = "auto_now_add=True, "
else:
default = "auto_now=True, "
else:
default = f"default={column.default}, "
if column.comment:
comment = f"description='{column.comment}', "
field = self._field_template_mapping[column.data_type].format(
field=column_name,
pk=pk,
unique=unique,
length=length,
null=null,
default=default,
comment=comment,
)
fields.append(field)
tables.append(model + "\n".join(fields))
sys.stdout.write(result + "\n\n\n".join(tables))
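A minimal driver for the new class, assuming a reachable MySQL database (the URL and table name are placeholders; `InspectDb` raises `NotImplementedError` for other dialects):

```python
import asyncio

from tortoise import Tortoise

from aerich.inspectdb import InspectDb


async def main():
    await Tortoise.init(
        db_url="mysql://root:123456@127.0.0.1:3306/test",  # placeholder
        modules={"models": []},
    )
    conn = Tortoise.get_connection("default")
    # Prints generated TortoiseORM models for the given tables to stdout.
    await InspectDb(conn, ["user"]).inspect()
    await Tortoise.close_connections()


asyncio.run(main())
```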


@@ -1,27 +1,16 @@
import inspect
import json
import os
import re
from datetime import datetime
from importlib import import_module
from io import StringIO
from pathlib import Path
from typing import Dict, List, Optional, Tuple, Type
import click
from tortoise import (
BackwardFKRelation,
BackwardOneToOneRelation,
ForeignKeyFieldInstance,
ManyToManyFieldInstance,
Model,
Tortoise,
)
from dictdiffer import diff
from tortoise import BaseDBAsyncClient, Model, Tortoise
from tortoise.exceptions import OperationalError
from tortoise.fields import Field
from aerich.ddl import BaseDDL
from aerich.models import MAX_VERSION_LENGTH, Aerich
from aerich.utils import get_app_connection
from aerich.utils import get_app_connection, get_models_describe, write_version_file
class Migrate:
@@ -36,24 +25,23 @@ class Migrate:
_rename_new = []
ddl: BaseDDL
migrate_config: dict
old_models = "old_models"
diff_app = "diff_models"
_last_version_content: Optional[dict] = None
app: str
migrate_location: str
dialect: str
@classmethod
def get_old_model_file(cls, app: str, location: str):
return os.path.join(location, app, cls.old_models + ".py")
_db_version: Optional[str] = None
@classmethod
def get_all_version_files(cls) -> List[str]:
return sorted(
filter(lambda x: x.endswith("json"), os.listdir(cls.migrate_location)),
filter(lambda x: x.endswith("sql"), os.listdir(cls.migrate_location)),
key=lambda x: int(x.split("_")[0]),
)
@classmethod
def _get_model(cls, model: str) -> Type[Model]:
return Tortoise.apps.get(cls.app).get(model)
@classmethod
async def get_last_version(cls) -> Optional[Aerich]:
try:
@@ -62,26 +50,20 @@ class Migrate:
pass
@classmethod
def remove_old_model_file(cls, app: str, location: str):
try:
os.unlink(cls.get_old_model_file(app, location))
except FileNotFoundError:
pass
async def _get_db_version(cls, connection: BaseDBAsyncClient):
if cls.dialect == "mysql":
sql = "select version() as version"
ret = await connection.execute_query(sql)
cls._db_version = ret[1][0].get("version")
@classmethod
async def init_with_old_models(cls, config: dict, app: str, location: str):
async def init(cls, config: dict, app: str, location: str):
await Tortoise.init(config=config)
last_version = await cls.get_last_version()
cls.app = app
cls.migrate_location = os.path.join(location, app)
cls.migrate_location = Path(location, app)
if last_version:
content = last_version.content
with open(cls.get_old_model_file(app, location), "w", encoding="utf-8") as f:
f.write(content)
migrate_config = cls._get_migrate_config(config, app, location)
cls.migrate_config = migrate_config
await Tortoise.init(config=migrate_config)
cls._last_version_content = last_version.content
connection = get_app_connection(config, app)
cls.dialect = connection.schema_generator.DIALECT
@@ -97,6 +79,7 @@ class Migrate:
from aerich.ddl.postgres import PostgresDDL
cls.ddl = PostgresDDL(connection)
await cls._get_db_version(connection)
@classmethod
async def _get_last_version_num(cls):
@@ -111,8 +94,8 @@ class Migrate:
now = datetime.now().strftime("%Y%m%d%H%M%S").replace("/", "")
last_version_num = await cls._get_last_version_num()
if last_version_num is None:
return f"0_{now}_init.json"
version = f"{last_version_num + 1}_{now}_{name}.json"
return f"0_{now}_init.sql"
version = f"{last_version_num + 1}_{now}_{name}.sql"
if len(version) > MAX_VERSION_LENGTH:
raise ValueError(f"Version name exceeds maximum length ({MAX_VERSION_LENGTH})")
return version
@@ -123,13 +106,12 @@ class Migrate:
# delete if same version exists
for version_file in cls.get_all_version_files():
if version_file.startswith(version.split("_")[0]):
os.unlink(os.path.join(cls.migrate_location, version_file))
os.unlink(Path(cls.migrate_location, version_file))
content = {
"upgrade": cls.upgrade_operators,
"downgrade": cls.downgrade_operators,
}
with open(os.path.join(cls.migrate_location, version), "w", encoding="utf-8") as f:
json.dump(content, f, indent=2, ensure_ascii=False)
write_version_file(Path(cls.migrate_location, version), content)
return version
@classmethod
@@ -139,12 +121,9 @@ class Migrate:
:param name:
:return:
"""
apps = Tortoise.apps
diff_models = apps.get(cls.diff_app)
app_models = apps.get(cls.app)
cls.diff_models(diff_models, app_models)
cls.diff_models(app_models, diff_models, False)
new_version_content = get_models_describe(cls.app)
cls.diff_models(cls._last_version_content, new_version_content)
cls.diff_models(new_version_content, cls._last_version_content, False)
cls._merge_operators()
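The rewritten `diff_models` below works on the serialized `describe` dicts with `dictdiffer.diff`; a toy run shows the `('change', 'name', ...)` / `('change', 'db_column', ...)` pair that the rename detection matches on (tuple ordering may vary):

```python
from dictdiffer import diff

old_field = {"name": "nick_name", "db_column": "nick_name", "nullable": False}
new_field = {"name": "name", "db_column": "name", "nullable": False}

print(list(diff(old_field, new_field)))
# e.g. [('change', 'name', ('nick_name', 'name')),
#       ('change', 'db_column', ('nick_name', 'name'))]
```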
@@ -174,56 +153,7 @@ class Migrate:
cls.downgrade_operators.append(operator)
@classmethod
def _get_migrate_config(cls, config: dict, app: str, location: str):
"""
generate tmp config with old models
:param config:
:param app:
:param location:
:return:
"""
path = os.path.join(location, app, cls.old_models)
path = path.replace(os.sep, ".").lstrip(".")
config["apps"][cls.diff_app] = {
"models": [path],
"default_connection": config.get("apps").get(app).get("default_connection", "default"),
}
return config
@classmethod
def get_models_content(cls, config: dict, app: str, location: str):
"""
write new models to old models
:param config:
:param app:
:param location:
:return:
"""
old_model_files = []
models = config.get("apps").get(app).get("models")
for model in models:
module = import_module(model)
possible_models = [getattr(module, attr_name) for attr_name in dir(module)]
for attr in filter(
lambda x: inspect.isclass(x) and issubclass(x, Model) and x is not Model,
possible_models,
):
file = inspect.getfile(attr)
if file not in old_model_files:
old_model_files.append(file)
pattern = rf"(\n)?('|\")({app})(.\w+)('|\")"
str_io = StringIO()
for i, model_file in enumerate(old_model_files):
with open(model_file, "r", encoding="utf-8") as f:
content = f.read()
ret = re.sub(pattern, rf"\2{cls.diff_app}\4\5", content)
str_io.write(f"{ret}\n")
return str_io.getvalue()
@classmethod
def diff_models(
cls, old_models: Dict[str, Type[Model]], new_models: Dict[str, Type[Model]], upgrade=True
):
def diff_models(cls, old_models: Dict[str, dict], new_models: Dict[str, dict], upgrade=True):
"""
diff models and add operators
:param old_models:
@@ -231,180 +161,248 @@ class Migrate:
:param upgrade:
:return:
"""
old_models.pop(cls._aerich, None)
new_models.pop(cls._aerich, None)
_aerich = f"{cls.app}.{cls._aerich}"
old_models.pop(_aerich, None)
new_models.pop(_aerich, None)
for new_model_str, new_model_describe in new_models.items():
model = cls._get_model(new_model_describe.get("name").split(".")[1])
for new_model_str, new_model in new_models.items():
if new_model_str not in old_models.keys():
cls._add_operator(cls.add_model(new_model), upgrade)
cls._add_operator(cls.add_model(model), upgrade)
else:
cls.diff_model(old_models.get(new_model_str), new_model, upgrade)
old_model_describe = old_models.get(new_model_str)
old_unique_together = set(
map(lambda x: tuple(x), old_model_describe.get("unique_together"))
)
new_unique_together = set(
map(lambda x: tuple(x), new_model_describe.get("unique_together"))
)
old_pk_field = old_model_describe.get("pk_field")
new_pk_field = new_model_describe.get("pk_field")
# pk field
changes = diff(old_pk_field, new_pk_field)
for action, option, change in changes:
# current only support rename pk
if action == "change" and option == "name":
cls._add_operator(cls._rename_field(model, *change), upgrade)
# m2m fields
old_m2m_fields = old_model_describe.get("m2m_fields")
new_m2m_fields = new_model_describe.get("m2m_fields")
for action, option, change in diff(old_m2m_fields, new_m2m_fields):
table = change[0][1].get("through")
if action == "add":
add = False
if upgrade and table not in cls._upgrade_m2m:
cls._upgrade_m2m.append(table)
add = True
elif not upgrade and table not in cls._downgrade_m2m:
cls._downgrade_m2m.append(table)
add = True
if add:
cls._add_operator(
cls.create_m2m(
model,
change[0][1],
new_models.get(change[0][1].get("model_name")),
),
upgrade,
fk_m2m=True,
)
elif action == "remove":
add = False
if upgrade and table not in cls._upgrade_m2m:
cls._upgrade_m2m.append(table)
add = True
elif not upgrade and table not in cls._downgrade_m2m:
cls._downgrade_m2m.append(table)
add = True
if add:
cls._add_operator(cls.drop_m2m(table), upgrade, fk_m2m=True)
# add unique_together
for index in new_unique_together.difference(old_unique_together):
cls._add_operator(
cls._add_index(model, index, True),
upgrade,
)
# remove unique_together
for index in old_unique_together.difference(new_unique_together):
cls._add_operator(
cls._drop_index(model, index, True),
upgrade,
)
old_data_fields = old_model_describe.get("data_fields")
new_data_fields = new_model_describe.get("data_fields")
old_data_fields_name = list(map(lambda x: x.get("name"), old_data_fields))
new_data_fields_name = list(map(lambda x: x.get("name"), new_data_fields))
# add fields or rename fields
for new_data_field_name in set(new_data_fields_name).difference(
set(old_data_fields_name)
):
new_data_field = next(
filter(lambda x: x.get("name") == new_data_field_name, new_data_fields)
)
is_rename = False
for old_data_field in old_data_fields:
changes = list(diff(old_data_field, new_data_field))
old_data_field_name = old_data_field.get("name")
if len(changes) == 2:
# rename field
if changes[0] == (
"change",
"name",
(old_data_field_name, new_data_field_name),
) and changes[1] == (
"change",
"db_column",
(old_data_field.get("db_column"), new_data_field.get("db_column")),
):
if upgrade:
is_rename = click.prompt(
f"Rename {old_data_field_name} to {new_data_field_name}?",
default=True,
type=bool,
show_choices=True,
)
else:
is_rename = old_data_field_name in cls._rename_new
if is_rename:
cls._rename_new.append(new_data_field_name)
cls._rename_old.append(old_data_field_name)
# only MySQL8+ has rename syntax
if (
cls.dialect == "mysql"
and cls._db_version
and cls._db_version.startswith("5.")
):
cls._add_operator(
cls._change_field(
model, new_data_field, old_data_field
),
upgrade,
)
else:
cls._add_operator(
cls._rename_field(model, *changes[1][2]),
upgrade,
)
if not is_rename:
cls._add_operator(
cls._add_field(
model,
new_data_field,
),
upgrade,
)
# remove fields
for old_data_field_name in set(old_data_fields_name).difference(
set(new_data_fields_name)
):
# don't remove field if is rename
if (upgrade and old_data_field_name in cls._rename_old) or (
not upgrade and old_data_field_name in cls._rename_new
):
continue
cls._add_operator(
cls._remove_field(
model,
next(
filter(
lambda x: x.get("name") == old_data_field_name, old_data_fields
)
).get("db_column"),
),
upgrade,
)
old_fk_fields = old_model_describe.get("fk_fields")
new_fk_fields = new_model_describe.get("fk_fields")
old_fk_fields_name = list(map(lambda x: x.get("name"), old_fk_fields))
new_fk_fields_name = list(map(lambda x: x.get("name"), new_fk_fields))
# add fk
for new_fk_field_name in set(new_fk_fields_name).difference(
set(old_fk_fields_name)
):
fk_field = next(
filter(lambda x: x.get("name") == new_fk_field_name, new_fk_fields)
)
cls._add_operator(
cls._add_fk(model, fk_field, new_models.get(fk_field.get("python_type"))),
upgrade,
fk_m2m=True,
)
# drop fk
for old_fk_field_name in set(old_fk_fields_name).difference(
set(new_fk_fields_name)
):
old_fk_field = next(
filter(lambda x: x.get("name") == old_fk_field_name, old_fk_fields)
)
cls._add_operator(
cls._drop_fk(
model, old_fk_field, old_models.get(old_fk_field.get("python_type"))
),
upgrade,
fk_m2m=True,
)
# change fields
for field_name in set(new_data_fields_name).intersection(set(old_data_fields_name)):
old_data_field = next(
filter(lambda x: x.get("name") == field_name, old_data_fields)
)
new_data_field = next(
filter(lambda x: x.get("name") == field_name, new_data_fields)
)
changes = diff(old_data_field, new_data_field)
for change in changes:
_, option, old_new = change
if option == "indexed":
# change index
unique = new_data_field.get("unique")
if old_new[0] is False and old_new[1] is True:
cls._add_operator(
cls._add_index(model, (field_name,), unique),
upgrade,
)
else:
cls._add_operator(
cls._drop_index(model, (field_name,), unique),
upgrade,
)
elif option == "db_field_types.":
# change column
cls._add_operator(
cls._change_field(model, old_data_field, new_data_field),
upgrade,
)
elif option == "default":
cls._add_operator(cls._alter_default(model, new_data_field), upgrade)
for old_model in old_models:
if old_model not in new_models.keys():
cls._add_operator(cls.remove_model(old_models.get(old_model)), upgrade)
@classmethod
def _is_fk_m2m(cls, field: Field):
return isinstance(field, (ForeignKeyFieldInstance, ManyToManyFieldInstance))
cls._add_operator(cls.drop_model(old_models.get(old_model).get("table")), upgrade)
@classmethod
def add_model(cls, model: Type[Model]):
return cls.ddl.create_table(model)
@classmethod
def remove_model(cls, model: Type[Model]):
return cls.ddl.drop_table(model)
def drop_model(cls, table_name: str):
return cls.ddl.drop_table(table_name)
@classmethod
def diff_model(cls, old_model: Type[Model], new_model: Type[Model], upgrade=True):
"""
diff single model
:param old_model:
:param new_model:
:param upgrade:
:return:
"""
old_indexes = old_model._meta.indexes
new_indexes = new_model._meta.indexes
def create_m2m(cls, model: Type[Model], field_describe: dict, reference_table_describe: dict):
return cls.ddl.create_m2m(model, field_describe, reference_table_describe)
old_unique_together = old_model._meta.unique_together
new_unique_together = new_model._meta.unique_together
old_fields_map = old_model._meta.fields_map
new_fields_map = new_model._meta.fields_map
old_keys = old_fields_map.keys()
new_keys = new_fields_map.keys()
for new_key in new_keys:
new_field = new_fields_map.get(new_key)
if cls._exclude_field(new_field, upgrade):
continue
if new_key not in old_keys:
new_field_dict = new_field.describe(serializable=True)
new_field_dict.pop("name", None)
new_field_dict.pop("db_column", None)
for diff_key in old_keys - new_keys:
old_field = old_fields_map.get(diff_key)
old_field_dict = old_field.describe(serializable=True)
old_field_dict.pop("name", None)
old_field_dict.pop("db_column", None)
if old_field_dict == new_field_dict:
if upgrade:
is_rename = click.prompt(
f"Rename {diff_key} to {new_key}?",
default=True,
type=bool,
show_choices=True,
)
cls._rename_new.append(new_key)
cls._rename_old.append(diff_key)
else:
is_rename = diff_key in cls._rename_new
if is_rename:
cls._add_operator(
cls._rename_field(new_model, old_field, new_field),
upgrade,
)
break
else:
cls._add_operator(
cls._add_field(new_model, new_field),
upgrade,
cls._is_fk_m2m(new_field),
)
else:
old_field = old_fields_map.get(new_key)
new_field_dict = new_field.describe(serializable=True)
new_field_dict.pop("unique")
new_field_dict.pop("indexed")
old_field_dict = old_field.describe(serializable=True)
old_field_dict.pop("unique")
old_field_dict.pop("indexed")
if not cls._is_fk_m2m(new_field) and new_field_dict != old_field_dict:
if cls.dialect == "postgres":
if new_field.null != old_field.null:
cls._add_operator(
cls._alter_null(new_model, new_field), upgrade=upgrade
)
if new_field.default != old_field.default:
cls._add_operator(
cls._alter_default(new_model, new_field), upgrade=upgrade
)
if new_field.description != old_field.description:
cls._add_operator(
cls._set_comment(new_model, new_field), upgrade=upgrade
)
if new_field.field_type != old_field.field_type:
cls._add_operator(
cls._modify_field(new_model, new_field), upgrade=upgrade
)
else:
cls._add_operator(cls._modify_field(new_model, new_field), upgrade=upgrade)
if (old_field.index and not new_field.index) or (
old_field.unique and not new_field.unique
):
cls._add_operator(
cls._remove_index(
old_model, (old_field.model_field_name,), old_field.unique
),
upgrade,
cls._is_fk_m2m(old_field),
)
elif (new_field.index and not old_field.index) or (
new_field.unique and not old_field.unique
):
cls._add_operator(
cls._add_index(new_model, (new_field.model_field_name,), new_field.unique),
upgrade,
cls._is_fk_m2m(new_field),
)
if isinstance(new_field, ForeignKeyFieldInstance):
if old_field.db_constraint and not new_field.db_constraint:
cls._add_operator(
cls._drop_fk(new_model, new_field),
upgrade,
True,
)
if new_field.db_constraint and not old_field.db_constraint:
cls._add_operator(
cls._add_fk(new_model, new_field),
upgrade,
True,
)
for old_key in old_keys:
field = old_fields_map.get(old_key)
if old_key not in new_keys and not cls._exclude_field(field, upgrade):
if (upgrade and old_key not in cls._rename_old) or (
not upgrade and old_key not in cls._rename_new
):
cls._add_operator(
cls._remove_field(old_model, field),
upgrade,
cls._is_fk_m2m(field),
)
for new_index in new_indexes:
if new_index not in old_indexes:
cls._add_operator(
cls._add_index(
new_model,
new_index,
),
upgrade,
)
for old_index in old_indexes:
if old_index not in new_indexes:
cls._add_operator(cls._remove_index(old_model, old_index), upgrade)
for new_unique in new_unique_together:
if new_unique not in old_unique_together:
cls._add_operator(cls._add_index(new_model, new_unique, unique=True), upgrade)
for old_unique in old_unique_together:
if old_unique not in new_unique_together:
cls._add_operator(cls._remove_index(old_model, old_unique, unique=True), upgrade)
    @classmethod
    def drop_m2m(cls, table_name: str):
        return cls.ddl.drop_m2m(table_name)

    @classmethod
    def _resolve_fk_fields_name(cls, model: Type[Model], fields_name: Tuple[str]):
@@ -417,7 +415,7 @@ class Migrate:
        return ret

    @classmethod
    def _remove_index(cls, model: Type[Model], fields_name: Tuple[str], unique=False):
    def _drop_index(cls, model: Type[Model], fields_name: Tuple[str], unique=False):
        fields_name = cls._resolve_fk_fields_name(model, fields_name)
        return cls.ddl.drop_index(model, fields_name, unique)
@@ -427,87 +425,57 @@ class Migrate:
        return cls.ddl.add_index(model, fields_name, unique)
    @classmethod
    def _exclude_field(cls, field: Field, upgrade=False):
        """
        exclude BackwardFKRelation and repeated m2m fields
        :param field:
        :return:
        """
        if isinstance(field, ManyToManyFieldInstance):
            through = field.through
            if upgrade:
                if through in cls._upgrade_m2m:
                    return True
                else:
                    cls._upgrade_m2m.append(through)
                    return False
            else:
                if through in cls._downgrade_m2m:
                    return True
                else:
                    cls._downgrade_m2m.append(through)
                    return False
        return isinstance(field, (BackwardFKRelation, BackwardOneToOneRelation))
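The through-table bookkeeping in _exclude_field exists because both ends of a many-to-many relation describe the same join table, so only the first side seen should emit DDL. A minimal sketch of that dedup, independent of tortoise:

_seen_m2m = []

def emit_m2m_ddl(through: str) -> bool:
    # True only for the first occurrence of a join table name.
    if through in _seen_m2m:
        return False
    _seen_m2m.append(through)
    return True

assert emit_m2m_ddl("user_group") is True
assert emit_m2m_ddl("user_group") is False  # the mirror field on the other model is skipped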
    def _add_field(cls, model: Type[Model], field_describe: dict, is_pk: bool = False):
        return cls.ddl.add_column(model, field_describe, is_pk)

    @classmethod
    def _add_field(cls, model: Type[Model], field: Field):
        if isinstance(field, ForeignKeyFieldInstance):
            return cls.ddl.add_fk(model, field)
        if isinstance(field, ManyToManyFieldInstance):
            return cls.ddl.create_m2m_table(model, field)
        return cls.ddl.add_column(model, field)

    def _alter_default(cls, model: Type[Model], field_describe: dict):
        return cls.ddl.alter_column_default(model, field_describe)

    @classmethod
    def _alter_default(cls, model: Type[Model], field: Field):
        return cls.ddl.alter_column_default(model, field)

    def _alter_null(cls, model: Type[Model], field_describe: dict):
        return cls.ddl.alter_column_null(model, field_describe)

    @classmethod
    def _alter_null(cls, model: Type[Model], field: Field):
        return cls.ddl.alter_column_null(model, field)

    def _set_comment(cls, model: Type[Model], field_describe: dict):
        return cls.ddl.set_comment(model, field_describe)

    @classmethod
    def _set_comment(cls, model: Type[Model], field: Field):
        return cls.ddl.set_comment(model, field)

    def _modify_field(cls, model: Type[Model], field_describe: dict):
        return cls.ddl.modify_column(model, field_describe)

    @classmethod
    def _modify_field(cls, model: Type[Model], field: Field):
        return cls.ddl.modify_column(model, field)

    def _drop_fk(cls, model: Type[Model], field_describe: dict, reference_table_describe: dict):
        return cls.ddl.drop_fk(model, field_describe, reference_table_describe)

    @classmethod
    def _drop_fk(cls, model: Type[Model], field: ForeignKeyFieldInstance):
        return cls.ddl.drop_fk(model, field)

    def _remove_field(cls, model: Type[Model], column_name: str):
        return cls.ddl.drop_column(model, column_name)

    @classmethod
    def _remove_field(cls, model: Type[Model], field: Field):
        if isinstance(field, ForeignKeyFieldInstance):
            return cls.ddl.drop_fk(model, field)
        if isinstance(field, ManyToManyFieldInstance):
            return cls.ddl.drop_m2m(field)
        return cls.ddl.drop_column(model, field.model_field_name)

    def _rename_field(cls, model: Type[Model], old_field_name: str, new_field_name: str):
        return cls.ddl.rename_column(model, old_field_name, new_field_name)

    @classmethod
    def _rename_field(cls, model: Type[Model], old_field: Field, new_field: Field):
        return cls.ddl.rename_column(model, old_field.model_field_name, new_field.model_field_name)

    def _change_field(cls, model: Type[Model], old_field_describe: dict, new_field_describe: dict):
        db_field_types = new_field_describe.get("db_field_types")
        return cls.ddl.change_column(
            model,
            old_field_describe.get("db_column"),
            new_field_describe.get("db_column"),
            db_field_types.get(cls.dialect) or db_field_types.get(""),
        )

    @classmethod
    def _add_fk(cls, model: Type[Model], field: ForeignKeyFieldInstance):
    def _add_fk(cls, model: Type[Model], field_describe: dict, reference_table_describe: dict):
        """
        add fk
        :param model:
        :param field:
        :param field_describe:
        :param reference_table_describe:
        :return:
        """
        return cls.ddl.add_fk(model, field)

    @classmethod
    def _remove_fk(cls, model: Type[Model], field: ForeignKeyFieldInstance):
        """
        drop fk
        :param model:
        :param field:
        :return:
        """
        return cls.ddl.drop_fk(model, field)
        return cls.ddl.add_fk(model, field_describe, reference_table_describe)

    @classmethod
    def _merge_operators(cls):
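_change_field above belongs to the new describe-dict based API: instead of live Field objects it receives serialized describe() dicts and picks the column type for the active dialect from db_field_types, falling back to the "" entry. A rough illustration with a hand-written dict, not verbatim tortoise output:

field_describe = {
    "db_column": "created_at",
    "db_field_types": {"": "TIMESTAMP", "mysql": "DATETIME(6)"},
}

dialect = "mysql"  # cls.dialect in the code above
db_field_types = field_describe.get("db_field_types")
print(db_field_types.get(dialect) or db_field_types.get(""))  # DATETIME(6)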

@@ -6,7 +6,7 @@ MAX_VERSION_LENGTH = 255
class Aerich(Model):
    version = fields.CharField(max_length=MAX_VERSION_LENGTH)
    app = fields.CharField(max_length=20)
    content = fields.TextField()
    content = fields.JSONField()

    class Meta:
        ordering = ["-id"]
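Switching content from TextField to JSONField lets each version row store the serialized models snapshot (see get_models_describe below) as structured JSON rather than opaque text. A hedged sketch of how such a row might be written; the actual call sites live elsewhere in aerich and may differ:

async def save_version(version: str, app: str, models_describe: dict):
    # models_describe would come from get_models_describe(app)
    await Aerich.create(version=version, app=app, content=models_describe)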

@@ -1,17 +1,24 @@
import importlib
from typing import Dict

from click import BadOptionUsage, Context
from tortoise import BaseDBAsyncClient, Tortoise


def get_app_connection_name(config, app) -> str:
def get_app_connection_name(config, app_name: str) -> str:
    """
    get connection name
    :param config:
    :param app:
    :param app_name:
    :return:
    """
    return config.get("apps").get(app).get("default_connection", "default")
    app = config.get("apps").get(app_name)
    if app:
        return app.get("default_connection", "default")
    raise BadOptionUsage(
        option_name="--app",
        message=f'Can\'t get app named "{app_name}"',
    )
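The rewritten helper behaves the same for a known app but now fails loudly: a config without the requested app raises click's BadOptionUsage instead of the AttributeError the old chained .get() calls produced. For example:

config = {"apps": {"models": {"default_connection": "default"}}}
assert get_app_connection_name(config, "models") == "default"
# get_app_connection_name(config, "nope") -> BadOptionUsage: Can't get app named "nope"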
def get_app_connection(config, app) -> BaseDBAsyncClient:
@@ -49,3 +56,68 @@ def get_tortoise_config(ctx: Context, tortoise_orm: str) -> dict:
            ctx=ctx,
        )
    return config
_UPGRADE = "-- upgrade --\n"
_DOWNGRADE = "-- downgrade --\n"


def get_version_content_from_file(version_file: str) -> Dict:
    """
    get version content
    :param version_file:
    :return:
    """
    with open(version_file, "r", encoding="utf-8") as f:
        content = f.read()
        first = content.index(_UPGRADE)
        try:
            second = content.index(_DOWNGRADE)
        except ValueError:
            second = len(content) - 1
        upgrade_content = content[first + len(_UPGRADE) : second].strip()  # noqa:E203
        downgrade_content = content[second + len(_DOWNGRADE) :].strip()  # noqa:E203
        ret = {
            "upgrade": list(filter(lambda x: x or False, upgrade_content.split(";\n"))),
            "downgrade": list(filter(lambda x: x or False, downgrade_content.split(";\n"))),
        }
        return ret


def write_version_file(version_file: str, content: Dict):
    """
    write version file
    :param version_file:
    :param content:
    :return:
    """
    with open(version_file, "w", encoding="utf-8") as f:
        f.write(_UPGRADE)
        upgrade = content.get("upgrade")
        if len(upgrade) > 1:
            f.write(";\n".join(upgrade) + ";\n")
        else:
            f.write(f"{upgrade[0]}")
            if not upgrade[0].endswith(";"):
                f.write(";")
            f.write("\n")
        downgrade = content.get("downgrade")
        if downgrade:
            f.write(_DOWNGRADE)
            if len(downgrade) > 1:
                f.write(";\n".join(downgrade) + ";\n")
            else:
                f.write(f"{downgrade[0]};\n")
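Together these two helpers define the on-disk version-file format: an -- upgrade -- section and an optional -- downgrade -- section, one statement per ";\n"-terminated line. A quick round-trip sketch (file name made up):

write_version_file(
    "0_example.sql",
    {
        "upgrade": ["ALTER TABLE user ADD nickname VARCHAR(50);"],
        "downgrade": ["ALTER TABLE user DROP COLUMN nickname"],
    },
)
# Resulting file content:
# -- upgrade --
# ALTER TABLE user ADD nickname VARCHAR(50);
# -- downgrade --
# ALTER TABLE user DROP COLUMN nickname;
print(get_version_content_from_file("0_example.sql")["upgrade"])
# ['ALTER TABLE user ADD nickname VARCHAR(50);']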
def get_models_describe(app: str) -> Dict:
    """
    get app models describe
    :param app:
    :return:
    """
    ret = {}
    for model in Tortoise.apps.get(app).values():
        describe = model.describe()
        ret[describe.get("name")] = describe
    return ret
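get_models_describe assumes Tortoise has already been initialised for the given app. A minimal usage sketch registering only aerich's own model; the printed key is illustrative:

import asyncio

from tortoise import Tortoise

async def demo():
    await Tortoise.init(db_url="sqlite://:memory:", modules={"models": ["aerich.models"]})
    print(list(get_models_describe("models")))  # e.g. ['models.Aerich']
    await Tortoise.close_connections()

asyncio.run(demo())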

@@ -20,7 +20,10 @@ tortoise_orm = {
        "second": expand_db_url(db_url_second, True),
    },
    "apps": {
        "models": {"models": ["tests.models", "aerich.models"], "default_connection": "default"},
        "models": {
            "models": ["tests.models", "aerich.models"],
            "default_connection": "default",
        },
        "models_second": {"models": ["tests.models_second"], "default_connection": "second"},
    },
}
@@ -36,7 +39,7 @@ def reset_migrate():
    Migrate._downgrade_m2m = []


@pytest.yield_fixture(scope="session")
@pytest.fixture(scope="session")
def event_loop():
    policy = asyncio.get_event_loop_policy()
    res = policy.new_event_loop()
@@ -51,12 +54,6 @@ def event_loop():
@pytest.fixture(scope="session", autouse=True)
async def initialize_tests(event_loop, request):
    tortoise_orm["connections"]["diff_models"] = "sqlite://:memory:"
    tortoise_orm["apps"]["diff_models"] = {
        "models": ["tests.diff_models"],
        "default_connection": "diff_models",
    }
    await Tortoise.init(config=tortoise_orm, _create_db=True)
    await generate_schema_for_client(Tortoise.get_connection("default"), safe=True)
@@ -67,5 +64,5 @@ async def initialize_tests(event_loop, request):
        Migrate.ddl = SqliteDDL(client)
    elif client.schema_generator is AsyncpgSchemaGenerator:
        Migrate.ddl = PostgresDDL(client)
    Migrate.dialect = Migrate.ddl.DIALECT
    request.addfinalizer(lambda: event_loop.run_until_complete(Tortoise._drop_databases()))

poetry.lock
@@ -1,20 +1,20 @@
[[package]]
name = "aiomysql"
version = "0.0.20"
version = "0.0.21"
description = "MySQL driver for asyncio."
category = "main"
optional = true
python-versions = "*"
[package.dependencies]
PyMySQL = ">=0.9,<=0.9.2"
PyMySQL = ">=0.9,<=0.9.3"
[package.extras]
sa = ["sqlalchemy (>=1.0)"]
[[package]]
name = "aiosqlite"
version = "0.16.0"
version = "0.16.1"
description = "asyncio bridge to the standard sqlite3 module"
category = "main"
optional = false
@@ -76,16 +76,16 @@ tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>
[[package]]
name = "bandit"
version = "1.6.2"
version = "1.7.0"
description = "Security oriented static analyser for python code."
category = "dev"
optional = false
python-versions = "*"
python-versions = ">=3.5"
[package.dependencies]
colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""}
GitPython = ">=1.0.1"
PyYAML = ">=3.13"
PyYAML = ">=5.3.1"
six = ">=1.10.0"
stevedore = ">=1.20.0"
@@ -111,17 +111,6 @@ typing-extensions = ">=3.7.4"
colorama = ["colorama (>=0.4.3)"]
d = ["aiohttp (>=3.3.2)", "aiohttp-cors"]
[[package]]
name = "cffi"
version = "1.14.3"
description = "Foreign Function Interface for Python calling C code."
category = "main"
optional = true
python-versions = "*"
[package.dependencies]
pycparser = "*"
[[package]]
name = "click"
version = "7.1.2"
@@ -139,31 +128,37 @@ optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "cryptography"
version = "3.2.1"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
name = "ddlparse"
version = "1.9.0"
description = "DDL parase and Convert to BigQuery JSON schema"
category = "main"
optional = true
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"
optional = false
python-versions = "*"
[package.dependencies]
cffi = ">=1.8,<1.11.3 || >1.11.3"
six = ">=1.4.1"
pyparsing = "*"
[[package]]
name = "dictdiffer"
version = "0.8.1"
description = "Dictdiffer is a library that helps you to diff and patch dictionaries."
category = "main"
optional = false
python-versions = "*"
[package.extras]
docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"]
docstest = ["doc8", "pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"]
pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["pytest (>=3.6.0,!=3.9.0,!=3.9.1,!=3.9.2)", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"]
all = ["Sphinx (>=1.4.4)", "sphinx-rtd-theme (>=0.1.9)", "check-manifest (>=0.25)", "coverage (>=4.0)", "isort (>=4.2.2)", "mock (>=1.3.0)", "pydocstyle (>=1.0.0)", "pytest-cov (>=1.8.0)", "pytest-pep8 (>=1.0.6)", "pytest (>=2.8.0)", "tox (>=3.7.0)", "numpy (>=1.11.0)"]
docs = ["Sphinx (>=1.4.4)", "sphinx-rtd-theme (>=0.1.9)"]
numpy = ["numpy (>=1.11.0)"]
tests = ["check-manifest (>=0.25)", "coverage (>=4.0)", "isort (>=4.2.2)", "mock (>=1.3.0)", "pydocstyle (>=1.0.0)", "pytest-cov (>=1.8.0)", "pytest-pep8 (>=1.0.6)", "pytest (>=2.8.0)", "tox (>=3.7.0)"]
[[package]]
name = "execnet"
version = "1.7.1"
version = "1.8.0"
description = "execnet: rapid multi-Python deployment"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[package.dependencies]
apipkg = ">=1.4"
@@ -198,7 +193,7 @@ smmap = ">=3.0.1,<4"
[[package]]
name = "gitpython"
version = "3.1.11"
version = "3.1.12"
description = "Python Git Library"
category = "dev"
optional = false
@@ -209,18 +204,19 @@ gitdb = ">=4.0.1,<5"
[[package]]
name = "importlib-metadata"
version = "2.0.0"
version = "3.4.0"
description = "Read metadata from Python packages"
category = "dev"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
python-versions = ">=3.6"
[package.dependencies]
typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
zipp = ">=0.5"
[package.extras]
docs = ["sphinx", "rst.linker"]
testing = ["packaging", "pep517", "importlib-resources (>=1.3)"]
docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"]
[[package]]
name = "iniconfig"
@@ -240,7 +236,7 @@ python-versions = "*"
[[package]]
name = "isort"
version = "5.6.4"
version = "5.7.0"
description = "A Python utility / library to sort Python imports."
category = "dev"
optional = false
@@ -269,7 +265,7 @@ python-versions = "*"
[[package]]
name = "packaging"
version = "20.4"
version = "20.9"
description = "Core utilities for Python packages"
category = "dev"
optional = false
@@ -277,7 +273,6 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[package.dependencies]
pyparsing = ">=2.0.2"
six = "*"
[[package]]
name = "pathspec"
@@ -311,7 +306,7 @@ dev = ["pre-commit", "tox"]
[[package]]
name = "py"
version = "1.9.0"
version = "1.10.0"
description = "library with cross-python path, ini-parsing, io, code, log facilities"
category = "dev"
optional = false
@@ -325,17 +320,9 @@ category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "pycparser"
version = "2.20"
description = "C parser in Python"
category = "main"
optional = true
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "pydantic"
version = "1.7.2"
version = "1.7.3"
description = "Data validation and settings management using python 3.6 type hinting"
category = "main"
optional = false
@@ -356,26 +343,26 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "pymysql"
version = "0.9.2"
version = "0.9.3"
description = "Pure Python MySQL Driver"
category = "main"
optional = true
python-versions = "*"
[package.dependencies]
cryptography = "*"
[package.extras]
rsa = ["cryptography"]
[[package]]
name = "pyparsing"
version = "2.4.7"
description = "Python parsing module"
category = "dev"
category = "main"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
name = "pypika"
version = "0.44.0"
version = "0.44.1"
description = "A SQL query builder API for Python"
category = "main"
optional = false
@@ -383,25 +370,24 @@ python-versions = "*"
[[package]]
name = "pytest"
version = "6.1.2"
version = "6.2.2"
description = "pytest: simple powerful testing with Python"
category = "dev"
optional = false
python-versions = ">=3.5"
python-versions = ">=3.6"
[package.dependencies]
atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
attrs = ">=17.4.0"
attrs = ">=19.2.0"
colorama = {version = "*", markers = "sys_platform == \"win32\""}
importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=0.12,<1.0"
pluggy = ">=0.12,<1.0.0a1"
py = ">=1.8.2"
toml = "*"
[package.extras]
checkqa_mypy = ["mypy (==0.780)"]
testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"]
[[package]]
@@ -432,7 +418,7 @@ pytest = ">=3.10"
[[package]]
name = "pytest-mock"
version = "3.3.1"
version = "3.5.1"
description = "Thin-wrapper around the mock package for easier use with pytest"
category = "dev"
optional = false
@@ -446,7 +432,7 @@ dev = ["pre-commit", "tox", "pytest-asyncio"]
[[package]]
name = "pytest-xdist"
version = "2.1.0"
version = "2.2.0"
description = "pytest xdist plugin for distributed testing and loop-on-failing modes"
category = "dev"
optional = false
@@ -461,17 +447,25 @@ pytest-forked = "*"
psutil = ["psutil (>=3.0)"]
testing = ["filelock"]
[[package]]
name = "pytz"
version = "2020.5"
description = "World timezone definitions, modern and historical"
category = "main"
optional = false
python-versions = "*"
[[package]]
name = "pyyaml"
version = "5.3.1"
version = "5.4.1"
description = "YAML parser and emitter for Python"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
[[package]]
name = "regex"
version = "2020.10.28"
version = "2020.11.13"
description = "Alternative regular expression module, to replace re."
category = "dev"
optional = false
@@ -481,13 +475,13 @@ python-versions = "*"
name = "six"
version = "1.15.0"
description = "Python 2 and 3 compatibility utilities"
category = "main"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
name = "smmap"
version = "3.0.4"
version = "3.0.5"
description = "A pure Python implementation of a sliding window memory map manager"
category = "dev"
optional = false
@@ -495,7 +489,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "stevedore"
version = "3.2.2"
version = "3.3.0"
description = "Manage dynamic plugins for Python applications"
category = "dev"
optional = false
@@ -515,25 +509,27 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
name = "tortoise-orm"
version = "0.16.17"
version = "0.16.21"
description = "Easy async ORM for python, built with relations in mind"
category = "main"
optional = false
python-versions = "*"
python-versions = ">=3.7,<4.0"
[package.dependencies]
aerich = ">=0.3.2"
aiosqlite = ">=0.11.0"
iso8601 = ">=0.1.12"
pypika = ">=0.39.0"
typing-extensions = ">=3.7"
aiosqlite = ">=0.16.0,<0.17.0"
iso8601 = ">=0.1.13,<0.2.0"
pypika = ">=0.44.0,<0.45.0"
pytz = ">=2020.4,<2021.0"
[package.extras]
accel = ["python-rapidjson", "ciso8601 (>=2.1.2)", "uvloop (>=0.12.0)"]
docs = ["pygments", "cloud-sptheme", "docutils", "sphinx"]
aiomysql = ["aiomysql"]
asyncpg = ["asyncpg"]
accel = ["ciso8601 (>=2.1.2,<3.0.0)", "python-rapidjson", "uvloop (>=0.14.0,<0.15.0)"]
[[package]]
name = "typed-ast"
version = "1.4.1"
version = "1.4.2"
description = "a fork of Python 2 and 3 ast modules with type comment support"
category = "dev"
optional = false
@@ -565,16 +561,16 @@ dbdrivers = ["aiomysql", "asyncpg"]
[metadata]
lock-version = "1.1"
python-versions = "^3.7"
content-hash = "fb4af0739735ad1230da97320edab72d41278ffa78005116f8119705b998aa10"
content-hash = "a470f59db6ab1005b48fca403373bde6902451a9f4d413ba6d08ae33e6dbc4d0"
[metadata.files]
aiomysql = [
{file = "aiomysql-0.0.20-py3-none-any.whl", hash = "sha256:5fd798481f16625b424eec765c56d712ac78a51f3bd0175a3de94107aae43307"},
{file = "aiomysql-0.0.20.tar.gz", hash = "sha256:d89ce25d44dadb43cf2d9e4603bd67b7a0ad12d5e67208de013629ba648df2ba"},
{file = "aiomysql-0.0.21-py3-none-any.whl", hash = "sha256:a81a97da3dd732635926a8ea6adbbf2d1345799680bf61b5f89e730bcec88cc5"},
{file = "aiomysql-0.0.21.tar.gz", hash = "sha256:811569c0db118dd2685f0878f5cebf17a11e89a995fa14261d5fa0254113842c"},
]
aiosqlite = [
{file = "aiosqlite-0.16.0-py3-none-any.whl", hash = "sha256:0e5b8465b0b6aa7f2b0a1fa7f3af53216fcea1947f524b658bd4b4696e72f1b7"},
{file = "aiosqlite-0.16.0.tar.gz", hash = "sha256:d014ef07fbc523b2d195fc17cf35982285e3220eb73c1068d5df37b569950ea8"},
{file = "aiosqlite-0.16.1-py3-none-any.whl", hash = "sha256:1df802815bb1e08a26c06d5ea9df589bcb8eec56e5f3378103b0f9b223c6703c"},
{file = "aiosqlite-0.16.1.tar.gz", hash = "sha256:2e915463164efa65b60fd1901aceca829b6090082f03082618afca6fb9c8fdf7"},
]
apipkg = [
{file = "apipkg-1.5-py2.py3-none-any.whl", hash = "sha256:58587dd4dc3daefad0487f6d9ae32b4542b185e1c36db6993290e7c41ca2b47c"},
@@ -620,50 +616,12 @@ attrs = [
{file = "attrs-20.3.0.tar.gz", hash = "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"},
]
bandit = [
{file = "bandit-1.6.2-py2.py3-none-any.whl", hash = "sha256:336620e220cf2d3115877685e264477ff9d9abaeb0afe3dc7264f55fa17a3952"},
{file = "bandit-1.6.2.tar.gz", hash = "sha256:41e75315853507aa145d62a78a2a6c5e3240fe14ee7c601459d0df9418196065"},
{file = "bandit-1.7.0-py3-none-any.whl", hash = "sha256:216be4d044209fa06cf2a3e51b319769a51be8318140659719aa7a115c35ed07"},
{file = "bandit-1.7.0.tar.gz", hash = "sha256:8a4c7415254d75df8ff3c3b15cfe9042ecee628a1e40b44c15a98890fbfc2608"},
]
black = [
{file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"},
]
cffi = [
{file = "cffi-1.14.3-2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3eeeb0405fd145e714f7633a5173318bd88d8bbfc3dd0a5751f8c4f70ae629bc"},
{file = "cffi-1.14.3-2-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:cb763ceceae04803adcc4e2d80d611ef201c73da32d8f2722e9d0ab0c7f10768"},
{file = "cffi-1.14.3-2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:44f60519595eaca110f248e5017363d751b12782a6f2bd6a7041cba275215f5d"},
{file = "cffi-1.14.3-2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c53af463f4a40de78c58b8b2710ade243c81cbca641e34debf3396a9640d6ec1"},
{file = "cffi-1.14.3-2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:33c6cdc071ba5cd6d96769c8969a0531be2d08c2628a0143a10a7dcffa9719ca"},
{file = "cffi-1.14.3-2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c11579638288e53fc94ad60022ff1b67865363e730ee41ad5e6f0a17188b327a"},
{file = "cffi-1.14.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:3cb3e1b9ec43256c4e0f8d2837267a70b0e1ca8c4f456685508ae6106b1f504c"},
{file = "cffi-1.14.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:f0620511387790860b249b9241c2f13c3a80e21a73e0b861a2df24e9d6f56730"},
{file = "cffi-1.14.3-cp27-cp27m-win32.whl", hash = "sha256:005f2bfe11b6745d726dbb07ace4d53f057de66e336ff92d61b8c7e9c8f4777d"},
{file = "cffi-1.14.3-cp27-cp27m-win_amd64.whl", hash = "sha256:2f9674623ca39c9ebe38afa3da402e9326c245f0f5ceff0623dccdac15023e05"},
{file = "cffi-1.14.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:09e96138280241bd355cd585148dec04dbbedb4f46128f340d696eaafc82dd7b"},
{file = "cffi-1.14.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:3363e77a6176afb8823b6e06db78c46dbc4c7813b00a41300a4873b6ba63b171"},
{file = "cffi-1.14.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:0ef488305fdce2580c8b2708f22d7785ae222d9825d3094ab073e22e93dfe51f"},
{file = "cffi-1.14.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:0b1ad452cc824665ddc682400b62c9e4f5b64736a2ba99110712fdee5f2505c4"},
{file = "cffi-1.14.3-cp35-cp35m-win32.whl", hash = "sha256:85ba797e1de5b48aa5a8427b6ba62cf69607c18c5d4eb747604b7302f1ec382d"},
{file = "cffi-1.14.3-cp35-cp35m-win_amd64.whl", hash = "sha256:e66399cf0fc07de4dce4f588fc25bfe84a6d1285cc544e67987d22663393926d"},
{file = "cffi-1.14.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:15f351bed09897fbda218e4db5a3d5c06328862f6198d4fb385f3e14e19decb3"},
{file = "cffi-1.14.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4d7c26bfc1ea9f92084a1d75e11999e97b62d63128bcc90c3624d07813c52808"},
{file = "cffi-1.14.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:23e5d2040367322824605bc29ae8ee9175200b92cb5483ac7d466927a9b3d537"},
{file = "cffi-1.14.3-cp36-cp36m-win32.whl", hash = "sha256:a624fae282e81ad2e4871bdb767e2c914d0539708c0f078b5b355258293c98b0"},
{file = "cffi-1.14.3-cp36-cp36m-win_amd64.whl", hash = "sha256:de31b5164d44ef4943db155b3e8e17929707cac1e5bd2f363e67a56e3af4af6e"},
{file = "cffi-1.14.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:f92cdecb618e5fa4658aeb97d5eb3d2f47aa94ac6477c6daf0f306c5a3b9e6b1"},
{file = "cffi-1.14.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:22399ff4870fb4c7ef19fff6eeb20a8bbf15571913c181c78cb361024d574579"},
{file = "cffi-1.14.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:f4eae045e6ab2bb54ca279733fe4eb85f1effda392666308250714e01907f394"},
{file = "cffi-1.14.3-cp37-cp37m-win32.whl", hash = "sha256:b0358e6fefc74a16f745afa366acc89f979040e0cbc4eec55ab26ad1f6a9bfbc"},
{file = "cffi-1.14.3-cp37-cp37m-win_amd64.whl", hash = "sha256:6642f15ad963b5092d65aed022d033c77763515fdc07095208f15d3563003869"},
{file = "cffi-1.14.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:2791f68edc5749024b4722500e86303a10d342527e1e3bcac47f35fbd25b764e"},
{file = "cffi-1.14.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:529c4ed2e10437c205f38f3691a68be66c39197d01062618c55f74294a4a4828"},
{file = "cffi-1.14.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8f0f1e499e4000c4c347a124fa6a27d37608ced4fe9f7d45070563b7c4c370c9"},
{file = "cffi-1.14.3-cp38-cp38-win32.whl", hash = "sha256:3b8eaf915ddc0709779889c472e553f0d3e8b7bdf62dab764c8921b09bf94522"},
{file = "cffi-1.14.3-cp38-cp38-win_amd64.whl", hash = "sha256:bbd2f4dfee1079f76943767fce837ade3087b578aeb9f69aec7857d5bf25db15"},
{file = "cffi-1.14.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:cc75f58cdaf043fe6a7a6c04b3b5a0e694c6a9e24050967747251fb80d7bce0d"},
{file = "cffi-1.14.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:bf39a9e19ce7298f1bd6a9758fa99707e9e5b1ebe5e90f2c3913a47bc548747c"},
{file = "cffi-1.14.3-cp39-cp39-win32.whl", hash = "sha256:d80998ed59176e8cba74028762fbd9b9153b9afc71ea118e63bbf5d4d0f9552b"},
{file = "cffi-1.14.3-cp39-cp39-win_amd64.whl", hash = "sha256:c150eaa3dadbb2b5339675b88d4573c1be3cb6f2c33a6c83387e10cc0bf05bd3"},
{file = "cffi-1.14.3.tar.gz", hash = "sha256:f92f789e4f9241cd262ad7a555ca2c648a98178a953af117ef7fad46aa1d5591"},
]
click = [
{file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"},
{file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"},
@@ -672,33 +630,17 @@ colorama = [
{file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
{file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
]
cryptography = [
{file = "cryptography-3.2.1-cp27-cp27m-macosx_10_10_x86_64.whl", hash = "sha256:6dc59630ecce8c1f558277ceb212c751d6730bd12c80ea96b4ac65637c4f55e7"},
{file = "cryptography-3.2.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:75e8e6684cf0034f6bf2a97095cb95f81537b12b36a8fedf06e73050bb171c2d"},
{file = "cryptography-3.2.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4e7268a0ca14536fecfdf2b00297d4e407da904718658c1ff1961c713f90fd33"},
{file = "cryptography-3.2.1-cp27-cp27m-win32.whl", hash = "sha256:7117319b44ed1842c617d0a452383a5a052ec6aa726dfbaffa8b94c910444297"},
{file = "cryptography-3.2.1-cp27-cp27m-win_amd64.whl", hash = "sha256:a733671100cd26d816eed39507e585c156e4498293a907029969234e5e634bc4"},
{file = "cryptography-3.2.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:a75f306a16d9f9afebfbedc41c8c2351d8e61e818ba6b4c40815e2b5740bb6b8"},
{file = "cryptography-3.2.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:5849d59358547bf789ee7e0d7a9036b2d29e9a4ddf1ce5e06bb45634f995c53e"},
{file = "cryptography-3.2.1-cp35-abi3-macosx_10_10_x86_64.whl", hash = "sha256:bd717aa029217b8ef94a7d21632a3bb5a4e7218a4513d2521c2a2fd63011e98b"},
{file = "cryptography-3.2.1-cp35-abi3-manylinux1_x86_64.whl", hash = "sha256:efe15aca4f64f3a7ea0c09c87826490e50ed166ce67368a68f315ea0807a20df"},
{file = "cryptography-3.2.1-cp35-abi3-manylinux2010_x86_64.whl", hash = "sha256:32434673d8505b42c0de4de86da8c1620651abd24afe91ae0335597683ed1b77"},
{file = "cryptography-3.2.1-cp35-abi3-manylinux2014_aarch64.whl", hash = "sha256:7b8d9d8d3a9bd240f453342981f765346c87ade811519f98664519696f8e6ab7"},
{file = "cryptography-3.2.1-cp35-cp35m-win32.whl", hash = "sha256:d3545829ab42a66b84a9aaabf216a4dce7f16dbc76eb69be5c302ed6b8f4a29b"},
{file = "cryptography-3.2.1-cp35-cp35m-win_amd64.whl", hash = "sha256:a4e27ed0b2504195f855b52052eadcc9795c59909c9d84314c5408687f933fc7"},
{file = "cryptography-3.2.1-cp36-abi3-win32.whl", hash = "sha256:13b88a0bd044b4eae1ef40e265d006e34dbcde0c2f1e15eb9896501b2d8f6c6f"},
{file = "cryptography-3.2.1-cp36-abi3-win_amd64.whl", hash = "sha256:07ca431b788249af92764e3be9a488aa1d39a0bc3be313d826bbec690417e538"},
{file = "cryptography-3.2.1-cp36-cp36m-win32.whl", hash = "sha256:a035a10686532b0587d58a606004aa20ad895c60c4d029afa245802347fab57b"},
{file = "cryptography-3.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:d26a2557d8f9122f9bf445fc7034242f4375bd4e95ecda007667540270965b13"},
{file = "cryptography-3.2.1-cp37-cp37m-win32.whl", hash = "sha256:545a8550782dda68f8cdc75a6e3bf252017aa8f75f19f5a9ca940772fc0cb56e"},
{file = "cryptography-3.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:55d0b896631412b6f0c7de56e12eb3e261ac347fbaa5d5e705291a9016e5f8cb"},
{file = "cryptography-3.2.1-cp38-cp38-win32.whl", hash = "sha256:3cd75a683b15576cfc822c7c5742b3276e50b21a06672dc3a800a2d5da4ecd1b"},
{file = "cryptography-3.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:d25cecbac20713a7c3bc544372d42d8eafa89799f492a43b79e1dfd650484851"},
{file = "cryptography-3.2.1.tar.gz", hash = "sha256:d3d5e10be0cf2a12214ddee45c6bd203dab435e3d83b4560c03066eda600bfe3"},
ddlparse = [
{file = "ddlparse-1.9.0-py3-none-any.whl", hash = "sha256:a7962615a9325be7d0f182cbe34011e6283996473fb98c784c6f675b9783bc18"},
{file = "ddlparse-1.9.0.tar.gz", hash = "sha256:cdffcf2f692f304a23c8e903b00afd7e83a920b79a2ff4e2f25c875b369d4f58"},
]
dictdiffer = [
{file = "dictdiffer-0.8.1-py2.py3-none-any.whl", hash = "sha256:d79d9a39e459fe33497c858470ca0d2e93cb96621751de06d631856adfd9c390"},
{file = "dictdiffer-0.8.1.tar.gz", hash = "sha256:1adec0d67cdf6166bda96ae2934ddb5e54433998ceab63c984574d187cc563d2"},
]
execnet = [
{file = "execnet-1.7.1-py2.py3-none-any.whl", hash = "sha256:d4efd397930c46415f62f8a31388d6be4f27a91d7550eb79bc64a756e0056547"},
{file = "execnet-1.7.1.tar.gz", hash = "sha256:cacb9df31c9680ec5f95553976c4da484d407e85e41c83cb812aa014f0eddc50"},
{file = "execnet-1.8.0-py2.py3-none-any.whl", hash = "sha256:7a13113028b1e1cc4c6492b28098b3c6576c9dccc7973bfe47b342afadafb2ac"},
{file = "execnet-1.8.0.tar.gz", hash = "sha256:b73c5565e517f24b62dea8a5ceac178c661c4309d3aa0c3e420856c072c411b4"},
]
flake8 = [
{file = "flake8-3.8.4-py2.py3-none-any.whl", hash = "sha256:749dbbd6bfd0cf1318af27bf97a14e28e5ff548ef8e5b1566ccfb25a11e7c839"},
@@ -709,12 +651,12 @@ gitdb = [
{file = "gitdb-4.0.5.tar.gz", hash = "sha256:c9e1f2d0db7ddb9a704c2a0217be31214e91a4fe1dea1efad19ae42ba0c285c9"},
]
gitpython = [
{file = "GitPython-3.1.11-py3-none-any.whl", hash = "sha256:6eea89b655917b500437e9668e4a12eabdcf00229a0df1762aabd692ef9b746b"},
{file = "GitPython-3.1.11.tar.gz", hash = "sha256:befa4d101f91bad1b632df4308ec64555db684c360bd7d2130b4807d49ce86b8"},
{file = "GitPython-3.1.12-py3-none-any.whl", hash = "sha256:867ec3dfb126aac0f8296b19fb63b8c4a399f32b4b6fafe84c4b10af5fa9f7b5"},
{file = "GitPython-3.1.12.tar.gz", hash = "sha256:42dbefd8d9e2576c496ed0059f3103dcef7125b9ce16f9d5f9c834aed44a1dac"},
]
importlib-metadata = [
{file = "importlib_metadata-2.0.0-py2.py3-none-any.whl", hash = "sha256:cefa1a2f919b866c5beb7c9f7b0ebb4061f30a8a9bf16d609b000e2dfaceb9c3"},
{file = "importlib_metadata-2.0.0.tar.gz", hash = "sha256:77a540690e24b0305878c37ffd421785a6f7e53c8b5720d211b211de8d0e95da"},
{file = "importlib_metadata-3.4.0-py3-none-any.whl", hash = "sha256:ace61d5fc652dc280e7b6b4ff732a9c2d40db2c0f92bc6cb74e07b73d53a1771"},
{file = "importlib_metadata-3.4.0.tar.gz", hash = "sha256:fa5daa4477a7414ae34e95942e4dd07f62adf589143c875c133c1e53c4eff38d"},
]
iniconfig = [
{file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
@@ -726,8 +668,8 @@ iso8601 = [
{file = "iso8601-0.1.13.tar.gz", hash = "sha256:f7dec22af52025d4526be94cc1303c7d8f5379b746a3f54a8c8446384392eeb1"},
]
isort = [
{file = "isort-5.6.4-py3-none-any.whl", hash = "sha256:dcab1d98b469a12a1a624ead220584391648790275560e1a43e54c5dceae65e7"},
{file = "isort-5.6.4.tar.gz", hash = "sha256:dcaeec1b5f0eca77faea2a35ab790b4f3680ff75590bfcb7145986905aab2f58"},
{file = "isort-5.7.0-py3-none-any.whl", hash = "sha256:fff4f0c04e1825522ce6949973e83110a6e907750cd92d128b0d14aaaadbffdc"},
{file = "isort-5.7.0.tar.gz", hash = "sha256:c729845434366216d320e936b8ad6f9d681aab72dc7cbc2d51bedc3582f3ad1e"},
]
mccabe = [
{file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
@@ -738,8 +680,8 @@ mypy-extensions = [
{file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
]
packaging = [
{file = "packaging-20.4-py2.py3-none-any.whl", hash = "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"},
{file = "packaging-20.4.tar.gz", hash = "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8"},
{file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"},
{file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"},
]
pathspec = [
{file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"},
@@ -754,59 +696,55 @@ pluggy = [
{file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"},
]
py = [
{file = "py-1.9.0-py2.py3-none-any.whl", hash = "sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2"},
{file = "py-1.9.0.tar.gz", hash = "sha256:9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342"},
{file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"},
{file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"},
]
pycodestyle = [
{file = "pycodestyle-2.6.0-py2.py3-none-any.whl", hash = "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367"},
{file = "pycodestyle-2.6.0.tar.gz", hash = "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e"},
]
pycparser = [
{file = "pycparser-2.20-py2.py3-none-any.whl", hash = "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"},
{file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"},
]
pydantic = [
{file = "pydantic-1.7.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:dfaa6ed1d509b5aef4142084206584280bb6e9014f01df931ec6febdad5b200a"},
{file = "pydantic-1.7.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:2182ba2a9290964b278bcc07a8d24207de709125d520efec9ad6fa6f92ee058d"},
{file = "pydantic-1.7.2-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:0fe8b45d31ae53d74a6aa0bf801587bd49970070eac6a6326f9fa2a302703b8a"},
{file = "pydantic-1.7.2-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:01f0291f4951580f320f7ae3f2ecaf0044cdebcc9b45c5f882a7e84453362420"},
{file = "pydantic-1.7.2-cp36-cp36m-win_amd64.whl", hash = "sha256:4ba6b903e1b7bd3eb5df0e78d7364b7e831ed8b4cd781ebc3c4f1077fbcb72a4"},
{file = "pydantic-1.7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b11fc9530bf0698c8014b2bdb3bbc50243e82a7fa2577c8cfba660bcc819e768"},
{file = "pydantic-1.7.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:a3c274c49930dc047a75ecc865e435f3df89715c775db75ddb0186804d9b04d0"},
{file = "pydantic-1.7.2-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:c68b5edf4da53c98bb1ccb556ae8f655575cb2e676aef066c12b08c724a3f1a1"},
{file = "pydantic-1.7.2-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:95d4410c4e429480c736bba0db6cce5aaa311304aea685ebcf9ee47571bfd7c8"},
{file = "pydantic-1.7.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a2fc7bf77ed4a7a961d7684afe177ff59971828141e608f142e4af858e07dddc"},
{file = "pydantic-1.7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9572c0db13c8658b4a4cb705dcaae6983aeb9842248b36761b3fbc9010b740f"},
{file = "pydantic-1.7.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:f83f679e727742b0c465e7ef992d6da4a7e5268b8edd8fdaf5303276374bef52"},
{file = "pydantic-1.7.2-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:e5fece30e80087d9b7986104e2ac150647ec1658c4789c89893b03b100ca3164"},
{file = "pydantic-1.7.2-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:ce2d452961352ba229fe1e0b925b41c0c37128f08dddb788d0fd73fd87ea0f66"},
{file = "pydantic-1.7.2-cp38-cp38-win_amd64.whl", hash = "sha256:fc21a37ff3f545de80b166e1735c4172b41b017948a3fb2d5e2f03c219eac50a"},
{file = "pydantic-1.7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c9760d1556ec59ff745f88269a8f357e2b7afc75c556b3a87b8dda5bc62da8ba"},
{file = "pydantic-1.7.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c1673633ad1eea78b1c5c420a47cd48717d2ef214c8230d96ca2591e9e00958"},
{file = "pydantic-1.7.2-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:388c0c26c574ff49bad7d0fd6ed82fbccd86a0473fa3900397d3354c533d6ebb"},
{file = "pydantic-1.7.2-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:ab1d5e4d8de00575957e1c982b951bffaedd3204ddd24694e3baca3332e53a23"},
{file = "pydantic-1.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:f045cf7afb3352a03bc6cb993578a34560ac24c5d004fa33c76efec6ada1361a"},
{file = "pydantic-1.7.2-py3-none-any.whl", hash = "sha256:6665f7ab7fbbf4d3c1040925ff4d42d7549a8c15fe041164adfe4fc2134d4cce"},
{file = "pydantic-1.7.2.tar.gz", hash = "sha256:c8200aecbd1fb914e1bd061d71a4d1d79ecb553165296af0c14989b89e90d09b"},
{file = "pydantic-1.7.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c59ea046aea25be14dc22d69c97bee629e6d48d2b2ecb724d7fe8806bf5f61cd"},
{file = "pydantic-1.7.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a4143c8d0c456a093387b96e0f5ee941a950992904d88bc816b4f0e72c9a0009"},
{file = "pydantic-1.7.3-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:d8df4b9090b595511906fa48deda47af04e7d092318bfb291f4d45dfb6bb2127"},
{file = "pydantic-1.7.3-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:514b473d264671a5c672dfb28bdfe1bf1afd390f6b206aa2ec9fed7fc592c48e"},
{file = "pydantic-1.7.3-cp36-cp36m-win_amd64.whl", hash = "sha256:dba5c1f0a3aeea5083e75db9660935da90216f8a81b6d68e67f54e135ed5eb23"},
{file = "pydantic-1.7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:59e45f3b694b05a69032a0d603c32d453a23f0de80844fb14d55ab0c6c78ff2f"},
{file = "pydantic-1.7.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:5b24e8a572e4b4c18f614004dda8c9f2c07328cb5b6e314d6e1bbd536cb1a6c1"},
{file = "pydantic-1.7.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:b2b054d095b6431cdda2f852a6d2f0fdec77686b305c57961b4c5dd6d863bf3c"},
{file = "pydantic-1.7.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:025bf13ce27990acc059d0c5be46f416fc9b293f45363b3d19855165fee1874f"},
{file = "pydantic-1.7.3-cp37-cp37m-win_amd64.whl", hash = "sha256:6e3874aa7e8babd37b40c4504e3a94cc2023696ced5a0500949f3347664ff8e2"},
{file = "pydantic-1.7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e682f6442ebe4e50cb5e1cfde7dda6766fb586631c3e5569f6aa1951fd1a76ef"},
{file = "pydantic-1.7.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:185e18134bec5ef43351149fe34fda4758e53d05bb8ea4d5928f0720997b79ef"},
{file = "pydantic-1.7.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:f5b06f5099e163295b8ff5b1b71132ecf5866cc6e7f586d78d7d3fd6e8084608"},
{file = "pydantic-1.7.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:24ca47365be2a5a3cc3f4a26dcc755bcdc9f0036f55dcedbd55663662ba145ec"},
{file = "pydantic-1.7.3-cp38-cp38-win_amd64.whl", hash = "sha256:d1fe3f0df8ac0f3a9792666c69a7cd70530f329036426d06b4f899c025aca74e"},
{file = "pydantic-1.7.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f6864844b039805add62ebe8a8c676286340ba0c6d043ae5dea24114b82a319e"},
{file = "pydantic-1.7.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ecb54491f98544c12c66ff3d15e701612fc388161fd455242447083350904730"},
{file = "pydantic-1.7.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:ffd180ebd5dd2a9ac0da4e8b995c9c99e7c74c31f985ba090ee01d681b1c4b95"},
{file = "pydantic-1.7.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:8d72e814c7821125b16f1553124d12faba88e85405b0864328899aceaad7282b"},
{file = "pydantic-1.7.3-cp39-cp39-win_amd64.whl", hash = "sha256:475f2fa134cf272d6631072554f845d0630907fce053926ff634cc6bc45bf1af"},
{file = "pydantic-1.7.3-py3-none-any.whl", hash = "sha256:38be427ea01a78206bcaf9a56f835784afcba9e5b88fbdce33bbbfbcd7841229"},
{file = "pydantic-1.7.3.tar.gz", hash = "sha256:213125b7e9e64713d16d988d10997dabc6a1f73f3991e1ff8e35ebb1409c7dc9"},
]
pyflakes = [
{file = "pyflakes-2.2.0-py2.py3-none-any.whl", hash = "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92"},
{file = "pyflakes-2.2.0.tar.gz", hash = "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8"},
]
pymysql = [
{file = "PyMySQL-0.9.2-py2.py3-none-any.whl", hash = "sha256:95f057328357e0e13a30e67857a8c694878b0175797a9a203ee7adbfb9b1ec5f"},
{file = "PyMySQL-0.9.2.tar.gz", hash = "sha256:9ec760cbb251c158c19d6c88c17ca00a8632bac713890e465b2be01fdc30713f"},
{file = "PyMySQL-0.9.3-py2.py3-none-any.whl", hash = "sha256:3943fbbbc1e902f41daf7f9165519f140c4451c179380677e6a848587042561a"},
{file = "PyMySQL-0.9.3.tar.gz", hash = "sha256:d8c059dcd81dedb85a9f034d5e22dcb4442c0b201908bede99e306d65ea7c8e7"},
]
pyparsing = [
{file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"},
{file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"},
]
pypika = [
{file = "pypika-0.44.0.tar.gz", hash = "sha256:3d49f12d2b9f08054d8bedf7ce6e8e900ac3dd8274fc23746ece9d124e29b7e5"},
{file = "pypika-0.44.1.tar.gz", hash = "sha256:316839144d3ad7656405a10cdd26d2181f16bb8ff7e256d616ffb50335ca1fcb"},
]
pytest = [
{file = "pytest-6.1.2-py3-none-any.whl", hash = "sha256:4288fed0d9153d9646bfcdf0c0428197dba1ecb27a33bb6e031d002fa88653fe"},
{file = "pytest-6.1.2.tar.gz", hash = "sha256:c0a7e94a8cdbc5422a51ccdad8e6f1024795939cc89159a0ae7f0b316ad3823e"},
{file = "pytest-6.2.2-py3-none-any.whl", hash = "sha256:b574b57423e818210672e07ca1fa90aaf194a4f63f3ab909a2c67ebb22913839"},
{file = "pytest-6.2.2.tar.gz", hash = "sha256:9d1edf9e7d0b84d72ea3dbcdfd22b35fb543a5e8f2a60092dd578936bf63d7f9"},
]
pytest-asyncio = [
{file = "pytest-asyncio-0.14.0.tar.gz", hash = "sha256:9882c0c6b24429449f5f969a5158b528f39bde47dc32e85b9f0403965017e700"},
@@ -817,96 +755,134 @@ pytest-forked = [
{file = "pytest_forked-1.3.0-py2.py3-none-any.whl", hash = "sha256:dc4147784048e70ef5d437951728825a131b81714b398d5d52f17c7c144d8815"},
]
pytest-mock = [
{file = "pytest-mock-3.3.1.tar.gz", hash = "sha256:a4d6d37329e4a893e77d9ffa89e838dd2b45d5dc099984cf03c703ac8411bb82"},
{file = "pytest_mock-3.3.1-py3-none-any.whl", hash = "sha256:024e405ad382646318c4281948aadf6fe1135632bea9cc67366ea0c4098ef5f2"},
{file = "pytest-mock-3.5.1.tar.gz", hash = "sha256:a1e2aba6af9560d313c642dae7e00a2a12b022b80301d9d7fc8ec6858e1dd9fc"},
{file = "pytest_mock-3.5.1-py3-none-any.whl", hash = "sha256:379b391cfad22422ea2e252bdfc008edd08509029bcde3c25b2c0bd741e0424e"},
]
pytest-xdist = [
{file = "pytest-xdist-2.1.0.tar.gz", hash = "sha256:82d938f1a24186520e2d9d3a64ef7d9ac7ecdf1a0659e095d18e596b8cbd0672"},
{file = "pytest_xdist-2.1.0-py3-none-any.whl", hash = "sha256:7c629016b3bb006b88ac68e2b31551e7becf173c76b977768848e2bbed594d90"},
{file = "pytest-xdist-2.2.0.tar.gz", hash = "sha256:1d8edbb1a45e8e1f8e44b1260583107fc23f8bc8da6d18cb331ff61d41258ecf"},
{file = "pytest_xdist-2.2.0-py3-none-any.whl", hash = "sha256:f127e11e84ad37cc1de1088cb2990f3c354630d428af3f71282de589c5bb779b"},
]
pytz = [
{file = "pytz-2020.5-py2.py3-none-any.whl", hash = "sha256:16962c5fb8db4a8f63a26646d8886e9d769b6c511543557bc84e9569fb9a9cb4"},
{file = "pytz-2020.5.tar.gz", hash = "sha256:180befebb1927b16f6b57101720075a984c019ac16b1b7575673bea42c6c3da5"},
]
pyyaml = [
{file = "PyYAML-5.3.1-cp27-cp27m-win32.whl", hash = "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f"},
{file = "PyYAML-5.3.1-cp27-cp27m-win_amd64.whl", hash = "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76"},
{file = "PyYAML-5.3.1-cp35-cp35m-win32.whl", hash = "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2"},
{file = "PyYAML-5.3.1-cp35-cp35m-win_amd64.whl", hash = "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c"},
{file = "PyYAML-5.3.1-cp36-cp36m-win32.whl", hash = "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2"},
{file = "PyYAML-5.3.1-cp36-cp36m-win_amd64.whl", hash = "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648"},
{file = "PyYAML-5.3.1-cp37-cp37m-win32.whl", hash = "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a"},
{file = "PyYAML-5.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf"},
{file = "PyYAML-5.3.1-cp38-cp38-win32.whl", hash = "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97"},
{file = "PyYAML-5.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee"},
{file = "PyYAML-5.3.1.tar.gz", hash = "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d"},
{file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"},
{file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"},
{file = "PyYAML-5.4.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8"},
{file = "PyYAML-5.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185"},
{file = "PyYAML-5.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253"},
{file = "PyYAML-5.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc"},
{file = "PyYAML-5.4.1-cp36-cp36m-win32.whl", hash = "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5"},
{file = "PyYAML-5.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df"},
{file = "PyYAML-5.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018"},
{file = "PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63"},
{file = "PyYAML-5.4.1-cp37-cp37m-win32.whl", hash = "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b"},
{file = "PyYAML-5.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf"},
{file = "PyYAML-5.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46"},
{file = "PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb"},
{file = "PyYAML-5.4.1-cp38-cp38-win32.whl", hash = "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc"},
{file = "PyYAML-5.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696"},
{file = "PyYAML-5.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77"},
{file = "PyYAML-5.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183"},
{file = "PyYAML-5.4.1-cp39-cp39-win32.whl", hash = "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10"},
{file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"},
{file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"},
]
regex = [
{file = "regex-2020.10.28-cp27-cp27m-win32.whl", hash = "sha256:4b5a9bcb56cc146c3932c648603b24514447eafa6ce9295234767bf92f69b504"},
{file = "regex-2020.10.28-cp27-cp27m-win_amd64.whl", hash = "sha256:c13d311a4c4a8d671f5860317eb5f09591fbe8259676b86a85769423b544451e"},
{file = "regex-2020.10.28-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c8a2b7ccff330ae4c460aff36626f911f918555660cc28163417cb84ffb25789"},
{file = "regex-2020.10.28-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4afa350f162551cf402bfa3cd8302165c8e03e689c897d185f16a167328cc6dd"},
{file = "regex-2020.10.28-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:b88fa3b8a3469f22b4f13d045d9bd3eda797aa4e406fde0a2644bc92bbdd4bdd"},
{file = "regex-2020.10.28-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:f43109822df2d3faac7aad79613f5f02e4eab0fc8ad7932d2e70e2a83bd49c26"},
{file = "regex-2020.10.28-cp36-cp36m-win32.whl", hash = "sha256:8092a5a06ad9a7a247f2a76ace121183dc4e1a84c259cf9c2ce3bbb69fac3582"},
{file = "regex-2020.10.28-cp36-cp36m-win_amd64.whl", hash = "sha256:49461446b783945597c4076aea3f49aee4b4ce922bd241e4fcf62a3e7c61794c"},
{file = "regex-2020.10.28-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:8ca9dca965bd86ea3631b975d63b0693566d3cc347e55786d5514988b6f5b84c"},
{file = "regex-2020.10.28-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ea37320877d56a7f0a1e6a625d892cf963aa7f570013499f5b8d5ab8402b5625"},
{file = "regex-2020.10.28-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:3a5f08039eee9ea195a89e180c5762bfb55258bfb9abb61a20d3abee3b37fd12"},
{file = "regex-2020.10.28-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:cb905f3d2e290a8b8f1579d3984f2cfa7c3a29cc7cba608540ceeed18513f520"},
{file = "regex-2020.10.28-cp37-cp37m-win32.whl", hash = "sha256:a62162be05edf64f819925ea88d09d18b09bebf20971b363ce0c24e8b4aa14c0"},
{file = "regex-2020.10.28-cp37-cp37m-win_amd64.whl", hash = "sha256:03855ee22980c3e4863dc84c42d6d2901133362db5daf4c36b710dd895d78f0a"},
{file = "regex-2020.10.28-cp38-cp38-manylinux1_i686.whl", hash = "sha256:625116aca6c4b57c56ea3d70369cacc4d62fead4930f8329d242e4fe7a58ce4b"},
{file = "regex-2020.10.28-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2dc522e25e57e88b4980d2bdd334825dbf6fa55f28a922fc3bfa60cc09e5ef53"},
{file = "regex-2020.10.28-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:119e0355dbdd4cf593b17f2fc5dbd4aec2b8899d0057e4957ba92f941f704bf5"},
{file = "regex-2020.10.28-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:cfcf28ed4ce9ced47b9b9670a4f0d3d3c0e4d4779ad4dadb1ad468b097f808aa"},
{file = "regex-2020.10.28-cp38-cp38-win32.whl", hash = "sha256:06b52815d4ad38d6524666e0d50fe9173533c9cc145a5779b89733284e6f688f"},
{file = "regex-2020.10.28-cp38-cp38-win_amd64.whl", hash = "sha256:c3466a84fce42c2016113101018a9981804097bacbab029c2d5b4fcb224b89de"},
{file = "regex-2020.10.28-cp39-cp39-manylinux1_i686.whl", hash = "sha256:c2c6c56ee97485a127555c9595c069201b5161de9d05495fbe2132b5ac104786"},
{file = "regex-2020.10.28-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1ec66700a10e3c75f1f92cbde36cca0d3aaee4c73dfa26699495a3a30b09093c"},
{file = "regex-2020.10.28-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:11116d424734fe356d8777f89d625f0df783251ada95d6261b4c36ad27a394bb"},
{file = "regex-2020.10.28-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f1fce1e4929157b2afeb4bb7069204d4370bab9f4fc03ca1fbec8bd601f8c87d"},
{file = "regex-2020.10.28-cp39-cp39-win32.whl", hash = "sha256:832339223b9ce56b7b15168e691ae654d345ac1635eeb367ade9ecfe0e66bee0"},
{file = "regex-2020.10.28-cp39-cp39-win_amd64.whl", hash = "sha256:654c1635f2313d0843028487db2191530bca45af61ca85d0b16555c399625b0e"},
{file = "regex-2020.10.28.tar.gz", hash = "sha256:dd3e6547ecf842a29cf25123fbf8d2461c53c8d37aa20d87ecee130c89b7079b"},
{file = "regex-2020.11.13-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8b882a78c320478b12ff024e81dc7d43c1462aa4a3341c754ee65d857a521f85"},
{file = "regex-2020.11.13-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a63f1a07932c9686d2d416fb295ec2c01ab246e89b4d58e5fa468089cab44b70"},
{file = "regex-2020.11.13-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:6e4b08c6f8daca7d8f07c8d24e4331ae7953333dbd09c648ed6ebd24db5a10ee"},
{file = "regex-2020.11.13-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bba349276b126947b014e50ab3316c027cac1495992f10e5682dc677b3dfa0c5"},
{file = "regex-2020.11.13-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:56e01daca75eae420bce184edd8bb341c8eebb19dd3bce7266332258f9fb9dd7"},
{file = "regex-2020.11.13-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:6a8ce43923c518c24a2579fda49f093f1397dad5d18346211e46f134fc624e31"},
{file = "regex-2020.11.13-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:1ab79fcb02b930de09c76d024d279686ec5d532eb814fd0ed1e0051eb8bd2daa"},
{file = "regex-2020.11.13-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:9801c4c1d9ae6a70aeb2128e5b4b68c45d4f0af0d1535500884d644fa9b768c6"},
{file = "regex-2020.11.13-cp36-cp36m-win32.whl", hash = "sha256:49cae022fa13f09be91b2c880e58e14b6da5d10639ed45ca69b85faf039f7a4e"},
{file = "regex-2020.11.13-cp36-cp36m-win_amd64.whl", hash = "sha256:749078d1eb89484db5f34b4012092ad14b327944ee7f1c4f74d6279a6e4d1884"},
{file = "regex-2020.11.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b2f4007bff007c96a173e24dcda236e5e83bde4358a557f9ccf5e014439eae4b"},
{file = "regex-2020.11.13-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:38c8fd190db64f513fe4e1baa59fed086ae71fa45083b6936b52d34df8f86a88"},
{file = "regex-2020.11.13-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5862975b45d451b6db51c2e654990c1820523a5b07100fc6903e9c86575202a0"},
{file = "regex-2020.11.13-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:262c6825b309e6485ec2493ffc7e62a13cf13fb2a8b6d212f72bd53ad34118f1"},
{file = "regex-2020.11.13-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:bafb01b4688833e099d79e7efd23f99172f501a15c44f21ea2118681473fdba0"},
{file = "regex-2020.11.13-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:e32f5f3d1b1c663af7f9c4c1e72e6ffe9a78c03a31e149259f531e0fed826512"},
{file = "regex-2020.11.13-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:3bddc701bdd1efa0d5264d2649588cbfda549b2899dc8d50417e47a82e1387ba"},
{file = "regex-2020.11.13-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:02951b7dacb123d8ea6da44fe45ddd084aa6777d4b2454fa0da61d569c6fa538"},
{file = "regex-2020.11.13-cp37-cp37m-win32.whl", hash = "sha256:0d08e71e70c0237883d0bef12cad5145b84c3705e9c6a588b2a9c7080e5af2a4"},
{file = "regex-2020.11.13-cp37-cp37m-win_amd64.whl", hash = "sha256:1fa7ee9c2a0e30405e21031d07d7ba8617bc590d391adfc2b7f1e8b99f46f444"},
{file = "regex-2020.11.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:baf378ba6151f6e272824b86a774326f692bc2ef4cc5ce8d5bc76e38c813a55f"},
{file = "regex-2020.11.13-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e3faaf10a0d1e8e23a9b51d1900b72e1635c2d5b0e1bea1c18022486a8e2e52d"},
{file = "regex-2020.11.13-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2a11a3e90bd9901d70a5b31d7dd85114755a581a5da3fc996abfefa48aee78af"},
{file = "regex-2020.11.13-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1ebb090a426db66dd80df8ca85adc4abfcbad8a7c2e9a5ec7513ede522e0a8f"},
{file = "regex-2020.11.13-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:b2b1a5ddae3677d89b686e5c625fc5547c6e492bd755b520de5332773a8af06b"},
{file = "regex-2020.11.13-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:2c99e97d388cd0a8d30f7c514d67887d8021541b875baf09791a3baad48bb4f8"},
{file = "regex-2020.11.13-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:c084582d4215593f2f1d28b65d2a2f3aceff8342aa85afd7be23a9cad74a0de5"},
{file = "regex-2020.11.13-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:a3d748383762e56337c39ab35c6ed4deb88df5326f97a38946ddd19028ecce6b"},
{file = "regex-2020.11.13-cp38-cp38-win32.whl", hash = "sha256:7913bd25f4ab274ba37bc97ad0e21c31004224ccb02765ad984eef43e04acc6c"},
{file = "regex-2020.11.13-cp38-cp38-win_amd64.whl", hash = "sha256:6c54ce4b5d61a7129bad5c5dc279e222afd00e721bf92f9ef09e4fae28755683"},
{file = "regex-2020.11.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1862a9d9194fae76a7aaf0150d5f2a8ec1da89e8b55890b1786b8f88a0f619dc"},
{file = "regex-2020.11.13-cp39-cp39-manylinux1_i686.whl", hash = "sha256:4902e6aa086cbb224241adbc2f06235927d5cdacffb2425c73e6570e8d862364"},
{file = "regex-2020.11.13-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7a25fcbeae08f96a754b45bdc050e1fb94b95cab046bf56b016c25e9ab127b3e"},
{file = "regex-2020.11.13-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:d2d8ce12b7c12c87e41123997ebaf1a5767a5be3ec545f64675388970f415e2e"},
{file = "regex-2020.11.13-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f7d29a6fc4760300f86ae329e3b6ca28ea9c20823df123a2ea8693e967b29917"},
{file = "regex-2020.11.13-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:717881211f46de3ab130b58ec0908267961fadc06e44f974466d1887f865bd5b"},
{file = "regex-2020.11.13-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3128e30d83f2e70b0bed9b2a34e92707d0877e460b402faca908c6667092ada9"},
{file = "regex-2020.11.13-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:8f6a2229e8ad946e36815f2a03386bb8353d4bde368fdf8ca5f0cb97264d3b5c"},
{file = "regex-2020.11.13-cp39-cp39-win32.whl", hash = "sha256:f8f295db00ef5f8bae530fc39af0b40486ca6068733fb860b42115052206466f"},
{file = "regex-2020.11.13-cp39-cp39-win_amd64.whl", hash = "sha256:a15f64ae3a027b64496a71ab1f722355e570c3fac5ba2801cafce846bf5af01d"},
{file = "regex-2020.11.13.tar.gz", hash = "sha256:83d6b356e116ca119db8e7c6fc2983289d87b27b3fac238cfe5dca529d884562"},
]
six = [
{file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"},
{file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"},
]
smmap = [
{file = "smmap-3.0.4-py2.py3-none-any.whl", hash = "sha256:54c44c197c819d5ef1991799a7e30b662d1e520f2ac75c9efbeb54a742214cf4"},
{file = "smmap-3.0.4.tar.gz", hash = "sha256:9c98bbd1f9786d22f14b3d4126894d56befb835ec90cef151af566c7e19b5d24"},
{file = "smmap-3.0.5-py2.py3-none-any.whl", hash = "sha256:7bfcf367828031dc893530a29cb35eb8c8f2d7c8f2d0989354d75d24c8573714"},
{file = "smmap-3.0.5.tar.gz", hash = "sha256:84c2751ef3072d4f6b2785ec7ee40244c6f45eb934d9e543e2c51f1bd3d54c50"},
]
stevedore = [
{file = "stevedore-3.2.2-py3-none-any.whl", hash = "sha256:5e1ab03eaae06ef6ce23859402de785f08d97780ed774948ef16c4652c41bc62"},
{file = "stevedore-3.2.2.tar.gz", hash = "sha256:f845868b3a3a77a2489d226568abe7328b5c2d4f6a011cc759dfa99144a521f0"},
{file = "stevedore-3.3.0-py3-none-any.whl", hash = "sha256:50d7b78fbaf0d04cd62411188fa7eedcb03eb7f4c4b37005615ceebe582aa82a"},
{file = "stevedore-3.3.0.tar.gz", hash = "sha256:3a5bbd0652bf552748871eaa73a4a8dc2899786bc497a2aa1fcb4dcdb0debeee"},
]
toml = [
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
]
tortoise-orm = [
{file = "tortoise-orm-0.16.17.tar.gz", hash = "sha256:bcb978d302ba1d71ee2089352ca07b3ed73fd55936b5d580e29ffed5a0784c03"},
{file = "tortoise-orm-0.16.21.tar.gz", hash = "sha256:9729eac3eb58e59c32e2815d5ff1941a71e1eecd63834ae7199b6084c1a454b5"},
{file = "tortoise_orm-0.16.21-py3-none-any.whl", hash = "sha256:f36aa16d07bab69b141e91f8791c0f878fbcc0acfffa3c671ea10a6ad73c54ed"},
]
typed-ast = [
{file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3"},
{file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb"},
{file = "typed_ast-1.4.1-cp35-cp35m-win32.whl", hash = "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919"},
{file = "typed_ast-1.4.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01"},
{file = "typed_ast-1.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75"},
{file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652"},
{file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7"},
{file = "typed_ast-1.4.1-cp36-cp36m-win32.whl", hash = "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1"},
{file = "typed_ast-1.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa"},
{file = "typed_ast-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614"},
{file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41"},
{file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b"},
{file = "typed_ast-1.4.1-cp37-cp37m-win32.whl", hash = "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe"},
{file = "typed_ast-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355"},
{file = "typed_ast-1.4.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6"},
{file = "typed_ast-1.4.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907"},
{file = "typed_ast-1.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d"},
{file = "typed_ast-1.4.1-cp38-cp38-win32.whl", hash = "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c"},
{file = "typed_ast-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4"},
{file = "typed_ast-1.4.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34"},
{file = "typed_ast-1.4.1.tar.gz", hash = "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b"},
{file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:7703620125e4fb79b64aa52427ec192822e9f45d37d4b6625ab37ef403e1df70"},
{file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c9aadc4924d4b5799112837b226160428524a9a45f830e0d0f184b19e4090487"},
{file = "typed_ast-1.4.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:9ec45db0c766f196ae629e509f059ff05fc3148f9ffd28f3cfe75d4afb485412"},
{file = "typed_ast-1.4.2-cp35-cp35m-win32.whl", hash = "sha256:85f95aa97a35bdb2f2f7d10ec5bbdac0aeb9dafdaf88e17492da0504de2e6400"},
{file = "typed_ast-1.4.2-cp35-cp35m-win_amd64.whl", hash = "sha256:9044ef2df88d7f33692ae3f18d3be63dec69c4fb1b5a4a9ac950f9b4ba571606"},
{file = "typed_ast-1.4.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c1c876fd795b36126f773db9cbb393f19808edd2637e00fd6caba0e25f2c7b64"},
{file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:5dcfc2e264bd8a1db8b11a892bd1647154ce03eeba94b461effe68790d8b8e07"},
{file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8db0e856712f79c45956da0c9a40ca4246abc3485ae0d7ecc86a20f5e4c09abc"},
{file = "typed_ast-1.4.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d003156bb6a59cda9050e983441b7fa2487f7800d76bdc065566b7d728b4581a"},
{file = "typed_ast-1.4.2-cp36-cp36m-win32.whl", hash = "sha256:4c790331247081ea7c632a76d5b2a265e6d325ecd3179d06e9cf8d46d90dd151"},
{file = "typed_ast-1.4.2-cp36-cp36m-win_amd64.whl", hash = "sha256:d175297e9533d8d37437abc14e8a83cbc68af93cc9c1c59c2c292ec59a0697a3"},
{file = "typed_ast-1.4.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf54cfa843f297991b7388c281cb3855d911137223c6b6d2dd82a47ae5125a41"},
{file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:b4fcdcfa302538f70929eb7b392f536a237cbe2ed9cba88e3bf5027b39f5f77f"},
{file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:987f15737aba2ab5f3928c617ccf1ce412e2e321c77ab16ca5a293e7bbffd581"},
{file = "typed_ast-1.4.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:37f48d46d733d57cc70fd5f30572d11ab8ed92da6e6b28e024e4a3edfb456e37"},
{file = "typed_ast-1.4.2-cp37-cp37m-win32.whl", hash = "sha256:36d829b31ab67d6fcb30e185ec996e1f72b892255a745d3a82138c97d21ed1cd"},
{file = "typed_ast-1.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8368f83e93c7156ccd40e49a783a6a6850ca25b556c0fa0240ed0f659d2fe496"},
{file = "typed_ast-1.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:963c80b583b0661918718b095e02303d8078950b26cc00b5e5ea9ababe0de1fc"},
{file = "typed_ast-1.4.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e683e409e5c45d5c9082dc1daf13f6374300806240719f95dc783d1fc942af10"},
{file = "typed_ast-1.4.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:84aa6223d71012c68d577c83f4e7db50d11d6b1399a9c779046d75e24bed74ea"},
{file = "typed_ast-1.4.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:a38878a223bdd37c9709d07cd357bb79f4c760b29210e14ad0fb395294583787"},
{file = "typed_ast-1.4.2-cp38-cp38-win32.whl", hash = "sha256:a2c927c49f2029291fbabd673d51a2180038f8cd5a5b2f290f78c4516be48be2"},
{file = "typed_ast-1.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:c0c74e5579af4b977c8b932f40a5464764b2f86681327410aa028a22d2f54937"},
{file = "typed_ast-1.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07d49388d5bf7e863f7fa2f124b1b1d89d8aa0e2f7812faff0a5658c01c59aa1"},
{file = "typed_ast-1.4.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:240296b27397e4e37874abb1df2a608a92df85cf3e2a04d0d4d61055c8305ba6"},
{file = "typed_ast-1.4.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:d746a437cdbca200622385305aedd9aef68e8a645e385cc483bdc5e488f07166"},
{file = "typed_ast-1.4.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:14bf1522cdee369e8f5581238edac09150c765ec1cb33615855889cf33dcb92d"},
{file = "typed_ast-1.4.2-cp39-cp39-win32.whl", hash = "sha256:cc7b98bf58167b7f2db91a4327da24fb93368838eb84a44c472283778fc2446b"},
{file = "typed_ast-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:7147e2a76c75f0f64c4319886e7639e490fee87c9d25cb1d4faef1d8cf83a440"},
{file = "typed_ast-1.4.2.tar.gz", hash = "sha256:9fc0b3cb5d1720e7141d103cf4819aea239f7d136acf9ee4a69b047b7986175a"},
]
typing-extensions = [
{file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"},


@@ -1,6 +1,6 @@
[tool.poetry]
name = "aerich"
version = "0.3.3"
version = "0.5.0"
description = "A database migrations tool for Tortoise ORM."
authors = ["long2ice <long2ice@gmail.com>"]
license = "Apache-2.0"
@@ -16,16 +16,18 @@ include = ["CHANGELOG.md", "LICENSE", "README.md"]
[tool.poetry.dependencies]
python = "^3.7"
tortoise-orm = "*"
tortoise-orm = "^0.16.21"
click = "*"
pydantic = "*"
aiomysql = {version = "*", optional = true}
asyncpg = {version = "*", optional = true}
aiomysql = { version = "*", optional = true }
asyncpg = { version = "*", optional = true }
ddlparse = "*"
dictdiffer = "*"
[tool.poetry.dev-dependencies]
flake8 = "*"
isort = "*"
black = "*"
black = "^20.8b1"
pytest = "*"
pytest-xdist = "*"
pytest-asyncio = "*"


@@ -23,18 +23,19 @@ class Status(IntEnum):
class User(Model):
username = fields.CharField(max_length=20, unique=True)
password = fields.CharField(max_length=200)
password = fields.CharField(max_length=100)
last_login = fields.DatetimeField(description="Last Login", default=datetime.datetime.now)
is_active = fields.BooleanField(default=True, description="Is Active")
is_superuser = fields.BooleanField(default=False, description="Is SuperUser")
avatar = fields.CharField(max_length=200, default="")
intro = fields.TextField(default="")
class Email(Model):
email = fields.CharField(max_length=200)
email_id = fields.IntField(pk=True)
email = fields.CharField(max_length=200, index=True)
is_primary = fields.BooleanField(default=False)
user = fields.ForeignKeyField("models.User", db_constraint=False)
address = fields.CharField(max_length=200)
users = fields.ManyToManyField("models.User")
class Category(Model):
@@ -47,17 +48,21 @@ class Category(Model):
class Product(Model):
categories = fields.ManyToManyField("models.Category")
name = fields.CharField(max_length=50)
view_num = fields.IntField(description="View Num")
view_num = fields.IntField(description="View Num", default=0)
sort = fields.IntField()
is_reviewed = fields.BooleanField(description="Is Reviewed")
type = fields.IntEnumField(ProductType, description="Product Type")
image = fields.CharField(max_length=200)
pic = fields.CharField(max_length=200)
body = fields.TextField()
created_at = fields.DatetimeField(auto_now_add=True)
class Meta:
unique_together = (("name", "type"),)
class Config(Model):
label = fields.CharField(max_length=200)
key = fields.CharField(max_length=20)
value = fields.JSONField()
status: Status = fields.IntEnumField(Status, default=Status.on)
status: Status = fields.IntEnumField(Status)
user = fields.ForeignKeyField("models.User", description="User")


@@ -24,7 +24,7 @@ class Status(IntEnum):
class User(Model):
username = fields.CharField(max_length=20)
password = fields.CharField(max_length=200)
last_login_at = fields.DatetimeField(description="Last Login", default=datetime.datetime.now)
last_login = fields.DatetimeField(description="Last Login", default=datetime.datetime.now)
is_active = fields.BooleanField(default=True, description="Is Active")
is_superuser = fields.BooleanField(default=False, description="Is SuperUser")
avatar = fields.CharField(max_length=200, default="")
@@ -34,17 +34,18 @@ class User(Model):
class Email(Model):
email = fields.CharField(max_length=200)
is_primary = fields.BooleanField(default=False)
user = fields.ForeignKeyField("diff_models.User", db_constraint=True)
user = fields.ForeignKeyField("models.User", db_constraint=False)
class Category(Model):
slug = fields.CharField(max_length=200)
user = fields.ForeignKeyField("diff_models.User", description="User")
name = fields.CharField(max_length=200)
user = fields.ForeignKeyField("models.User", description="User")
created_at = fields.DatetimeField(auto_now_add=True)
class Product(Model):
categories = fields.ManyToManyField("diff_models.Category")
categories = fields.ManyToManyField("models.Category")
name = fields.CharField(max_length=50)
view_num = fields.IntField(description="View Num")
sort = fields.IntField()


@@ -5,7 +5,7 @@ from aerich.ddl.postgres import PostgresDDL
from aerich.ddl.sqlite import SqliteDDL
from aerich.exceptions import NotSupportError
from aerich.migrate import Migrate
from tests.models import Category, User
from tests.models import Category, Product, User
def test_create_table():
@@ -42,7 +42,7 @@ def test_create_table():
"id" SERIAL NOT NULL PRIMARY KEY,
"slug" VARCHAR(200) NOT NULL,
"name" VARCHAR(200) NOT NULL,
"created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
"user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE
);
COMMENT ON COLUMN "category"."user_id" IS 'User';"""
@@ -50,7 +50,7 @@ COMMENT ON COLUMN "category"."user_id" IS 'User';"""
def test_drop_table():
ret = Migrate.ddl.drop_table(Category)
ret = Migrate.ddl.drop_table(Category._meta.db_table)
if isinstance(Migrate.ddl, MysqlDDL):
assert ret == "DROP TABLE IF EXISTS `category`"
else:
@@ -58,7 +58,7 @@ def test_drop_table():
def test_add_column():
ret = Migrate.ddl.add_column(Category, Category._meta.fields_map.get("name"))
ret = Migrate.ddl.add_column(Category, Category._meta.fields_map.get("name").describe(False))
if isinstance(Migrate.ddl, MysqlDDL):
assert ret == "ALTER TABLE `category` ADD `name` VARCHAR(200) NOT NULL"
else:
@@ -67,13 +67,12 @@ def test_add_column():
def test_modify_column():
if isinstance(Migrate.ddl, SqliteDDL):
with pytest.raises(NotSupportError):
ret0 = Migrate.ddl.modify_column(Category, Category._meta.fields_map.get("name"))
ret1 = Migrate.ddl.modify_column(User, User._meta.fields_map.get("is_active"))
return
else:
ret0 = Migrate.ddl.modify_column(Category, Category._meta.fields_map.get("name"))
ret1 = Migrate.ddl.modify_column(User, User._meta.fields_map.get("is_active"))
ret0 = Migrate.ddl.modify_column(
Category, Category._meta.fields_map.get("name").describe(False)
)
ret1 = Migrate.ddl.modify_column(User, User._meta.fields_map.get("is_active").describe(False))
if isinstance(Migrate.ddl, MysqlDDL):
assert ret0 == "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200) NOT NULL"
elif isinstance(Migrate.ddl, PostgresDDL):
@@ -89,47 +88,56 @@ def test_modify_column():
def test_alter_column_default():
ret = Migrate.ddl.alter_column_default(Category, Category._meta.fields_map.get("name"))
if isinstance(Migrate.ddl, SqliteDDL):
return
ret = Migrate.ddl.alter_column_default(
Category, Category._meta.fields_map.get("name").describe(False)
)
if isinstance(Migrate.ddl, PostgresDDL):
assert ret == 'ALTER TABLE "category" ALTER COLUMN "name" DROP DEFAULT'
else:
assert ret is None
elif isinstance(Migrate.ddl, MysqlDDL):
assert ret == "ALTER TABLE `category` ALTER COLUMN `name` DROP DEFAULT"
ret = Migrate.ddl.alter_column_default(Category, Category._meta.fields_map.get("created_at"))
ret = Migrate.ddl.alter_column_default(
Category, Category._meta.fields_map.get("created_at").describe(False)
)
if isinstance(Migrate.ddl, PostgresDDL):
assert (
ret == 'ALTER TABLE "category" ALTER COLUMN "created_at" SET DEFAULT CURRENT_TIMESTAMP'
)
else:
assert ret is None
elif isinstance(Migrate.ddl, MysqlDDL):
assert (
ret
== "ALTER TABLE `category` ALTER COLUMN `created_at` SET DEFAULT CURRENT_TIMESTAMP(6)"
)
ret = Migrate.ddl.alter_column_default(User, User._meta.fields_map.get("avatar"))
ret = Migrate.ddl.alter_column_default(
Product, Product._meta.fields_map.get("view_num").describe(False)
)
if isinstance(Migrate.ddl, PostgresDDL):
assert ret == 'ALTER TABLE "user" ALTER COLUMN "avatar" SET DEFAULT \'\''
else:
assert ret is None
assert ret == 'ALTER TABLE "product" ALTER COLUMN "view_num" SET DEFAULT 0'
elif isinstance(Migrate.ddl, MysqlDDL):
assert ret == "ALTER TABLE `product` ALTER COLUMN `view_num` SET DEFAULT 0"
def test_alter_column_null():
ret = Migrate.ddl.alter_column_null(Category, Category._meta.fields_map.get("name"))
if isinstance(Migrate.ddl, (SqliteDDL, MysqlDDL)):
return
ret = Migrate.ddl.alter_column_null(
Category, Category._meta.fields_map.get("name").describe(False)
)
if isinstance(Migrate.ddl, PostgresDDL):
assert ret == 'ALTER TABLE "category" ALTER COLUMN "name" SET NOT NULL'
else:
assert ret is None
def test_set_comment():
ret = Migrate.ddl.set_comment(Category, Category._meta.fields_map.get("name"))
if isinstance(Migrate.ddl, PostgresDDL):
assert ret == 'COMMENT ON COLUMN "category"."name" IS NULL'
else:
assert ret is None
if isinstance(Migrate.ddl, (SqliteDDL, MysqlDDL)):
return
ret = Migrate.ddl.set_comment(Category, Category._meta.fields_map.get("name").describe(False))
assert ret == 'COMMENT ON COLUMN "category"."name" IS NULL'
ret = Migrate.ddl.set_comment(Category, Category._meta.fields_map.get("user"))
if isinstance(Migrate.ddl, PostgresDDL):
assert ret == 'COMMENT ON COLUMN "category"."user" IS \'User\''
else:
assert ret is None
ret = Migrate.ddl.set_comment(Category, Category._meta.fields_map.get("user").describe(False))
assert ret == 'COMMENT ON COLUMN "category"."user_id" IS \'User\''
def test_drop_column():
@@ -148,18 +156,15 @@ def test_add_index():
index = Migrate.ddl.add_index(Category, ["name"])
index_u = Migrate.ddl.add_index(Category, ["name"], True)
if isinstance(Migrate.ddl, MysqlDDL):
assert index == "ALTER TABLE `category` ADD INDEX `idx_category_name_8b0cb9` (`name`)"
assert index == "ALTER TABLE `category` ADD INDEX `idx_category_name_8b0cb9` (`name`)"
assert (
index_u == "ALTER TABLE `category` ADD UNIQUE INDEX `uid_category_name_8b0cb9` (`name`)"
)
elif isinstance(Migrate.ddl, PostgresDDL):
assert index == 'CREATE INDEX "idx_category_name_8b0cb9" ON "category" ("name")'
assert (
index_u
== 'ALTER TABLE "category" ADD CONSTRAINT "uid_category_name_8b0cb9" UNIQUE ("name")'
)
assert index_u == 'CREATE UNIQUE INDEX "uid_category_name_8b0cb9" ON "category" ("name")'
else:
assert index == 'ALTER TABLE "category" ADD INDEX "idx_category_name_8b0cb9" ("name")'
assert index == 'ALTER TABLE "category" ADD INDEX "idx_category_name_8b0cb9" ("name")'
assert (
index_u == 'ALTER TABLE "category" ADD UNIQUE INDEX "uid_category_name_8b0cb9" ("name")'
)
@@ -173,14 +178,16 @@ def test_drop_index():
assert ret_u == "ALTER TABLE `category` DROP INDEX `uid_category_name_8b0cb9`"
elif isinstance(Migrate.ddl, PostgresDDL):
assert ret == 'DROP INDEX "idx_category_name_8b0cb9"'
assert ret_u == 'ALTER TABLE "category" DROP CONSTRAINT "uid_category_name_8b0cb9"'
assert ret_u == 'DROP INDEX "uid_category_name_8b0cb9"'
else:
assert ret == 'ALTER TABLE "category" DROP INDEX "idx_category_name_8b0cb9"'
assert ret_u == 'ALTER TABLE "category" DROP INDEX "uid_category_name_8b0cb9"'
def test_add_fk():
ret = Migrate.ddl.add_fk(Category, Category._meta.fields_map.get("user"))
ret = Migrate.ddl.add_fk(
Category, Category._meta.fields_map.get("user").describe(False), User.describe(False)
)
if isinstance(Migrate.ddl, MysqlDDL):
assert (
ret
@@ -194,7 +201,9 @@ def test_add_fk():
def test_drop_fk():
ret = Migrate.ddl.drop_fk(Category, Category._meta.fields_map.get("user"))
ret = Migrate.ddl.drop_fk(
Category, Category._meta.fields_map.get("user").describe(False), User.describe(False)
)
if isinstance(Migrate.ddl, MysqlDDL):
assert ret == "ALTER TABLE `category` DROP FOREIGN KEY `fk_category_user_e2e3874c`"
elif isinstance(Migrate.ddl, PostgresDDL):

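The test_ddl changes above capture the v0.5 refactor in which the DDL helpers accept serialized field descriptions, Field.describe(False), instead of live field objects. A minimal sketch of what that call returns, using an illustrative stand-in model rather than the project's tests/models.py:

import asyncio

from tortoise import Tortoise, fields
from tortoise.models import Model


class Category(Model):
    # Illustrative stand-in for tests/models.py's Category.
    name = fields.CharField(max_length=200)


async def main():
    await Tortoise.init(db_url="sqlite://:memory:", modules={"models": ["__main__"]})
    # describe(False) returns a plain dict of field metadata: name, db_column,
    # nullable, unique, indexed, default, constraints, db_field_types, etc.
    # describe(True) renders type entries as strings, the shape used by the
    # old_models_describe snapshot in test_migrate.py below.
    print(Category._meta.fields_map["name"].describe(False))
    await Tortoise.close_connections()


asyncio.run(main())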

@@ -1,60 +1,871 @@
import pytest
from pytest_mock import MockerFixture
from tortoise import Tortoise
from aerich.ddl.mysql import MysqlDDL
from aerich.ddl.postgres import PostgresDDL
from aerich.ddl.sqlite import SqliteDDL
from aerich.exceptions import NotSupportError
from aerich.migrate import Migrate
from aerich.utils import get_models_describe
old_models_describe = {
"models.Category": {
"name": "models.Category",
"app": "models",
"table": "category",
"abstract": False,
"description": None,
"docstring": None,
"unique_together": [],
"pk_field": {
"name": "id",
"field_type": "IntField",
"db_column": "id",
"python_type": "int",
"generated": True,
"nullable": False,
"unique": True,
"indexed": True,
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
"db_field_types": {"": "INT"},
},
"data_fields": [
{
"name": "slug",
"field_type": "CharField",
"db_column": "slug",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"max_length": 200},
"db_field_types": {"": "VARCHAR(200)"},
},
{
"name": "name",
"field_type": "CharField",
"db_column": "name",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"max_length": 200},
"db_field_types": {"": "VARCHAR(200)"},
},
{
"name": "created_at",
"field_type": "DatetimeField",
"db_column": "created_at",
"python_type": "datetime.datetime",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"readOnly": True},
"db_field_types": {
"": "TIMESTAMP",
"mysql": "DATETIME(6)",
"postgres": "TIMESTAMPTZ",
},
"auto_now_add": True,
"auto_now": False,
},
{
"name": "user_id",
"field_type": "IntField",
"db_column": "user_id",
"python_type": "int",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": "User",
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
"db_field_types": {"": "INT"},
},
],
"fk_fields": [
{
"name": "user",
"field_type": "ForeignKeyFieldInstance",
"python_type": "models.User",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": "User",
"docstring": None,
"constraints": {},
"raw_field": "user_id",
"on_delete": "CASCADE",
}
],
"backward_fk_fields": [],
"o2o_fields": [],
"backward_o2o_fields": [],
"m2m_fields": [
{
"name": "products",
"field_type": "ManyToManyFieldInstance",
"python_type": "models.Product",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {},
"model_name": "models.Product",
"related_name": "categories",
"forward_key": "product_id",
"backward_key": "category_id",
"through": "product_category",
"on_delete": "CASCADE",
"_generated": True,
}
],
},
"models.Config": {
"name": "models.Config",
"app": "models",
"table": "config",
"abstract": False,
"description": None,
"docstring": None,
"unique_together": [],
"pk_field": {
"name": "id",
"field_type": "IntField",
"db_column": "id",
"python_type": "int",
"generated": True,
"nullable": False,
"unique": True,
"indexed": True,
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
"db_field_types": {"": "INT"},
},
"data_fields": [
{
"name": "label",
"field_type": "CharField",
"db_column": "label",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"max_length": 200},
"db_field_types": {"": "VARCHAR(200)"},
},
{
"name": "key",
"field_type": "CharField",
"db_column": "key",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"max_length": 20},
"db_field_types": {"": "VARCHAR(20)"},
},
{
"name": "value",
"field_type": "JSONField",
"db_column": "value",
"python_type": "Union[dict, list]",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {},
"db_field_types": {"": "TEXT", "postgres": "JSONB"},
},
{
"name": "status",
"field_type": "IntEnumFieldInstance",
"db_column": "status",
"python_type": "int",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": 1,
"description": "on: 1\noff: 0",
"docstring": None,
"constraints": {"ge": -32768, "le": 32767},
"db_field_types": {"": "SMALLINT"},
},
],
"fk_fields": [],
"backward_fk_fields": [],
"o2o_fields": [],
"backward_o2o_fields": [],
"m2m_fields": [],
},
"models.Email": {
"name": "models.Email",
"app": "models",
"table": "email",
"abstract": False,
"description": None,
"docstring": None,
"unique_together": [],
"pk_field": {
"name": "id",
"field_type": "IntField",
"db_column": "id",
"python_type": "int",
"generated": True,
"nullable": False,
"unique": True,
"indexed": True,
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
"db_field_types": {"": "INT"},
},
"data_fields": [
{
"name": "email",
"field_type": "CharField",
"db_column": "email",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"max_length": 200},
"db_field_types": {"": "VARCHAR(200)"},
},
{
"name": "is_primary",
"field_type": "BooleanField",
"db_column": "is_primary",
"python_type": "bool",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": False,
"description": None,
"docstring": None,
"constraints": {},
"db_field_types": {"": "BOOL", "sqlite": "INT"},
},
{
"name": "user_id",
"field_type": "IntField",
"db_column": "user_id",
"python_type": "int",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
"db_field_types": {"": "INT"},
},
],
"fk_fields": [
{
"name": "user",
"field_type": "ForeignKeyFieldInstance",
"python_type": "models.User",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {},
"raw_field": "user_id",
"on_delete": "CASCADE",
}
],
"backward_fk_fields": [],
"o2o_fields": [],
"backward_o2o_fields": [],
"m2m_fields": [],
},
"models.Product": {
"name": "models.Product",
"app": "models",
"table": "product",
"abstract": False,
"description": None,
"docstring": None,
"unique_together": [],
"pk_field": {
"name": "id",
"field_type": "IntField",
"db_column": "id",
"python_type": "int",
"generated": True,
"nullable": False,
"unique": True,
"indexed": True,
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
"db_field_types": {"": "INT"},
},
"data_fields": [
{
"name": "name",
"field_type": "CharField",
"db_column": "name",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"max_length": 50},
"db_field_types": {"": "VARCHAR(50)"},
},
{
"name": "view_num",
"field_type": "IntField",
"db_column": "view_num",
"python_type": "int",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": "View Num",
"docstring": None,
"constraints": {"ge": -2147483648, "le": 2147483647},
"db_field_types": {"": "INT"},
},
{
"name": "sort",
"field_type": "IntField",
"db_column": "sort",
"python_type": "int",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": -2147483648, "le": 2147483647},
"db_field_types": {"": "INT"},
},
{
"name": "is_reviewed",
"field_type": "BooleanField",
"db_column": "is_reviewed",
"python_type": "bool",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": "Is Reviewed",
"docstring": None,
"constraints": {},
"db_field_types": {"": "BOOL", "sqlite": "INT"},
},
{
"name": "type",
"field_type": "IntEnumFieldInstance",
"db_column": "type",
"python_type": "int",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": "Product Type",
"docstring": None,
"constraints": {"ge": -32768, "le": 32767},
"db_field_types": {"": "SMALLINT"},
},
{
"name": "image",
"field_type": "CharField",
"db_column": "image",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"max_length": 200},
"db_field_types": {"": "VARCHAR(200)"},
},
{
"name": "body",
"field_type": "TextField",
"db_column": "body",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {},
"db_field_types": {"": "TEXT", "mysql": "LONGTEXT"},
},
{
"name": "created_at",
"field_type": "DatetimeField",
"db_column": "created_at",
"python_type": "datetime.datetime",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"readOnly": True},
"db_field_types": {
"": "TIMESTAMP",
"mysql": "DATETIME(6)",
"postgres": "TIMESTAMPTZ",
},
"auto_now_add": True,
"auto_now": False,
},
],
"fk_fields": [],
"backward_fk_fields": [],
"o2o_fields": [],
"backward_o2o_fields": [],
"m2m_fields": [
{
"name": "categories",
"field_type": "ManyToManyFieldInstance",
"python_type": "models.Category",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {},
"model_name": "models.Category",
"related_name": "products",
"forward_key": "category_id",
"backward_key": "product_id",
"through": "product_category",
"on_delete": "CASCADE",
"_generated": False,
}
],
},
"models.User": {
"name": "models.User",
"app": "models",
"table": "user",
"abstract": False,
"description": None,
"docstring": None,
"unique_together": [],
"pk_field": {
"name": "id",
"field_type": "IntField",
"db_column": "id",
"python_type": "int",
"generated": True,
"nullable": False,
"unique": True,
"indexed": True,
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
"db_field_types": {"": "INT"},
},
"data_fields": [
{
"name": "username",
"field_type": "CharField",
"db_column": "username",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"max_length": 20},
"db_field_types": {"": "VARCHAR(20)"},
},
{
"name": "password",
"field_type": "CharField",
"db_column": "password",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"max_length": 200},
"db_field_types": {"": "VARCHAR(200)"},
},
{
"name": "last_login",
"field_type": "DatetimeField",
"db_column": "last_login",
"python_type": "datetime.datetime",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": "<function None.now>",
"description": "Last Login",
"docstring": None,
"constraints": {},
"db_field_types": {
"": "TIMESTAMP",
"mysql": "DATETIME(6)",
"postgres": "TIMESTAMPTZ",
},
"auto_now_add": False,
"auto_now": False,
},
{
"name": "is_active",
"field_type": "BooleanField",
"db_column": "is_active",
"python_type": "bool",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": True,
"description": "Is Active",
"docstring": None,
"constraints": {},
"db_field_types": {"": "BOOL", "sqlite": "INT"},
},
{
"name": "is_superuser",
"field_type": "BooleanField",
"db_column": "is_superuser",
"python_type": "bool",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": False,
"description": "Is SuperUser",
"docstring": None,
"constraints": {},
"db_field_types": {"": "BOOL", "sqlite": "INT"},
},
{
"name": "avatar",
"field_type": "CharField",
"db_column": "avatar",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": "",
"description": None,
"docstring": None,
"constraints": {"max_length": 200},
"db_field_types": {"": "VARCHAR(200)"},
},
{
"name": "intro",
"field_type": "TextField",
"db_column": "intro",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": "",
"description": None,
"docstring": None,
"constraints": {},
"db_field_types": {"": "TEXT", "mysql": "LONGTEXT"},
},
],
"fk_fields": [],
"backward_fk_fields": [
{
"name": "categorys",
"field_type": "BackwardFKRelation",
"python_type": "models.Category",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": "User",
"docstring": None,
"constraints": {},
},
{
"name": "emails",
"field_type": "BackwardFKRelation",
"python_type": "models.Email",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {},
},
],
"o2o_fields": [],
"backward_o2o_fields": [],
"m2m_fields": [],
},
"models.Aerich": {
"name": "models.Aerich",
"app": "models",
"table": "aerich",
"abstract": False,
"description": None,
"docstring": None,
"unique_together": [],
"pk_field": {
"name": "id",
"field_type": "IntField",
"db_column": "id",
"python_type": "int",
"generated": True,
"nullable": False,
"unique": True,
"indexed": True,
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
"db_field_types": {"": "INT"},
},
"data_fields": [
{
"name": "version",
"field_type": "CharField",
"db_column": "version",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"max_length": 255},
"db_field_types": {"": "VARCHAR(255)"},
},
{
"name": "app",
"field_type": "CharField",
"db_column": "app",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"max_length": 20},
"db_field_types": {"": "VARCHAR(20)"},
},
{
"name": "content",
"field_type": "JSONField",
"db_column": "content",
"python_type": "Union[dict, list]",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {},
"db_field_types": {"": "TEXT", "postgres": "JSONB"},
},
],
"fk_fields": [],
"backward_fk_fields": [],
"o2o_fields": [],
"backward_o2o_fields": [],
"m2m_fields": [],
},
}
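For reference, old_models_describe above has the shape produced by aerich's get_models_describe helper. A rough sketch of such a helper, assuming it simply collects each registered model's Model.describe() output (serializable by default); the project's actual implementation may differ:

from typing import Dict

from tortoise import Tortoise


def get_models_describe(app: str) -> Dict:
    # Key each model's serializable describe() dict by its dotted name,
    # e.g. "models.User", mirroring the structure of old_models_describe.
    ret = {}
    for model in Tortoise.apps.get(app).values():
        describe = model.describe()
        ret[describe.get("name")] = describe
    return ret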
def test_migrate(mocker: MockerFixture):
mocker.patch("click.prompt", return_value=True)
apps = Tortoise.apps
models = apps.get("models")
diff_models = apps.get("diff_models")
Migrate.diff_models(diff_models, models)
"""
models.py diff with old_models.py
- change email pk: id -> email_id
- add field: Email.address
- add fk: Config.user
- drop fk: Email.user
- drop field: User.avatar
- add index: Email.email
- add many to many: Email.users
- remove unique: User.username
- change column: length User.password
- add unique_together: (name,type) of Product
- alter default: Config.status
- rename column: Product.image -> Product.pic
"""
mocker.patch("click.prompt", side_effect=(False, True))
models_describe = get_models_describe("models")
Migrate.app = "models"
if isinstance(Migrate.ddl, SqliteDDL):
with pytest.raises(NotSupportError):
Migrate.diff_models(models, diff_models, False)
Migrate.diff_models(old_models_describe, models_describe)
Migrate.diff_models(models_describe, old_models_describe, False)
else:
Migrate.diff_models(models, diff_models, False)
Migrate.diff_models(old_models_describe, models_describe)
Migrate.diff_models(models_describe, old_models_describe, False)
Migrate._merge_operators()
if isinstance(Migrate.ddl, MysqlDDL):
assert Migrate.upgrade_operators == [
"ALTER TABLE `email` DROP FOREIGN KEY `fk_email_user_5b58673d`",
"ALTER TABLE `category` ADD `name` VARCHAR(200) NOT NULL",
"ALTER TABLE `user` ADD UNIQUE INDEX `uid_user_usernam_9987ab` (`username`)",
"ALTER TABLE `user` RENAME COLUMN `last_login_at` TO `last_login`",
]
assert Migrate.downgrade_operators == [
"ALTER TABLE `category` DROP COLUMN `name`",
"ALTER TABLE `user` DROP INDEX `uid_user_usernam_9987ab`",
"ALTER TABLE `user` RENAME COLUMN `last_login` TO `last_login_at`",
"ALTER TABLE `email` ADD CONSTRAINT `fk_email_user_5b58673d` FOREIGN KEY "
"(`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE",
]
assert sorted(Migrate.upgrade_operators) == sorted(
[
"ALTER TABLE `config` ADD `user_id` INT NOT NULL COMMENT 'User'",
"ALTER TABLE `config` ADD CONSTRAINT `fk_config_user_17daa970` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE",
"ALTER TABLE `config` ALTER COLUMN `status` DROP DEFAULT",
"ALTER TABLE `email` ADD `address` VARCHAR(200) NOT NULL",
"ALTER TABLE `email` DROP COLUMN `user_id`",
"ALTER TABLE `product` RENAME COLUMN `image` TO `pic`",
"ALTER TABLE `email` RENAME COLUMN `id` TO `email_id`",
"ALTER TABLE `email` DROP FOREIGN KEY `fk_email_user_5b58673d`",
"ALTER TABLE `email` ADD INDEX `idx_email_email_4a1a33` (`email`)",
"ALTER TABLE `product` ADD UNIQUE INDEX `uid_product_name_f14935` (`name`, `type`)",
"ALTER TABLE `product` ALTER COLUMN `view_num` SET DEFAULT 0",
"ALTER TABLE `user` DROP COLUMN `avatar`",
"ALTER TABLE `user` CHANGE password password VARCHAR(100)",
"ALTER TABLE `user` ADD UNIQUE INDEX `uid_user_usernam_9987ab` (`username`)",
"CREATE TABLE `email_user` (`email_id` INT NOT NULL REFERENCES `email` (`email_id`) ON DELETE CASCADE,`user_id` INT NOT NULL REFERENCES `user` (`id`) ON DELETE CASCADE) CHARACTER SET utf8mb4",
]
)
assert sorted(Migrate.downgrade_operators) == sorted(
[
"ALTER TABLE `config` DROP COLUMN `user_id`",
"ALTER TABLE `config` DROP FOREIGN KEY `fk_config_user_17daa970`",
"ALTER TABLE `config` ALTER COLUMN `status` SET DEFAULT 1",
"ALTER TABLE `email` ADD `user_id` INT NOT NULL",
"ALTER TABLE `email` DROP COLUMN `address`",
"ALTER TABLE `product` RENAME COLUMN `pic` TO `image`",
"ALTER TABLE `email` RENAME COLUMN `email_id` TO `id`",
"ALTER TABLE `email` ADD CONSTRAINT `fk_email_user_5b58673d` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE",
"ALTER TABLE `email` DROP INDEX `idx_email_email_4a1a33`",
"ALTER TABLE `product` DROP INDEX `uid_product_name_f14935`",
"ALTER TABLE `product` ALTER COLUMN `view_num` DROP DEFAULT",
"ALTER TABLE `user` ADD `avatar` VARCHAR(200) NOT NULL DEFAULT ''",
"ALTER TABLE `user` DROP INDEX `idx_user_usernam_9987ab`",
"ALTER TABLE `user` CHANGE password password VARCHAR(200)",
"DROP TABLE IF EXISTS `email_user`",
]
)
elif isinstance(Migrate.ddl, PostgresDDL):
assert Migrate.upgrade_operators == [
'ALTER TABLE "email" DROP CONSTRAINT "fk_email_user_5b58673d"',
'ALTER TABLE "category" ADD "name" VARCHAR(200) NOT NULL',
'ALTER TABLE "user" ADD CONSTRAINT "uid_user_usernam_9987ab" UNIQUE ("username")',
'ALTER TABLE "user" RENAME COLUMN "last_login_at" TO "last_login"',
]
assert Migrate.downgrade_operators == [
'ALTER TABLE "category" DROP COLUMN "name"',
'ALTER TABLE "user" DROP CONSTRAINT "uid_user_usernam_9987ab"',
'ALTER TABLE "user" RENAME COLUMN "last_login" TO "last_login_at"',
'ALTER TABLE "email" ADD CONSTRAINT "fk_email_user_5b58673d" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE',
]
assert sorted(Migrate.upgrade_operators) == sorted(
[
'ALTER TABLE "config" ADD "user_id" INT NOT NULL',
'ALTER TABLE "config" ADD CONSTRAINT "fk_config_user_17daa970" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE',
'ALTER TABLE "config" ALTER COLUMN "status" DROP DEFAULT',
'ALTER TABLE "email" ADD "address" VARCHAR(200) NOT NULL',
'ALTER TABLE "email" DROP COLUMN "user_id"',
'ALTER TABLE "product" RENAME COLUMN "image" TO "pic"',
'ALTER TABLE "email" RENAME COLUMN "id" TO "email_id"',
'ALTER TABLE "email" DROP CONSTRAINT "fk_email_user_5b58673d"',
'CREATE INDEX "idx_email_email_4a1a33" ON "email" ("email")',
'CREATE UNIQUE INDEX "uid_product_name_f14935" ON "product" ("name", "type")',
'ALTER TABLE "product" ALTER COLUMN "view_num" SET DEFAULT 0',
'ALTER TABLE "user" DROP COLUMN "avatar"',
'ALTER TABLE "user" CHANGE password password VARCHAR(100)',
'CREATE UNIQUE INDEX "uid_user_usernam_9987ab" ON "user" ("username")',
'CREATE TABLE "email_user" ("email_id" INT NOT NULL REFERENCES "email" ("email_id") ON DELETE CASCADE,"user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE)',
]
)
assert sorted(Migrate.downgrade_operators) == sorted(
[
'ALTER TABLE "config" DROP COLUMN "user_id"',
'ALTER TABLE "config" DROP CONSTRAINT "fk_config_user_17daa970"',
'ALTER TABLE "config" ALTER COLUMN "status" SET DEFAULT 1',
'ALTER TABLE "email" ADD "user_id" INT NOT NULL',
'ALTER TABLE "email" DROP COLUMN "address"',
'ALTER TABLE "product" RENAME COLUMN "pic" TO "image"',
'ALTER TABLE "email" RENAME COLUMN "email_id" TO "id"',
'ALTER TABLE "email" ADD CONSTRAINT "fk_email_user_5b58673d" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE',
'DROP INDEX "idx_email_email_4a1a33"',
'ALTER TABLE "product" ALTER COLUMN "view_num" DROP DEFAULT',
'ALTER TABLE "user" ADD "avatar" VARCHAR(200) NOT NULL DEFAULT \'\'',
'DROP INDEX "idx_user_usernam_9987ab"',
'DROP INDEX "uid_product_name_f14935"',
'ALTER TABLE "user" CHANGE password password VARCHAR(200)',
'DROP TABLE IF EXISTS "email_user"',
]
)
elif isinstance(Migrate.ddl, SqliteDDL):
assert Migrate.upgrade_operators == [
'ALTER TABLE "email" DROP FOREIGN KEY "fk_email_user_5b58673d"',
'ALTER TABLE "category" ADD "name" VARCHAR(200) NOT NULL',
'ALTER TABLE "user" ADD UNIQUE INDEX "uid_user_usernam_9987ab" ("username")',
'ALTER TABLE "user" RENAME COLUMN "last_login_at" TO "last_login"',
'ALTER TABLE "config" ADD "user_id" INT NOT NULL /* User */',
'ALTER TABLE "config" ADD CONSTRAINT "fk_config_user_17daa970" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE',
]
assert Migrate.downgrade_operators == []
@@ -62,18 +873,18 @@ def test_sort_all_version_files(mocker):
mocker.patch(
"os.listdir",
return_value=[
"1_datetime_update.json",
"11_datetime_update.json",
"10_datetime_update.json",
"2_datetime_update.json",
"1_datetime_update.sql",
"11_datetime_update.sql",
"10_datetime_update.sql",
"2_datetime_update.sql",
],
)
Migrate.migrate_location = "."
assert Migrate.get_all_version_files() == [
"1_datetime_update.json",
"2_datetime_update.json",
"10_datetime_update.json",
"11_datetime_update.json",
"1_datetime_update.sql",
"2_datetime_update.sql",
"10_datetime_update.sql",
"11_datetime_update.sql",
]
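The expected ordering above is numeric rather than lexicographic (2 before 10 and 11). A minimal sketch of a sort key that produces it, assuming version files are named <number>_<label>.sql; this is an illustration, not necessarily aerich's exact implementation:

import os
from typing import List


def get_all_version_files(migrate_location: str) -> List[str]:
    # Sort by the integer prefix before the first underscore so that
    # "10_..." sorts after "2_..." instead of before it.
    return sorted(
        (f for f in os.listdir(migrate_location) if f.endswith(".sql")),
        key=lambda name: int(name.split("_")[0]),
    )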