16 Commits

Author SHA1 Message Date
long2ice 6339dc86a8 Fix migrate to new database error 2020-10-14 20:33:23 +08:00
long2ice 768747140a update changelog 2020-10-12 21:01:53 +08:00
long2ice 1fde3cd04e fix init KeyError (#61) 2020-10-12 20:59:13 +08:00
long2ice d0ce545ff5 Fix first version error 2020-10-10 15:07:09 +08:00
long2ice 09b89ed7d0 update README 2020-10-09 15:41:37 +08:00
long2ice 86c8382593 update README 2020-10-09 15:34:48 +08:00
long2ice 48e3ff48a3 update README.md 2020-10-09 12:04:36 +08:00
long2ice 1bf6d45bb0 Merge pull request #60 from tortoise/refactoring (Refactoring migrate logic) 2020-10-09 11:54:37 +08:00
long2ice 342f4cdd3b complete refactoring 2020-10-09 11:53:50 +08:00
long2ice 8cace21fde refactoring migrate logic 2020-10-09 00:05:22 +08:00
long2ice 9889d9492b add ? when ask rename 2020-09-28 17:22:05 +08:00
long2ice 823368aea8 fix event loop error 2020-09-28 17:16:35 +08:00
long2ice 6b1ad46cf1 update README.md 2020-09-28 12:50:43 +08:00
long2ice ce8c0b1f06 Support db_constraint in fk 2020-09-28 10:40:04 +08:00
long2ice 43922d3734 update CHANGELOG.md 2020-09-27 14:18:19 +08:00
long2ice 48c5318737 remove asyncclick 2020-09-25 18:27:08 +08:00
15 changed files with 269 additions and 228 deletions

View File

@@ -1,7 +1,33 @@
# ChangeLog
## 0.3
### 0.3.2
- Fix migrate to new database error. (#62)
### 0.3.1
- Fix first version error.
- Fix init error. (#61)
### 0.3.0
- Refactor the migrate logic; this version is not compatible with previous versions.
- `old_models.py` is no longer needed; its content is now stored in the database.
- Upgrade steps (a sketch is shown below):
1. Upgrade the aerich version.
2. Drop the aerich table in the database.
3. Delete the `migrations/{app}` folder and rerun `aerich init-db`.
4. Update your models and run `aerich migrate` as usual.
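A minimal sketch of these steps, assuming a MySQL database named `mydb` and the default `./migrations` location (adjust both for your project):

```shell
# 1. upgrade aerich itself
pip install --upgrade aerich
# 2. drop the aerich table (MySQL shown; use the equivalent for your database)
mysql -u root -p mydb -e "DROP TABLE aerich;"
# 3. delete the old migration files for the app and re-initialize
rm -rf migrations/models
aerich init-db
# 4. from now on, update models and migrate as usual
aerich migrate --name update
```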
## 0.2
### 0.2.5
- Fix windows support. (#46)
- Support `db_constraint` in fk; for m2m you should manually define the table with fk. (#52)
### 0.2.4
- Raise an error for unsupported SQLite features.

View File

@@ -40,7 +40,7 @@ test_sqlite:
$(py_warn) TEST_DB=sqlite://:memory: py.test
test_mysql:
$(py_warn) TEST_DB="mysql://root:$(MYSQL_PASS)@$(MYSQL_HOST):$(MYSQL_PORT)/test_\{\}" pytest -v -s
$(py_warn) TEST_DB="mysql://root:$(MYSQL_PASS)@$(MYSQL_HOST):$(MYSQL_PORT)/test_\{\}" pytest -vv -s
test_postgres:
$(py_warn) TEST_DB="postgres://postgres:$(POSTGRES_PASS)@$(POSTGRES_HOST):$(POSTGRES_PORT)/test_\{\}" pytest

View File

@@ -7,12 +7,10 @@
## Introduction
Tortoise-ORM is the best asyncio ORM now, but it lacks a database
migrations tool like alembic for SQLAlchemy, or Django ORM with its
own migrations tool.
Aerich is a database migrations tool for Tortoise-ORM, which is like alembic for SQLAlchemy, or Django ORM with its
own migrations solution.
This project aims to be the best migrations tool for Tortoise-ORM and is
written by one of the contributors of Tortoise-ORM.
**If you upgrade aerich from <= 0.2.5 to >= 0.3.0, see [changelog](https://github.com/tortoise/aerich/blob/dev/CHANGELOG.md) for upgrade steps.**
## Install
@@ -25,7 +23,7 @@ Just install from pypi:
## Quick Start
```shell
$ aerich -h
> aerich -h
Usage: aerich [OPTIONS] COMMAND [ARGS]...
@@ -37,7 +35,7 @@ Options:
-h, --help Show this message and exit.
Commands:
downgrade Downgrade to previous version.
downgrade Downgrade to specified version.
heads Show current available heads in migrate location.
history List all migrate items.
init Init config file and generate root migrate location.
@@ -66,7 +64,7 @@ TORTOISE_ORM = {
### Initialization
```shell
$ aerich init -h
> aerich init -h
Usage: aerich init [OPTIONS]
@@ -82,7 +80,7 @@ Options:
Init config file and location:
```shell
$ aerich init -t tests.backends.mysql.TORTOISE_ORM
> aerich init -t tests.backends.mysql.TORTOISE_ORM
Success create migrate location ./migrations
Success generate config file aerich.ini
@@ -91,7 +89,7 @@ Success generate config file aerich.ini
### Init db
```shell
$ aerich init-db
> aerich init-db
Success create app migrate location ./migrations/models
Success generate schema for app "models"
@@ -103,7 +101,7 @@ If your Tortoise-ORM app is not default `models`, you must specify
### Update models and make migrate
```shell
$ aerich migrate --name drop_column
> aerich migrate --name drop_column
Success migrate 1_202029051520102929_drop_column.json
```
@@ -113,30 +111,44 @@ Format of migrate filename is
And if `aerich` guesses that you are renaming a column, it will ask `Rename {old_column} to {new_column} [True]`; choose `True` to rename the column without dropping it, or `False` to drop the column and create a new one.
If you use `MySQL`, note that only MySQL 8.0+ supports the `rename..to` syntax.
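For illustration, a hypothetical rename of `last_login_at` to `last_login` (field names borrowed from the test models) might look like this; the exact prompt and file name will differ in your project:

```shell
> aerich migrate --name rename_last_login
Rename last_login_at to last_login? [True]: True
Success migrate 1_202029051520102929_rename_last_login.json
```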
### Upgrade to latest version
```shell
$ aerich upgrade
> aerich upgrade
Success upgrade 1_202029051520102929_drop_column.json
```
Now your db is migrated to the latest version.
### Downgrade to previous version
### Downgrade to specified version
```shell
$ aerich downgrade
> aerich downgrade -h
Usage: aerich downgrade [OPTIONS]
Downgrade to specified version.
Options:
-v, --version INTEGER Specified version, default to last. [default: -1]
-h, --help Show this message and exit.
```
```shell
> aerich downgrade
Success downgrade 1_202029051520102929_drop_column.json
```
Now your db is rolled back to the previous version.
Now your db is rolled back to the specified version.
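To roll back through a specific version rather than only the latest one, pass `-v`; a hypothetical run with a single earlier migration might look like:

```shell
> aerich downgrade -v 1
Success downgrade 1_202029051520102929_drop_column.json
```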
### Show history
```shell
$ aerich history
> aerich history
1_202029051520102929_drop_column.json
```
@@ -144,7 +156,7 @@ $ aerich history
### Show heads to be migrated
```shell
$ aerich heads
> aerich heads
1_202029051520102929_drop_column.json
```

View File

@@ -1 +1 @@
__version__ = "0.2.4"
__version__ = "0.3.2"

View File

@@ -1,10 +1,12 @@
import asyncio
import json
import os
import sys
from configparser import ConfigParser
from functools import wraps
import asyncclick as click
from asyncclick import Context, UsageError
import click
from click import Context, UsageError
from tortoise import Tortoise, generate_schema_for_client
from tortoise.exceptions import OperationalError
from tortoise.transactions import in_transaction
@@ -20,6 +22,19 @@ from .models import Aerich
parser = ConfigParser()
def coro(f):
@wraps(f)
def wrapper(*args, **kwargs):
loop = asyncio.get_event_loop()
ctx = args[0]
loop.run_until_complete(f(*args, **kwargs))
app = ctx.obj.get("app")
if app:
Migrate.remove_old_model_file(app, ctx.obj["location"])
return wrapper
@click.group(context_settings={"help_option_names": ["-h", "--help"]})
@click.version_option(__version__, "-V", "--version")
@click.option(
@@ -34,6 +49,7 @@ parser = ConfigParser()
help="Name of section in .ini file to use for aerich config.",
)
@click.pass_context
@coro
async def cli(ctx: Context, config, app, name):
ctx.ensure_object(dict)
ctx.obj["config_file"] = config
@@ -55,7 +71,7 @@ async def cli(ctx: Context, config, app, name):
ctx.obj["config"] = tortoise_config
ctx.obj["location"] = location
ctx.obj["app"] = app
Migrate.app = app
if invoked_subcommand != "init-db":
await Migrate.init_with_old_models(tortoise_config, app, location)
@@ -63,67 +79,89 @@ async def cli(ctx: Context, config, app, name):
@cli.command(help="Generate migrate changes file.")
@click.option("--name", default="update", show_default=True, help="Migrate name.")
@click.pass_context
@coro
async def migrate(ctx: Context, name):
config = ctx.obj["config"]
location = ctx.obj["location"]
app = ctx.obj["app"]
ret = await Migrate.migrate(name)
if not ret:
return click.secho("No changes detected", fg=Color.yellow)
Migrate.write_old_models(config, app, location)
click.secho(f"Success migrate {ret}", fg=Color.green)
@cli.command(help="Upgrade to latest version.")
@cli.command(help="Upgrade to specified version.")
@click.pass_context
@coro
async def upgrade(ctx: Context):
config = ctx.obj["config"]
app = ctx.obj["app"]
location = ctx.obj["location"]
migrated = False
for version in Migrate.get_all_version_files():
for version_file in Migrate.get_all_version_files():
try:
exists = await Aerich.exists(version=version, app=app)
exists = await Aerich.exists(version=version_file, app=app)
except OperationalError:
exists = False
if not exists:
async with in_transaction(get_app_connection_name(config, app)) as conn:
file_path = os.path.join(Migrate.migrate_location, version)
file_path = os.path.join(Migrate.migrate_location, version_file)
with open(file_path, "r", encoding="utf-8") as f:
content = json.load(f)
upgrade_query_list = content.get("upgrade")
for upgrade_query in upgrade_query_list:
await conn.execute_script(upgrade_query)
await Aerich.create(version=version, app=app)
click.secho(f"Success upgrade {version}", fg=Color.green)
await Aerich.create(
version=version_file,
app=app,
content=Migrate.get_models_content(config, app, location),
)
click.secho(f"Success upgrade {version_file}", fg=Color.green)
migrated = True
if not migrated:
click.secho("No migrate items", fg=Color.yellow)
@cli.command(help="Downgrade to previous version.")
@cli.command(help="Downgrade to specified version.")
@click.option(
"-v",
"--version",
default=-1,
type=int,
show_default=True,
help="Specified version, default to last.",
)
@click.pass_context
async def downgrade(ctx: Context):
@coro
async def downgrade(ctx: Context, version: int):
app = ctx.obj["app"]
config = ctx.obj["config"]
last_version = await Migrate.get_last_version()
if not last_version:
return click.secho("No last version found", fg=Color.yellow)
file = last_version.version
async with in_transaction(get_app_connection_name(config, app)) as conn:
file_path = os.path.join(Migrate.migrate_location, file)
with open(file_path, "r", encoding="utf-8") as f:
content = json.load(f)
downgrade_query_list = content.get("downgrade")
if not downgrade_query_list:
return click.secho("No downgrade item found", fg=Color.yellow)
for downgrade_query in downgrade_query_list:
await conn.execute_query(downgrade_query)
await last_version.delete()
return click.secho(f"Success downgrade {file}", fg=Color.green)
if version == -1:
specified_version = await Migrate.get_last_version()
else:
specified_version = await Aerich.filter(app=app, version__startswith=f"{version}_").first()
if not specified_version:
return click.secho("No specified version found", fg=Color.yellow)
if version == -1:
versions = [specified_version]
else:
versions = await Aerich.filter(app=app, pk__gte=specified_version.pk)
for version in versions:
file = version.version
async with in_transaction(get_app_connection_name(config, app)) as conn:
file_path = os.path.join(Migrate.migrate_location, file)
with open(file_path, "r", encoding="utf-8") as f:
content = json.load(f)
downgrade_query_list = content.get("downgrade")
if not downgrade_query_list:
return click.secho("No downgrade item found", fg=Color.yellow)
for downgrade_query in downgrade_query_list:
await conn.execute_query(downgrade_query)
await version.delete()
os.unlink(file_path)
click.secho(f"Success downgrade {file}", fg=Color.green)
@cli.command(help="Show current available heads in migrate location.")
@click.pass_context
@coro
async def heads(ctx: Context):
app = ctx.obj["app"]
versions = Migrate.get_all_version_files()
@@ -133,11 +171,12 @@ async def heads(ctx: Context):
click.secho(version, fg=Color.green)
is_heads = True
if not is_heads:
click.secho("No available heads,try migrate", fg=Color.green)
click.secho("No available heads,try migrate first", fg=Color.green)
@cli.command(help="List all migrate items.")
@click.pass_context
@coro
async def history(ctx: Context):
versions = Migrate.get_all_version_files()
for version in versions:
@@ -157,6 +196,7 @@ async def history(ctx: Context):
"--location", default="./migrations", show_default=True, help="Migrate store location."
)
@click.pass_context
@coro
async def init(
ctx: Context, tortoise_orm, location,
):
@@ -188,6 +228,7 @@ async def init(
show_default=True,
)
@click.pass_context
@coro
async def init_db(ctx: Context, safe):
config = ctx.obj["config"]
location = ctx.obj["location"]
@@ -200,8 +241,6 @@ async def init_db(ctx: Context, safe):
else:
return click.secho(f"Inited {app} already", fg=Color.yellow)
Migrate.write_old_models(config, app, location)
await Tortoise.init(config=config)
connection = get_app_connection(config, app)
await generate_schema_for_client(connection, safe)
@@ -209,7 +248,9 @@ async def init_db(ctx: Context, safe):
schema = get_schema_sql(connection, safe)
version = await Migrate.generate_version()
await Aerich.create(version=version, app=app)
await Aerich.create(
version=version, app=app, content=Migrate.get_models_content(config, app, location)
)
with open(os.path.join(dirname, version), "w", encoding="utf-8") as f:
content = {
"upgrade": [schema],
@@ -220,4 +261,4 @@ async def init_db(ctx: Context, safe):
def main():
sys.path.insert(0, ".")
cli(_anyio_backend="asyncio")
cli()

View File

@@ -2,7 +2,7 @@ from typing import List, Type
from tortoise import BaseDBAsyncClient, ForeignKeyFieldInstance, ManyToManyFieldInstance, Model
from tortoise.backends.base.schema_generator import BaseSchemaGenerator
from tortoise.fields import Field, JSONField, TextField, UUIDField
from tortoise.fields import CASCADE, Field, JSONField, TextField, UUIDField
class BaseDDL:
@@ -20,7 +20,7 @@ class BaseDDL:
_DROP_INDEX_TEMPLATE = 'ALTER TABLE "{table_name}" DROP INDEX "{index_name}"'
_ADD_FK_TEMPLATE = 'ALTER TABLE "{table_name}" ADD CONSTRAINT "{fk_name}" FOREIGN KEY ("{db_column}") REFERENCES "{table}" ("{field}") ON DELETE {on_delete}'
_DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP FOREIGN KEY "{fk_name}"'
_M2M_TABLE_TEMPLATE = 'CREATE TABLE "{table_name}" ("{backward_key}" {backward_type} NOT NULL REFERENCES "{backward_table}" ("{backward_field}") ON DELETE CASCADE,"{forward_key}" {forward_type} NOT NULL REFERENCES "{forward_table}" ("{forward_field}") ON DELETE CASCADE){extra}{comment};'
_M2M_TABLE_TEMPLATE = 'CREATE TABLE "{table_name}" ("{backward_key}" {backward_type} NOT NULL REFERENCES "{backward_table}" ("{backward_field}") ON DELETE CASCADE,"{forward_key}" {forward_type} NOT NULL REFERENCES "{forward_table}" ("{forward_field}") ON DELETE {on_delete}){extra}{comment};'
_MODIFY_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" MODIFY COLUMN {column}'
def __init__(self, client: "BaseDBAsyncClient"):
@@ -44,6 +44,7 @@ class BaseDDL:
backward_type=model._meta.pk.get_for_dialect(self.DIALECT, "SQL_TYPE"),
forward_key=field.forward_key,
forward_type=field.related_model._meta.pk.get_for_dialect(self.DIALECT, "SQL_TYPE"),
on_delete=CASCADE,
extra=self.schema_generator._table_generate_extra(table=field.through),
comment=self.schema_generator._table_comment_generator(
table=field.through, comment=field.description

View File

@@ -13,7 +13,7 @@ class SqliteDDL(BaseDDL):
DIALECT = SqliteSchemaGenerator.DIALECT
def drop_column(self, model: "Type[Model]", column_name: str):
raise NotSupportError("Drop column is not support in SQLite.")
raise NotSupportError("Drop column is unsupported in SQLite.")
def modify_column(self, model: "Type[Model]", field_object: Field):
raise NotSupportError("Modify column is not support in SQLite.")
raise NotSupportError("Modify column is unsupported in SQLite.")

View File

@@ -3,7 +3,8 @@ import os
import re
from datetime import datetime
from importlib import import_module
from typing import Dict, List, Tuple, Type
from io import StringIO
from typing import Dict, List, Optional, Tuple, Type
import click
from tortoise import (
@@ -14,6 +15,7 @@ from tortoise import (
Model,
Tortoise,
)
from tortoise.exceptions import OperationalError
from tortoise.fields import Field
from aerich.ddl import BaseDDL
@@ -41,8 +43,8 @@ class Migrate:
dialect: str
@classmethod
def get_old_model_file(cls):
return cls.old_models + ".py"
def get_old_model_file(cls, app: str, location: str):
return os.path.join(location, app, cls.old_models + ".py")
@classmethod
def get_all_version_files(cls) -> List[str]:
@@ -52,18 +54,33 @@ class Migrate:
)
@classmethod
async def get_last_version(cls) -> Aerich:
return await Aerich.filter(app=cls.app).first()
async def get_last_version(cls) -> Optional[Aerich]:
try:
return await Aerich.filter(app=cls.app).first()
except OperationalError:
pass
@classmethod
def remove_old_model_file(cls, app: str, location: str):
try:
os.unlink(cls.get_old_model_file(app, location))
except FileNotFoundError:
pass
@classmethod
async def init_with_old_models(cls, config: dict, app: str, location: str):
migrate_config = cls._get_migrate_config(config, app, location)
await Tortoise.init(config=config)
last_version = await cls.get_last_version()
cls.app = app
cls.migrate_config = migrate_config
cls.migrate_location = os.path.join(location, app)
if last_version:
content = last_version.content
with open(cls.get_old_model_file(app, location), "w") as f:
f.write(content)
await Tortoise.init(config=migrate_config)
migrate_config = cls._get_migrate_config(config, app, location)
cls.migrate_config = migrate_config
await Tortoise.init(config=migrate_config)
connection = get_app_connection(config, app)
cls.dialect = connection.schema_generator.DIALECT
@@ -102,6 +119,10 @@ class Migrate:
@classmethod
async def _generate_diff_sql(cls, name):
version = await cls.generate_version(name)
# delete if same version exists
for version_file in cls.get_all_version_files():
if version_file.startswith(version.split("_")[0]):
os.unlink(os.path.join(cls.migrate_location, version_file))
content = {
"upgrade": cls.upgrade_operators,
"downgrade": cls.downgrade_operators,
@@ -132,7 +153,7 @@ class Migrate:
return await cls._generate_diff_sql(name)
@classmethod
def _add_operator(cls, operator: str, upgrade=True, fk=False):
def _add_operator(cls, operator: str, upgrade=True, fk_m2m=False):
"""
add operator,differentiate fk because fk is order limit
:param operator:
@@ -141,36 +162,16 @@ class Migrate:
:return:
"""
if upgrade:
if fk:
if fk_m2m:
cls._upgrade_fk_m2m_index_operators.append(operator)
else:
cls.upgrade_operators.append(operator)
else:
if fk:
if fk_m2m:
cls._downgrade_fk_m2m_index_operators.append(operator)
else:
cls.downgrade_operators.append(operator)
@classmethod
def cp_models(
cls, app: str, model_files: List[str], old_model_file,
):
"""
cp currents models to old_model_files
:param app:
:param model_files:
:param old_model_file:
:return:
"""
pattern = rf"(\n)?('|\")({app})(.\w+)('|\")"
for i, model_file in enumerate(model_files):
with open(model_file, "r", encoding="utf-8") as f:
content = f.read()
ret = re.sub(pattern, rf"\2{cls.diff_app}\4\5", content)
mode = "w" if i == 0 else "a"
with open(old_model_file, mode, encoding="utf-8") as f:
f.write(f"{ret}\n")
@classmethod
def _get_migrate_config(cls, config: dict, app: str, location: str):
"""
@@ -189,7 +190,7 @@ class Migrate:
return config
@classmethod
def write_old_models(cls, config: dict, app: str, location: str):
def get_models_content(cls, config: dict, app: str, location: str):
"""
write new models to old models
:param config:
@@ -197,14 +198,18 @@ class Migrate:
:param location:
:return:
"""
cls.app = app
old_model_files = []
models = config.get("apps").get(app).get("models")
for model in models:
old_model_files.append(import_module(model).__file__)
cls.cp_models(app, old_model_files, os.path.join(location, app, cls.get_old_model_file()))
pattern = rf"(\n)?('|\")({app})(.\w+)('|\")"
str_io = StringIO()
for i, model_file in enumerate(old_model_files):
with open(model_file, "r", encoding="utf-8") as f:
content = f.read()
ret = re.sub(pattern, rf"\2{cls.diff_app}\4\5", content)
str_io.write(f"{ret}\n")
return str_io.getvalue()
@classmethod
def diff_models(
@@ -268,17 +273,17 @@ class Migrate:
continue
if new_key not in old_keys:
new_field_dict = new_field.describe(serializable=True)
new_field_dict.pop("name")
new_field_dict.pop("db_column")
new_field_dict.pop("name", None)
new_field_dict.pop("db_column", None)
for diff_key in old_keys - new_keys:
old_field = old_fields_map.get(diff_key)
old_field_dict = old_field.describe(serializable=True)
old_field_dict.pop("name")
old_field_dict.pop("db_column")
old_field_dict.pop("name", None)
old_field_dict.pop("db_column", None)
if old_field_dict == new_field_dict:
if upgrade:
is_rename = click.prompt(
f"Rename {diff_key} to {new_key}",
f"Rename {diff_key} to {new_key}?",
default=True,
type=bool,
show_choices=True,
@@ -294,9 +299,7 @@ class Migrate:
break
else:
cls._add_operator(
cls._add_field(new_model, new_field),
upgrade,
isinstance(new_field, (ForeignKeyFieldInstance, ManyToManyFieldInstance)),
cls._add_field(new_model, new_field), upgrade, cls._is_fk_m2m(new_field),
)
else:
old_field = old_fields_map.get(new_key)
@@ -344,6 +347,15 @@ class Migrate:
upgrade,
cls._is_fk_m2m(new_field),
)
if isinstance(new_field, ForeignKeyFieldInstance):
if old_field.db_constraint and not new_field.db_constraint:
cls._add_operator(
cls._drop_fk(new_model, new_field), upgrade, True,
)
if new_field.db_constraint and not old_field.db_constraint:
cls._add_operator(
cls._add_fk(new_model, new_field), upgrade, True,
)
for old_key in old_keys:
field = old_fields_map.get(old_key)
@@ -437,6 +449,10 @@ class Migrate:
def _modify_field(cls, model: Type[Model], field: Field):
return cls.ddl.modify_column(model, field)
@classmethod
def _drop_fk(cls, model: Type[Model], field: ForeignKeyFieldInstance):
return cls.ddl.drop_fk(model, field)
@classmethod
def _remove_field(cls, model: Type[Model], field: Field):
if isinstance(field, ForeignKeyFieldInstance):

View File

@@ -6,6 +6,7 @@ MAX_VERSION_LENGTH = 255
class Aerich(Model):
version = fields.CharField(max_length=MAX_VERSION_LENGTH)
app = fields.CharField(max_length=20)
content = fields.TextField()
class Meta:
ordering = ["-id"]

View File

@@ -1,6 +1,6 @@
import importlib
from asyncclick import BadOptionUsage, Context
from click import BadOptionUsage, Context
from tortoise import BaseDBAsyncClient, Tortoise

poetry.lock generated
View File

@@ -23,28 +23,6 @@ version = "0.15.0"
[package.dependencies]
typing_extensions = "*"
[[package]]
category = "main"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
name = "anyio"
optional = false
python-versions = ">=3.6.2"
version = "2.0.0"
[package.dependencies]
idna = ">=2.8"
sniffio = ">=1.1"
[package.dependencies.typing-extensions]
python = "<3.8"
version = "*"
[package.extras]
curio = ["curio (>=1.4)"]
doc = ["sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"]
test = ["coverage (>=4.5)", "hypothesis (>=4.0)", "pytest (>=4.3)", "trustme", "uvloop"]
trio = ["trio (>=0.16)"]
[[package]]
category = "dev"
description = "apipkg: namespace control and lazy-import mechanism"
@@ -61,21 +39,6 @@ optional = false
python-versions = "*"
version = "1.4.4"
[[package]]
category = "main"
description = "A simple anyio-compatible fork of Click, for powerful command line utilities."
name = "asyncclick"
optional = false
python-versions = ">=3.6"
version = "7.1.2.1"
[package.dependencies]
anyio = ">=2"
[package.extras]
dev = ["coverage", "pytest-runner", "pytest-trio", "pytest (>=3)", "sphinx", "tox"]
docs = ["sphinx"]
[[package]]
category = "main"
description = "An asyncio PostgreSQL driver"
@@ -159,7 +122,7 @@ version = "1.14.3"
pycparser = "*"
[[package]]
category = "dev"
category = "main"
description = "Composable command line interface toolkit"
name = "click"
optional = false
@@ -214,7 +177,7 @@ description = "the modular source code checker: pep8 pyflakes and co"
name = "flake8"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"
version = "3.8.3"
version = "3.8.4"
[package.dependencies]
mccabe = ">=0.6.0,<0.7.0"
@@ -242,19 +205,11 @@ description = "Python Git Library"
name = "gitpython"
optional = false
python-versions = ">=3.4"
version = "3.1.8"
version = "3.1.9"
[package.dependencies]
gitdb = ">=4.0.1,<5"
[[package]]
category = "main"
description = "Internationalized Domain Names in Applications (IDNA)"
name = "idna"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
version = "2.10"
[[package]]
category = "dev"
description = "Read metadata from Python packages"
@@ -293,7 +248,7 @@ description = "A Python utility / library to sort Python imports."
name = "isort"
optional = false
python-versions = ">=3.6,<4.0"
version = "5.5.3"
version = "5.6.1"
[package.extras]
colors = ["colorama (>=0.4.3,<0.5.0)"]
@@ -308,14 +263,6 @@ optional = false
python-versions = "*"
version = "0.6.1"
[[package]]
category = "dev"
description = "More routines for operating on iterables, beyond itertools"
name = "more-itertools"
optional = false
python-versions = ">=3.5"
version = "8.5.0"
[[package]]
category = "dev"
description = "Core utilities for Python packages"
@@ -430,7 +377,7 @@ description = "A SQL query builder API for Python"
name = "pypika"
optional = false
python-versions = "*"
version = "0.39.1"
version = "0.42.1"
[[package]]
category = "dev"
@@ -438,14 +385,13 @@ description = "pytest: simple powerful testing with Python"
name = "pytest"
optional = false
python-versions = ">=3.5"
version = "6.0.2"
version = "6.1.1"
[package.dependencies]
atomicwrites = ">=1.0"
attrs = ">=17.4.0"
colorama = "*"
iniconfig = "*"
more-itertools = ">=4.0.0"
packaging = "*"
pluggy = ">=0.12,<1.0"
py = ">=1.8.2"
@@ -530,7 +476,7 @@ description = "Alternative regular expression module, to replace re."
name = "regex"
optional = false
python-versions = "*"
version = "2020.7.14"
version = "2020.9.27"
[[package]]
category = "main"
@@ -548,14 +494,6 @@ optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
version = "3.0.4"
[[package]]
category = "main"
description = "Sniff out which async library your code is running under"
name = "sniffio"
optional = false
python-versions = ">=3.5"
version = "1.1.0"
[[package]]
category = "dev"
description = "Manage dynamic plugins for Python applications"
@@ -619,7 +557,7 @@ marker = "python_version < \"3.8\""
name = "zipp"
optional = false
python-versions = ">=3.6"
version = "3.2.0"
version = "3.3.0"
[package.extras]
docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"]
@@ -629,8 +567,7 @@ testing = ["pytest (>=3.5,<3.7.3 || >3.7.3)", "pytest-checkdocs (>=1.2.3)", "pyt
dbdrivers = ["aiomysql", "asyncpg"]
[metadata]
content-hash = "06f00778f783c4ad5b174a9c9ee80f4f0e38db9da9ff1012f09c7d306eaa0975"
lock-version = "1.0"
content-hash = "43fe9c0036f4d55d38f82c263887d8a7d9a35a597e02036b70a631955ff73149"
python-versions = "^3.7"
[metadata.files]
@@ -642,10 +579,6 @@ aiosqlite = [
{file = "aiosqlite-0.15.0-py3-none-any.whl", hash = "sha256:19b984b6702aed9f1c85c023f37296954547fc4030dae8e9d027b2a930bed78b"},
{file = "aiosqlite-0.15.0.tar.gz", hash = "sha256:a2884793f4dc8f2798d90e1dfecb2b56a6d479cf039f7ec52356a7fd5f3bdc57"},
]
anyio = [
{file = "anyio-2.0.0-py3-none-any.whl", hash = "sha256:0b8375c8fc665236cb4d143ea13e849eb9e074d727b1b5c27d88aba44ca8c547"},
{file = "anyio-2.0.0.tar.gz", hash = "sha256:ceca4669ffa3f02bf20ef3d6c2a0c323b16cdc71d1ce0b0bc03c6f1f36054826"},
]
apipkg = [
{file = "apipkg-1.5-py2.py3-none-any.whl", hash = "sha256:58587dd4dc3daefad0487f6d9ae32b4542b185e1c36db6993290e7c41ca2b47c"},
{file = "apipkg-1.5.tar.gz", hash = "sha256:37228cda29411948b422fae072f57e31d3396d2ee1c9783775980ee9c9990af6"},
@@ -654,9 +587,6 @@ appdirs = [
{file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"},
{file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
]
asyncclick = [
{file = "asyncclick-7.1.2.1.tar.gz", hash = "sha256:fe9fd8c44a6ae396e54471bd5f209838d46124e019ae701dd71a9c898928483b"},
]
asyncpg = [
{file = "asyncpg-0.21.0-cp35-cp35m-macosx_10_13_x86_64.whl", hash = "sha256:09badce47a4645cfe523cc8a182bd047d5d62af0caaea77935e6a3c9e77dc364"},
{file = "asyncpg-0.21.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:6b7807bfedd24dd15cfb2c17c60977ce01410615ecc285268b5144a944ec97ff"},
@@ -775,20 +705,16 @@ execnet = [
{file = "execnet-1.7.1.tar.gz", hash = "sha256:cacb9df31c9680ec5f95553976c4da484d407e85e41c83cb812aa014f0eddc50"},
]
flake8 = [
{file = "flake8-3.8.3-py2.py3-none-any.whl", hash = "sha256:15e351d19611c887e482fb960eae4d44845013cc142d42896e9862f775d8cf5c"},
{file = "flake8-3.8.3.tar.gz", hash = "sha256:f04b9fcbac03b0a3e58c0ab3a0ecc462e023a9faf046d57794184028123aa208"},
{file = "flake8-3.8.4-py2.py3-none-any.whl", hash = "sha256:749dbbd6bfd0cf1318af27bf97a14e28e5ff548ef8e5b1566ccfb25a11e7c839"},
{file = "flake8-3.8.4.tar.gz", hash = "sha256:aadae8761ec651813c24be05c6f7b4680857ef6afaae4651a4eccaef97ce6c3b"},
]
gitdb = [
{file = "gitdb-4.0.5-py3-none-any.whl", hash = "sha256:91f36bfb1ab7949b3b40e23736db18231bf7593edada2ba5c3a174a7b23657ac"},
{file = "gitdb-4.0.5.tar.gz", hash = "sha256:c9e1f2d0db7ddb9a704c2a0217be31214e91a4fe1dea1efad19ae42ba0c285c9"},
]
gitpython = [
{file = "GitPython-3.1.8-py3-none-any.whl", hash = "sha256:1858f4fd089abe92ae465f01d5aaaf55e937eca565fb2c1fce35a51b5f85c910"},
{file = "GitPython-3.1.8.tar.gz", hash = "sha256:080bf8e2cf1a2b907634761c2eaefbe83b69930c94c66ad11b65a8252959f912"},
]
idna = [
{file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"},
{file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"},
{file = "GitPython-3.1.9-py3-none-any.whl", hash = "sha256:138016d519bf4dd55b22c682c904ed2fd0235c3612b2f8f65ce218ff358deed8"},
{file = "GitPython-3.1.9.tar.gz", hash = "sha256:a03f728b49ce9597a6655793207c6ab0da55519368ff5961e4a74ae475b9fa8e"},
]
importlib-metadata = [
{file = "importlib_metadata-2.0.0-py2.py3-none-any.whl", hash = "sha256:cefa1a2f919b866c5beb7c9f7b0ebb4061f30a8a9bf16d609b000e2dfaceb9c3"},
@@ -804,17 +730,13 @@ iso8601 = [
{file = "iso8601-0.1.13.tar.gz", hash = "sha256:f7dec22af52025d4526be94cc1303c7d8f5379b746a3f54a8c8446384392eeb1"},
]
isort = [
{file = "isort-5.5.3-py3-none-any.whl", hash = "sha256:c16eaa7432a1c004c585d79b12ad080c6c421dd18fe27982ca11f95e6898e432"},
{file = "isort-5.5.3.tar.gz", hash = "sha256:6187a9f1ce8784cbc6d1b88790a43e6083a6302f03e9ae482acc0f232a98c843"},
{file = "isort-5.6.1-py3-none-any.whl", hash = "sha256:dd3211f513f4a92ec1ec1876fc1dc3c686649c349d49523f5b5adbb0814e5960"},
{file = "isort-5.6.1.tar.gz", hash = "sha256:2f510f34ae18a8d0958c53eec51ef84fd099f07c4c639676525acbcd7b5bd3ff"},
]
mccabe = [
{file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
{file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
]
more-itertools = [
{file = "more-itertools-8.5.0.tar.gz", hash = "sha256:6f83822ae94818eae2612063a5101a7311e68ae8002005b5e05f03fd74a86a20"},
{file = "more_itertools-8.5.0-py3-none-any.whl", hash = "sha256:9b30f12df9393f0d28af9210ff8efe48d10c94f73e5daf886f10c4b0b0b4f03c"},
]
packaging = [
{file = "packaging-20.4-py2.py3-none-any.whl", hash = "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"},
{file = "packaging-20.4.tar.gz", hash = "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8"},
@@ -875,11 +797,11 @@ pyparsing = [
{file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"},
]
pypika = [
{file = "PyPika-0.39.1.tar.gz", hash = "sha256:f6c715348cb6fa0042fe5b69f40cbb19c36ee3ff68414ece8660465c56523012"},
{file = "PyPika-0.42.1.tar.gz", hash = "sha256:f9466259179d5cb12d6cf3d453f4370a511039a6dd88fcb5089f22ba8824b408"},
]
pytest = [
{file = "pytest-6.0.2-py3-none-any.whl", hash = "sha256:0e37f61339c4578776e090c3b8f6b16ce4db333889d65d0efb305243ec544b40"},
{file = "pytest-6.0.2.tar.gz", hash = "sha256:c8f57c2a30983f469bf03e68cdfa74dc474ce56b8f280ddcb080dfd91df01043"},
{file = "pytest-6.1.1-py3-none-any.whl", hash = "sha256:7a8190790c17d79a11f847fba0b004ee9a8122582ebff4729a082c109e81a4c9"},
{file = "pytest-6.1.1.tar.gz", hash = "sha256:8f593023c1a0f916110285b6efd7f99db07d59546e3d8c36fc60e2ab05d3be92"},
]
pytest-asyncio = [
{file = "pytest-asyncio-0.14.0.tar.gz", hash = "sha256:9882c0c6b24429449f5f969a5158b528f39bde47dc32e85b9f0403965017e700"},
@@ -911,26 +833,33 @@ pyyaml = [
{file = "PyYAML-5.3.1.tar.gz", hash = "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d"},
]
regex = [
{file = "regex-2020.7.14-cp27-cp27m-win32.whl", hash = "sha256:e46d13f38cfcbb79bfdb2964b0fe12561fe633caf964a77a5f8d4e45fe5d2ef7"},
{file = "regex-2020.7.14-cp27-cp27m-win_amd64.whl", hash = "sha256:6961548bba529cac7c07af2fd4d527c5b91bb8fe18995fed6044ac22b3d14644"},
{file = "regex-2020.7.14-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c50a724d136ec10d920661f1442e4a8b010a4fe5aebd65e0c2241ea41dbe93dc"},
{file = "regex-2020.7.14-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8a51f2c6d1f884e98846a0a9021ff6861bdb98457879f412fdc2b42d14494067"},
{file = "regex-2020.7.14-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:9c568495e35599625f7b999774e29e8d6b01a6fb684d77dee1f56d41b11b40cd"},
{file = "regex-2020.7.14-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:51178c738d559a2d1071ce0b0f56e57eb315bcf8f7d4cf127674b533e3101f88"},
{file = "regex-2020.7.14-cp36-cp36m-win32.whl", hash = "sha256:9eddaafb3c48e0900690c1727fba226c4804b8e6127ea409689c3bb492d06de4"},
{file = "regex-2020.7.14-cp36-cp36m-win_amd64.whl", hash = "sha256:14a53646369157baa0499513f96091eb70382eb50b2c82393d17d7ec81b7b85f"},
{file = "regex-2020.7.14-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:1269fef3167bb52631ad4fa7dd27bf635d5a0790b8e6222065d42e91bede4162"},
{file = "regex-2020.7.14-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d0a5095d52b90ff38592bbdc2644f17c6d495762edf47d876049cfd2968fbccf"},
{file = "regex-2020.7.14-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:4c037fd14c5f4e308b8370b447b469ca10e69427966527edcab07f52d88388f7"},
{file = "regex-2020.7.14-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:bc3d98f621898b4a9bc7fecc00513eec8f40b5b83913d74ccb445f037d58cd89"},
{file = "regex-2020.7.14-cp37-cp37m-win32.whl", hash = "sha256:46bac5ca10fb748d6c55843a931855e2727a7a22584f302dd9bb1506e69f83f6"},
{file = "regex-2020.7.14-cp37-cp37m-win_amd64.whl", hash = "sha256:0dc64ee3f33cd7899f79a8d788abfbec168410be356ed9bd30bbd3f0a23a7204"},
{file = "regex-2020.7.14-cp38-cp38-manylinux1_i686.whl", hash = "sha256:5ea81ea3dbd6767873c611687141ec7b06ed8bab43f68fad5b7be184a920dc99"},
{file = "regex-2020.7.14-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:bbb332d45b32df41200380fff14712cb6093b61bd142272a10b16778c418e98e"},
{file = "regex-2020.7.14-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:c11d6033115dc4887c456565303f540c44197f4fc1a2bfb192224a301534888e"},
{file = "regex-2020.7.14-cp38-cp38-win32.whl", hash = "sha256:d6cff2276e502b86a25fd10c2a96973fdb45c7a977dca2138d661417f3728341"},
{file = "regex-2020.7.14-cp38-cp38-win_amd64.whl", hash = "sha256:7a2dd66d2d4df34fa82c9dc85657c5e019b87932019947faece7983f2089a840"},
{file = "regex-2020.7.14.tar.gz", hash = "sha256:3a3af27a8d23143c49a3420efe5b3f8cf1a48c6fc8bc6856b03f638abc1833bb"},
{file = "regex-2020.9.27-cp27-cp27m-win32.whl", hash = "sha256:d23a18037313714fb3bb5a94434d3151ee4300bae631894b1ac08111abeaa4a3"},
{file = "regex-2020.9.27-cp27-cp27m-win_amd64.whl", hash = "sha256:84e9407db1b2eb368b7ecc283121b5e592c9aaedbe8c78b1a2f1102eb2e21d19"},
{file = "regex-2020.9.27-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:5f18875ac23d9aa2f060838e8b79093e8bb2313dbaaa9f54c6d8e52a5df097be"},
{file = "regex-2020.9.27-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ae91972f8ac958039920ef6e8769277c084971a142ce2b660691793ae44aae6b"},
{file = "regex-2020.9.27-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:9a02d0ae31d35e1ec12a4ea4d4cca990800f66a917d0fb997b20fbc13f5321fc"},
{file = "regex-2020.9.27-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:ebbe29186a3d9b0c591e71b7393f1ae08c83cb2d8e517d2a822b8f7ec99dfd8b"},
{file = "regex-2020.9.27-cp36-cp36m-win32.whl", hash = "sha256:4707f3695b34335afdfb09be3802c87fa0bc27030471dbc082f815f23688bc63"},
{file = "regex-2020.9.27-cp36-cp36m-win_amd64.whl", hash = "sha256:9bc13e0d20b97ffb07821aa3e113f9998e84994fe4d159ffa3d3a9d1b805043b"},
{file = "regex-2020.9.27-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:f1b3afc574a3db3b25c89161059d857bd4909a1269b0b3cb3c904677c8c4a3f7"},
{file = "regex-2020.9.27-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5533a959a1748a5c042a6da71fe9267a908e21eded7a4f373efd23a2cbdb0ecc"},
{file = "regex-2020.9.27-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:1fe0a41437bbd06063aa184c34804efa886bcc128222e9916310c92cd54c3b4c"},
{file = "regex-2020.9.27-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:c570f6fa14b9c4c8a4924aaad354652366577b4f98213cf76305067144f7b100"},
{file = "regex-2020.9.27-cp37-cp37m-win32.whl", hash = "sha256:eda4771e0ace7f67f58bc5b560e27fb20f32a148cbc993b0c3835970935c2707"},
{file = "regex-2020.9.27-cp37-cp37m-win_amd64.whl", hash = "sha256:60b0e9e6dc45683e569ec37c55ac20c582973841927a85f2d8a7d20ee80216ab"},
{file = "regex-2020.9.27-cp38-cp38-manylinux1_i686.whl", hash = "sha256:088afc8c63e7bd187a3c70a94b9e50ab3f17e1d3f52a32750b5b77dbe99ef5ef"},
{file = "regex-2020.9.27-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:eaf548d117b6737df379fdd53bdde4f08870e66d7ea653e230477f071f861121"},
{file = "regex-2020.9.27-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:41bb65f54bba392643557e617316d0d899ed5b4946dccee1cb6696152b29844b"},
{file = "regex-2020.9.27-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:8d69cef61fa50c8133382e61fd97439de1ae623fe943578e477e76a9d9471637"},
{file = "regex-2020.9.27-cp38-cp38-win32.whl", hash = "sha256:f2388013e68e750eaa16ccbea62d4130180c26abb1d8e5d584b9baf69672b30f"},
{file = "regex-2020.9.27-cp38-cp38-win_amd64.whl", hash = "sha256:4318d56bccfe7d43e5addb272406ade7a2274da4b70eb15922a071c58ab0108c"},
{file = "regex-2020.9.27-cp39-cp39-manylinux1_i686.whl", hash = "sha256:84cada8effefe9a9f53f9b0d2ba9b7b6f5edf8d2155f9fdbe34616e06ececf81"},
{file = "regex-2020.9.27-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:816064fc915796ea1f26966163f6845de5af78923dfcecf6551e095f00983650"},
{file = "regex-2020.9.27-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:5d892a4f1c999834eaa3c32bc9e8b976c5825116cde553928c4c8e7e48ebda67"},
{file = "regex-2020.9.27-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:c9443124c67b1515e4fe0bb0aa18df640965e1030f468a2a5dc2589b26d130ad"},
{file = "regex-2020.9.27-cp39-cp39-win32.whl", hash = "sha256:49f23ebd5ac073765ecbcf046edc10d63dcab2f4ae2bce160982cb30df0c0302"},
{file = "regex-2020.9.27-cp39-cp39-win_amd64.whl", hash = "sha256:3d20024a70b97b4f9546696cbf2fd30bae5f42229fbddf8661261b1eaff0deb7"},
{file = "regex-2020.9.27.tar.gz", hash = "sha256:a6f32aea4260dfe0e55dc9733ea162ea38f0ea86aa7d0f77b15beac5bf7b369d"},
]
six = [
{file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"},
@@ -940,10 +869,6 @@ smmap = [
{file = "smmap-3.0.4-py2.py3-none-any.whl", hash = "sha256:54c44c197c819d5ef1991799a7e30b662d1e520f2ac75c9efbeb54a742214cf4"},
{file = "smmap-3.0.4.tar.gz", hash = "sha256:9c98bbd1f9786d22f14b3d4126894d56befb835ec90cef151af566c7e19b5d24"},
]
sniffio = [
{file = "sniffio-1.1.0-py3-none-any.whl", hash = "sha256:20ed6d5b46f8ae136d00b9dcb807615d83ed82ceea6b2058cecb696765246da5"},
{file = "sniffio-1.1.0.tar.gz", hash = "sha256:8e3810100f69fe0edd463d02ad407112542a11ffdc29f67db2bf3771afb87a21"},
]
stevedore = [
{file = "stevedore-3.2.2-py3-none-any.whl", hash = "sha256:5e1ab03eaae06ef6ce23859402de785f08d97780ed774948ef16c4652c41bc62"},
{file = "stevedore-3.2.2.tar.gz", hash = "sha256:f845868b3a3a77a2489d226568abe7328b5c2d4f6a011cc759dfa99144a521f0"},
@@ -984,6 +909,6 @@ typing-extensions = [
{file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"},
]
zipp = [
{file = "zipp-3.2.0-py3-none-any.whl", hash = "sha256:43f4fa8d8bb313e65d8323a3952ef8756bf40f9a5c3ea7334be23ee4ec8278b6"},
{file = "zipp-3.2.0.tar.gz", hash = "sha256:b52f22895f4cfce194bc8172f3819ee8de7540aa6d873535a8668b730b8b411f"},
{file = "zipp-3.3.0-py3-none-any.whl", hash = "sha256:eed8ec0b8d1416b2ca33516a37a08892442f3954dee131e92cfd92d8fe3e7066"},
{file = "zipp-3.3.0.tar.gz", hash = "sha256:64ad89efee774d1897a58607895d80789c59778ea02185dd846ac38394a8642b"},
]

View File

@@ -1,6 +1,6 @@
[tool.poetry]
name = "aerich"
version = "0.2.4"
version = "0.3.2"
description = "A database migrations tool for Tortoise ORM."
authors = ["long2ice <long2ice@gmail.com>"]
license = "Apache-2.0"
@@ -17,7 +17,7 @@ include = ["CHANGELOG.md", "LICENSE", "README.md"]
[tool.poetry.dependencies]
python = "^3.7"
tortoise-orm = "*"
asyncclick = "*"
click = "*"
pydantic = "*"
aiomysql = {version = "*", optional = true}
asyncpg = {version = "*", optional = true}

View File

@@ -31,6 +31,12 @@ class User(Model):
intro = fields.TextField(default="")
class Email(Model):
email = fields.CharField(max_length=200)
is_primary = fields.BooleanField(default=False)
user = fields.ForeignKeyField("diff_models.User", db_constraint=True)
class Category(Model):
slug = fields.CharField(max_length=200)
user = fields.ForeignKeyField("diff_models.User", description="User")

View File

@@ -31,6 +31,12 @@ class User(Model):
intro = fields.TextField(default="")
class Email(Model):
email = fields.CharField(max_length=200)
is_primary = fields.BooleanField(default=False)
user = fields.ForeignKeyField("models.User", db_constraint=False)
class Category(Model):
slug = fields.CharField(max_length=200)
name = fields.CharField(max_length=200)

View File

@@ -20,8 +20,10 @@ def test_migrate(mocker: MockerFixture):
Migrate.diff_models(models, diff_models, False)
else:
Migrate.diff_models(models, diff_models, False)
Migrate._merge_operators()
if isinstance(Migrate.ddl, MysqlDDL):
assert Migrate.upgrade_operators == [
"ALTER TABLE `email` DROP FOREIGN KEY `fk_email_user_5b58673d`",
"ALTER TABLE `category` ADD `name` VARCHAR(200) NOT NULL",
"ALTER TABLE `user` ADD UNIQUE INDEX `uid_user_usernam_9987ab` (`username`)",
"ALTER TABLE `user` RENAME COLUMN `last_login_at` TO `last_login`",
@@ -30,9 +32,12 @@ def test_migrate(mocker: MockerFixture):
"ALTER TABLE `category` DROP COLUMN `name`",
"ALTER TABLE `user` DROP INDEX `uid_user_usernam_9987ab`",
"ALTER TABLE `user` RENAME COLUMN `last_login` TO `last_login_at`",
"ALTER TABLE `email` ADD CONSTRAINT `fk_email_user_5b58673d` FOREIGN KEY "
"(`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE",
]
elif isinstance(Migrate.ddl, PostgresDDL):
assert Migrate.upgrade_operators == [
'ALTER TABLE "email" DROP CONSTRAINT "fk_email_user_5b58673d"',
'ALTER TABLE "category" ADD "name" VARCHAR(200) NOT NULL',
'ALTER TABLE "user" ADD CONSTRAINT "uid_user_usernam_9987ab" UNIQUE ("username")',
'ALTER TABLE "user" RENAME COLUMN "last_login_at" TO "last_login"',
@@ -41,9 +46,11 @@ def test_migrate(mocker: MockerFixture):
'ALTER TABLE "category" DROP COLUMN "name"',
'ALTER TABLE "user" DROP CONSTRAINT "uid_user_usernam_9987ab"',
'ALTER TABLE "user" RENAME COLUMN "last_login" TO "last_login_at"',
'ALTER TABLE "email" ADD CONSTRAINT "fk_email_user_5b58673d" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE',
]
elif isinstance(Migrate.ddl, SqliteDDL):
assert Migrate.upgrade_operators == [
'ALTER TABLE "email" DROP FOREIGN KEY "fk_email_user_5b58673d"',
'ALTER TABLE "category" ADD "name" VARCHAR(200) NOT NULL',
'ALTER TABLE "user" ADD UNIQUE INDEX "uid_user_usernam_9987ab" ("username")',
'ALTER TABLE "user" RENAME COLUMN "last_login_at" TO "last_login"',