43 Commits

Author SHA1 Message Date
long2ice
73b75349ee update version 0.2.0 2020-06-12 17:53:17 +08:00
long2ice
7bc553221a raise NotImplementedError 2020-06-12 09:31:01 +08:00
long2ice
7413a05e19 set --safe bool 2020-06-08 18:07:41 +08:00
long2ice
bf194ca8ce Update model file find method 2020-06-03 18:42:35 +08:00
long2ice
b06da0223a add --build 2020-06-03 09:39:52 +08:00
long2ice
83554cdc5d Merge remote-tracking branch 'origin/dev' into dev
# Conflicts:
#	.github/workflows/pypi.yml
#	.github/workflows/test.yml
#	Makefile
2020-06-03 09:38:20 +08:00
long2ice
6c76bfccad Merge remote-tracking branch 'origin/dev' into dev 2020-06-02 22:23:26 +08:00
long2ice
a1746e457c update github actions 2020-06-02 22:23:15 +08:00
long2ice
2a0435dea9 update github actions 2020-06-02 22:14:44 +08:00
long2ice
e87f67f1e1 update github actions 2020-06-02 22:02:46 +08:00
long2ice
7b4b7ac749 update github actions 2020-06-02 18:58:59 +08:00
long2ice
5b9b51db3f update github actions 2020-06-02 18:38:39 +08:00
long2ice
ffeee3c901 update github actions 2020-06-02 18:28:50 +08:00
long2ice
b4366d2427 update github actions 2020-06-02 18:20:55 +08:00
long2ice
ec1c80f3a9 remove requirements 2020-06-01 14:57:29 +08:00
long2ice
d2083632eb add cli -V 2020-05-27 12:44:49 +08:00
long2ice
125389461f check tortoise add aerich.models 2020-05-26 14:44:55 +08:00
long2ice
c09c878eaf add support modify column
diff mysql ddl
2020-05-26 10:22:02 +08:00
long2ice
ef3e0c11d5 update version 2020-05-25 23:46:35 +08:00
long2ice
881f70f748 Fix default_connection when upgrade 2020-05-25 23:44:42 +08:00
long2ice
6ffca1a0c7 add support modify column 2020-05-25 22:39:39 +08:00
long2ice
95e41720cb Fix init db sql error 2020-05-25 18:53:34 +08:00
long2ice
40c0008e6e Fix upgrade error when migrate 2020-05-25 18:02:56 +08:00
long2ice
ce75e55d60 update README.rst 2020-05-25 16:36:18 +08:00
long2ice
4d4f951e09 update README.rst 2020-05-25 16:33:56 +08:00
long2ice
354e861dad add more test 2020-05-24 13:47:10 +08:00
long2ice
3a76486993 migrate raise error 2020-05-24 00:05:45 +08:00
long2ice
4d0a6b4de6 Fix version num str 2020-05-22 15:35:35 +08:00
long2ice
c01d2993e0 Exclude models.Aerich.
Add init record when init-db.
2020-05-22 11:59:03 +08:00
long2ice
bab5ebf2f0 migrate exclude aerich.models 2020-05-22 11:14:16 +08:00
long2ice
7e5cefd7d6 write old models exclude aerich.models 2020-05-22 11:03:52 +08:00
long2ice
0cea28d521 update version 2020-05-21 23:57:13 +08:00
long2ice
b92e6551fd update dependency_links 2020-05-21 23:33:58 +08:00
long2ice
bbabde32a1 update version 2020-05-21 21:24:21 +08:00
long2ice
aa921355b9 Store versions in db 2020-05-21 21:22:06 +08:00
long2ice
ea1191bb10 TODO: store version in db 2020-05-21 18:38:45 +08:00
long2ice
23dd29644c fix dependency import 2020-05-21 13:44:09 +08:00
long2ice
bf1d745cef remove cov 2020-05-20 19:00:03 +08:00
long2ice
f5e5d24855 perfect test 2020-05-20 18:42:13 +08:00
long2ice
6d92aec4b1 add pytest.yml 2020-05-19 17:45:41 +08:00
long2ice
55e78bdd2d update regex 2020-05-19 17:16:21 +08:00
long2ice
b07d1abf49 raise Tortoise ConfigurationError 2020-05-19 16:16:07 +08:00
long2ice
ad9c3c809d update setup.py 2020-05-19 14:01:07 +08:00
34 changed files with 1004 additions and 420 deletions

.github/workflows/pypi.yml

@@ -1,21 +1,19 @@
 name: pypi
 on:
   release:
     types:
       - created
 jobs:
-  build:
+  publish:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
-      - uses: actions/setup-python@v1
+      - uses: actions/setup-python@v2
         with:
           python-version: '3.x'
+      - uses: dschep/install-poetry-action@v1.3
       - name: Build dists
-        run: |
-          python3 setup.py sdist
+        run: make build
       - name: Pypi Publish
         uses: pypa/gh-action-pypi-publish@master
         with:

.github/workflows/test.yml

@@ -0,0 +1,31 @@
name: test
on: [push, pull_request]
jobs:
  testall:
    runs-on: ubuntu-latest
    services:
      postgres:
        image: postgres:latest
        ports:
          - 5432:5432
        env:
          POSTGRES_PASSWORD: 123456
          POSTGRES_USER: postgres
        options: --health-cmd=pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
    steps:
      - name: Start MySQL
        run: sudo systemctl start mysql.service
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: '3.x'
      - uses: dschep/install-poetry-action@v1.3
      - name: CI
        env:
          MYSQL_PASS: root
          MYSQL_HOST: 127.0.0.1
          MYSQL_PORT: 3306
          POSTGRES_PASS: 123456
          POSTGRES_HOST: 127.0.0.1
          POSTGRES_PORT: 5432
        run: make ci

.gitignore

@@ -143,3 +143,4 @@ cython_debug/
 .idea
 migrations
 aerich.ini
+src

CHANGELOG.rst

@@ -1,9 +1,44 @@
 =========
 ChangeLog
 =========
+0.2
+===
+0.2.0
+-----
+- Update model file find method.
+- Set ``--safe`` bool.
 0.1
 ===
+0.1.9
+-----
+- Fix default_connection when upgrade
+- Find default app instead of default.
+- Diff MySQL ddl.
+- Check tortoise config.
+0.1.8
+-----
+- Fix upgrade error when migrate.
+- Fix init db sql error.
+- Support change column.
+0.1.7
+-----
+- Exclude models.Aerich.
+- Add init record when init-db.
+- Fix version num str.
+0.1.6
+-----
+- update dependency_links
+0.1.5
+-----
+- Add sqlite and postgres support.
+- Fix dependency import.
+- Store versions in db.
 0.1.4
 -----
 - Fix transaction and fields import.

MANIFEST.in (deleted)

@@ -1,3 +0,0 @@
-include LICENSE
-include README.rst
-include requirements.txt

Makefile

@@ -1,6 +1,10 @@
-checkfiles = aerich/ tests/
+checkfiles = aerich/ tests/ conftest.py
 black_opts = -l 100 -t py38
 py_warn = PYTHONDEVMODE=1
+MYSQL_HOST ?= "127.0.0.1"
+MYSQL_PORT ?= 3306
+POSTGRES_HOST ?= "127.0.0.1"
+POSTGRES_PORT ?= 5432
 help:
 	@echo "Aerich development makefile"
@@ -8,19 +12,17 @@ help:
 	@echo "usage: make <target>"
 	@echo "Targets:"
 	@echo " up      Updates dev/test dependencies"
 	@echo " deps    Ensure dev/test dependencies are installed"
 	@echo " check   Checks that build is sane"
 	@echo " lint    Reports all linter violations"
 	@echo " test    Runs all tests"
 	@echo " style   Auto-formats the code"
-deps:
-	@which pip-sync > /dev/null || pip install -q pip-tools
-	@pip install -r requirements-dev.txt
 up:
-	CUSTOM_COMPILE_COMMAND="make up" pip-compile -o requirements-dev.txt -U
-	sed -i "s/^-e .*/-e ./" requirements.txt
+	@poetry update
+deps:
+	@poetry install -E dbdrivers
 style: deps
 	isort -rc $(checkfiles)
@@ -36,20 +38,24 @@ endif
 	bandit -r $(checkfiles)
 	python setup.py check -mrs
-lint: deps
-ifneq ($(shell which black),)
-	black --check $(black_opts) $(checkfiles) || (echo "Please run 'make style' to auto-fix style issues" && false)
-endif
-	flake8 $(checkfiles)
-	mypy $(checkfiles)
-	pylint $(checkfiles)
-	bandit -r $(checkfiles)
-	python setup.py check -mrs
 test: deps
-	$(py_warn) py.test
+	$(py_warn) TEST_DB=sqlite://:memory: py.test
+test_sqlite:
+	$(py_warn) TEST_DB=sqlite://:memory: py.test
+test_mysql:
+	$(py_warn) TEST_DB="mysql://root:$(MYSQL_PASS)@$(MYSQL_HOST):$(MYSQL_PORT)/test_\{\}" py.test
+test_postgres:
+	$(py_warn) TEST_DB="postgres://postgres:$(POSTGRES_PASS)@$(POSTGRES_HOST):$(POSTGRES_PORT)/test_\{\}" py.test
+testall: deps test_sqlite test_postgres test_mysql
+build: deps
+	@poetry build
 publish: deps
-	rm -fR dist/
-	python setup.py sdist
-	twine upload dist/*
+	@poetry publish --build
+ci: testall

README.rst

@@ -8,6 +8,8 @@ Aerich
     :target: https://github.com/long2ice/aerich
 .. image:: https://github.com/long2ice/aerich/workflows/pypi/badge.svg
     :target: https://github.com/long2ice/aerich/actions?query=workflow:pypi
+.. image:: https://github.com/long2ice/aerich/workflows/test/badge.svg
+    :target: https://github.com/long2ice/aerich/actions?query=workflow:test
 Introduction
 ============
@@ -52,6 +54,19 @@ Quick Start
 Usage
 =====
+You need add ``aerich.models`` to your ``Tortoise-ORM`` config first, example:
+.. code-block:: python
+    TORTOISE_ORM = {
+        "connections": {"default": "mysql://root:123456@127.0.0.1:3306/test"},
+        "apps": {
+            "models": {
+                "models": ["tests.models", "aerich.models"],
+                "default_connection": "default",
+            },
+        },
+    }
 Initialization
 --------------
@@ -89,6 +104,10 @@ Init db
     Success create app migrate location ./migrations/models
     Success generate schema for app "models"
+.. note::
+    If your Tortoise-ORM app is not default ``models``, you must specify ``--app`` like ``aerich --app other_models init-db``.
 Update models and make migrate
 ------------------------------
@@ -98,7 +117,7 @@ Update models and make migrate
     Success migrate 1_202029051520102929_drop_column.json
-Format of migrate filename is ``{version}_{datetime}_{name|update}.json``
+Format of migrate filename is ``{version_num}_{datetime}_{name|update}.json``
 Upgrade to latest version
 -------------------------
@@ -140,6 +159,11 @@ Show heads to be migrated
     1_202029051520102929_drop_column.json
+Limitations
+===========
+* Not support ``rename column`` now.
+* ``Sqlite`` and ``Postgres`` may not work as expected because I don't use those in my work.
 License
 =======
 This project is licensed under the `MIT <https://github.com/long2ice/aerich/blob/master/LICENSE>`_ License.
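For context on the README's initialization steps, the config file that ``aerich init`` writes is built with ConfigParser in the ``init`` command of aerich/cli.py further down in this diff. The sketch below is only illustrative: the section name "aerich" (assumed to be the ``-n/--name`` default) and the concrete ``tortoise_orm``/``location`` values are assumptions, not taken from this changeset.
# Sketch of what "aerich init -t <module>.TORTOISE_ORM" roughly produces,
# mirroring the parser.add_section/parser.set calls in the init command below.
from configparser import ConfigParser

parser = ConfigParser()
parser.add_section("aerich")  # assumed default --name
parser.set("aerich", "tortoise_orm", "tests.models.TORTOISE_ORM")  # illustrative dotted path
parser.set("aerich", "location", "./migrations")
with open("aerich.ini", "w") as f:
    parser.write(f)
# Resulting aerich.ini (roughly):
# [aerich]
# tortoise_orm = tests.models.TORTOISE_ORM
# location = ./migrations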

aerich/__init__.py

@@ -1 +1 @@
-__version__ = "0.1.4"
+__version__ = "0.1.9"

aerich/cli.py

@@ -3,15 +3,20 @@ import os
 import sys
 from configparser import ConfigParser
 from enum import Enum
-from . import __version__
 import asyncclick as click
 from asyncclick import Context, UsageError
-from tortoise import ConfigurationError, Tortoise, generate_schema_for_client
+from tortoise import Tortoise, generate_schema_for_client
+from tortoise.exceptions import OperationalError
 from tortoise.transactions import in_transaction
+from tortoise.utils import get_schema_sql
 from aerich.migrate import Migrate
 from aerich.utils import get_app_connection, get_app_connection_name, get_tortoise_config
+from . import __version__
+from .models import Aerich
 class Color(str, Enum):
     green = "green"
@@ -23,11 +28,11 @@ parser = ConfigParser()
 @click.group(context_settings={"help_option_names": ["-h", "--help"]})
-@click.version_option(__version__)
+@click.version_option(__version__, "-V", "--version")
 @click.option(
     "-c", "--config", default="aerich.ini", show_default=True, help="Config file.",
 )
-@click.option("--app", default="models", show_default=True, help="Tortoise-ORM app name.")
+@click.option("--app", required=False, help="Tortoise-ORM app name.")
 @click.option(
     "-n",
     "--name",
@@ -38,8 +43,9 @@ parser = ConfigParser()
 @click.pass_context
 async def cli(ctx: Context, config, app, name):
     ctx.ensure_object(dict)
-    ctx.obj["config"] = config
+    ctx.obj["config_file"] = config
     ctx.obj["name"] = name
     invoked_subcommand = ctx.invoked_subcommand
     if invoked_subcommand != "init":
         if not os.path.exists(config):
@@ -50,16 +56,15 @@ async def cli(ctx: Context, config, app, name):
         tortoise_orm = parser[name]["tortoise_orm"]
         tortoise_config = get_tortoise_config(ctx, tortoise_orm)
+        app = app or list(tortoise_config.get("apps").keys())[0]
+        if "aerich.models" not in tortoise_config.get("apps").get(app).get("models"):
+            raise UsageError("Check your tortoise config and add aerich.models to it.", ctx=ctx)
         ctx.obj["config"] = tortoise_config
         ctx.obj["location"] = location
         ctx.obj["app"] = app
         if invoked_subcommand != "init-db":
-            try:
-                await Migrate.init_with_old_models(tortoise_config, app, location)
-            except ConfigurationError:
-                raise UsageError(ctx=ctx, message="You must exec ini-db first")
+            await Migrate.init_with_old_models(tortoise_config, app, location)
 @cli.command(help="Generate migrate changes file.")
@@ -70,7 +75,7 @@ async def migrate(ctx: Context, name):
     location = ctx.obj["location"]
     app = ctx.obj["app"]
-    ret = Migrate.migrate(name)
+    ret = await Migrate.migrate(name)
     if not ret:
         return click.secho("No changes detected", fg=Color.yellow)
     Migrate.write_old_models(config, app, location)
@@ -80,24 +85,27 @@ async def migrate(ctx: Context, name):
 @cli.command(help="Upgrade to latest version.")
 @click.pass_context
 async def upgrade(ctx: Context):
-    app = ctx.obj["app"]
     config = ctx.obj["config"]
-    available_versions = Migrate.get_all_version_files(is_all=False)
-    if not available_versions:
-        return click.secho("No migrate items", fg=Color.yellow)
-    async with in_transaction(get_app_connection_name(config, app)) as conn:
-        for file in available_versions:
-            file_path = os.path.join(Migrate.migrate_location, file)
-            with open(file_path, "r") as f:
-                content = json.load(f)
-                upgrade_query_list = content.get("upgrade")
-                for upgrade_query in upgrade_query_list:
-                    await conn.execute_query(upgrade_query)
-            with open(file_path, "w") as f:
-                content["migrate"] = True
-                json.dump(content, f, indent=2, ensure_ascii=False)
-                click.secho(f"Success upgrade {file}", fg=Color.green)
+    app = ctx.obj["app"]
+    migrated = False
+    for version in Migrate.get_all_version_files():
+        try:
+            exists = await Aerich.exists(version=version, app=app)
+        except OperationalError:
+            exists = False
+        if not exists:
+            async with in_transaction(get_app_connection_name(config, app)) as conn:
+                file_path = os.path.join(Migrate.migrate_location, version)
+                with open(file_path, "r") as f:
+                    content = json.load(f)
+                    upgrade_query_list = content.get("upgrade")
+                    for upgrade_query in upgrade_query_list:
+                        await conn.execute_query(upgrade_query)
+                await Aerich.create(version=version, app=app)
+                click.secho(f"Success upgrade {version}", fg=Color.green)
+                migrated = True
+    if not migrated:
+        click.secho("No migrate items", fg=Color.yellow)
 @cli.command(help="Downgrade to previous version.")
@@ -105,39 +113,45 @@ async def upgrade(ctx: Context):
 async def downgrade(ctx: Context):
     app = ctx.obj["app"]
     config = ctx.obj["config"]
-    available_versions = Migrate.get_all_version_files()
-    if not available_versions:
-        return click.secho("No migrate items", fg=Color.yellow)
-    async with in_transaction(get_app_connection_name(config, app)) as conn:
-        for file in reversed(available_versions):
-            file_path = os.path.join(Migrate.migrate_location, file)
-            with open(file_path, "r") as f:
-                content = json.load(f)
-                if content.get("migrate"):
-                    downgrade_query_list = content.get("downgrade")
-                    for downgrade_query in downgrade_query_list:
-                        await conn.execute_query(downgrade_query)
-                else:
-                    continue
-            with open(file_path, "w") as f:
-                content["migrate"] = False
-                json.dump(content, f, indent=2, ensure_ascii=False)
-        return click.secho(f"Success downgrade {file}", fg=Color.green)
+    last_version = await Migrate.get_last_version()
+    if not last_version:
+        return click.secho("No last version found", fg=Color.yellow)
+    file = last_version.version
+    async with in_transaction(get_app_connection_name(config, app)) as conn:
+        file_path = os.path.join(Migrate.migrate_location, file)
+        with open(file_path, "r") as f:
+            content = json.load(f)
+            downgrade_query_list = content.get("downgrade")
+            if not downgrade_query_list:
+                return click.secho(f"No downgrade item dound", fg=Color.yellow)
+            for downgrade_query in downgrade_query_list:
+                await conn.execute_query(downgrade_query)
+            await last_version.delete()
+    return click.secho(f"Success downgrade {file}", fg=Color.green)
 @cli.command(help="Show current available heads in migrate location.")
 @click.pass_context
-def heads(ctx: Context):
-    for version in Migrate.get_all_version_files(is_all=False):
-        click.secho(version, fg=Color.green)
+async def heads(ctx: Context):
+    app = ctx.obj["app"]
+    versions = Migrate.get_all_version_files()
+    is_heads = False
+    for version in versions:
+        if not await Aerich.exists(version=version, app=app):
+            click.secho(version, fg=Color.green)
+            is_heads = True
+    if not is_heads:
+        click.secho("No available heads,try migrate", fg=Color.green)
 @cli.command(help="List all migrate items.")
 @click.pass_context
 def history(ctx):
-    for version in Migrate.get_all_version_files():
+    versions = Migrate.get_all_version_files()
+    for version in versions:
         click.secho(version, fg=Color.green)
+    if not versions:
+        click.secho("No history,try migrate", fg=Color.green)
 @cli.command(help="Init config file and generate root migrate location.")
@@ -152,29 +166,31 @@ def history(ctx):
 )
 @click.pass_context
 async def init(
     ctx: Context, tortoise_orm, location,
 ):
-    config = ctx.obj["config"]
+    config_file = ctx.obj["config_file"]
     name = ctx.obj["name"]
+    if os.path.exists(config_file):
+        return click.secho("You have inited", fg=Color.yellow)
     parser.add_section(name)
     parser.set(name, "tortoise_orm", tortoise_orm)
     parser.set(name, "location", location)
-    with open(config, "w") as f:
+    with open(config_file, "w") as f:
         parser.write(f)
     if not os.path.isdir(location):
         os.mkdir(location)
     click.secho(f"Success create migrate location {location}", fg=Color.green)
-    click.secho(f"Success generate config file {config}", fg=Color.green)
+    click.secho(f"Success generate config file {config_file}", fg=Color.green)
 @cli.command(help="Generate schema and generate app migrate location.")
 @click.option(
     "--safe",
-    is_flag=True,
+    type=bool,
     default=True,
     help="When set to true, creates the table only when it does not already exist.",
     show_default=True,
@@ -190,7 +206,7 @@ async def init_db(ctx: Context, safe):
         os.mkdir(dirname)
         click.secho(f"Success create app migrate location {dirname}", fg=Color.green)
     else:
-        return click.secho(f'Already inited app "{app}"', fg=Color.yellow)
+        return click.secho(f"Inited {app} already", fg=Color.yellow)
     Migrate.write_old_models(config, app, location)
@@ -198,6 +214,15 @@ async def init_db(ctx: Context, safe):
     connection = get_app_connection(config, app)
     await generate_schema_for_client(connection, safe)
+    schema = get_schema_sql(connection, safe)
+    version = await Migrate.generate_version()
+    await Aerich.create(version=version, app=app)
+    with open(os.path.join(dirname, version), "w") as f:
+        content = {
+            "upgrade": [schema],
+        }
+        json.dump(content, f, ensure_ascii=False, indent=2)
     return click.secho(f'Success generate schema for app "{app}"', fg=Color.green)
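To make the new flow concrete: init-db now writes a version file containing the full schema, and _generate_diff_sql (in aerich/migrate.py below) writes one with "upgrade"/"downgrade" lists that the upgrade command replays. The sketch below is illustrative only — the filename follows the README's example and the SQL strings are invented, not taken from this diff; only the JSON keys come from the code above.
# Rough shape of a version file, e.g. "1_202029051520102929_drop_column.json"
# ({version_num}_{datetime}_{name}.json per the README).
import json

version = "1_202029051520102929_drop_column.json"  # hypothetical filename
content = {
    "upgrade": ["ALTER TABLE `category` DROP COLUMN `name`"],               # example DDL
    "downgrade": ["ALTER TABLE `category` ADD `name` VARCHAR(200) NOT NULL"],  # example DDL
}
with open(version, "w") as f:
    json.dump(content, f, indent=2, ensure_ascii=False)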

aerich/ddl/__init__.py

@@ -8,16 +8,17 @@ from tortoise.fields import Field, JSONField, TextField, UUIDField
 class BaseDDL:
     schema_generator_cls: Type[BaseSchemaGenerator] = BaseSchemaGenerator
     DIALECT = "sql"
-    _DROP_TABLE_TEMPLATE = "DROP TABLE IF EXISTS {table_name}"
-    _ADD_COLUMN_TEMPLATE = "ALTER TABLE {table_name} ADD {column}"
-    _DROP_COLUMN_TEMPLATE = "ALTER TABLE {table_name} DROP COLUMN {column_name}"
+    _DROP_TABLE_TEMPLATE = 'DROP TABLE IF EXISTS "{table_name}"'
+    _ADD_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" ADD {column}'
+    _DROP_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" DROP COLUMN "{column_name}"'
     _ADD_INDEX_TEMPLATE = (
-        "ALTER TABLE {table_name} ADD {unique} INDEX {index_name} ({column_names})"
+        'ALTER TABLE "{table_name}" ADD {unique} INDEX "{index_name}" ({column_names})'
     )
-    _DROP_INDEX_TEMPLATE = "ALTER TABLE {table_name} DROP INDEX {index_name}"
-    _ADD_FK_TEMPLATE = "ALTER TABLE {table_name} ADD CONSTRAINT `{fk_name}` FOREIGN KEY (`{db_column}`) REFERENCES `{table}` (`{field}`) ON DELETE {on_delete}"
-    _DROP_FK_TEMPLATE = "ALTER TABLE {table_name} DROP FOREIGN KEY {fk_name}"
-    _M2M_TABLE_TEMPLATE = "CREATE TABLE {table_name} ({backward_key} {backward_type} NOT NULL REFERENCES {backward_table} ({backward_field}) ON DELETE CASCADE,{forward_key} {forward_type} NOT NULL REFERENCES {forward_table} ({forward_field}) ON DELETE CASCADE){extra}{comment};"
+    _DROP_INDEX_TEMPLATE = 'ALTER TABLE "{table_name}" DROP INDEX "{index_name}"'
+    _ADD_FK_TEMPLATE = 'ALTER TABLE "{table_name}" ADD CONSTRAINT "{fk_name}" FOREIGN KEY ("{db_column}") REFERENCES "{table}" ("{field}") ON DELETE {on_delete}'
+    _DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP FOREIGN KEY "{fk_name}"'
+    _M2M_TABLE_TEMPLATE = 'CREATE TABLE "{table_name}" ("{backward_key}" {backward_type} NOT NULL REFERENCES "{backward_table}" ("{backward_field}") ON DELETE CASCADE,"{forward_key}" {forward_type} NOT NULL REFERENCES "{forward_table}" ("{forward_field}") ON DELETE CASCADE){extra}{comment};'
+    _MODIFY_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" MODIFY COLUMN {column}'
     def __init__(self, client: "BaseDBAsyncClient"):
         self.client = client
@@ -51,7 +52,7 @@ class BaseDDL:
     def drop_m2m(self, field: ManyToManyFieldInstance):
         return self._DROP_TABLE_TEMPLATE.format(table_name=field.through)
-    def add_column(self, model: "Type[Model]", field_object: Field):
+    def _get_default(self, model: "Type[Model]", field_object: Field):
         db_table = model._meta.db_table
         default = field_object.default
         db_column = field_object.model_field_name
@@ -74,6 +75,11 @@ class BaseDDL:
                 default = ""
         else:
             default = ""
+        return default
+    def add_column(self, model: "Type[Model]", field_object: Field):
+        db_table = model._meta.db_table
         return self._ADD_COLUMN_TEMPLATE.format(
             table_name=db_table,
             column=self.schema_generator._create_string(
@@ -89,7 +95,7 @@ class BaseDDL:
                 if field_object.description
                 else "",
                 is_primary_key=field_object.pk,
-                default=default,
+                default=self._get_default(model, field_object),
             ),
         )
@@ -98,6 +104,27 @@ class BaseDDL:
             table_name=model._meta.db_table, column_name=column_name
         )
+    def modify_column(self, model: "Type[Model]", field_object: Field):
+        db_table = model._meta.db_table
+        return self._MODIFY_COLUMN_TEMPLATE.format(
+            table_name=db_table,
+            column=self.schema_generator._create_string(
+                db_column=field_object.model_field_name,
+                field_type=field_object.get_for_dialect(self.DIALECT, "SQL_TYPE"),
+                nullable="NOT NULL" if not field_object.null else "",
+                unique="",
+                comment=self.schema_generator._column_comment_generator(
+                    table=db_table,
+                    column=field_object.model_field_name,
+                    comment=field_object.description,
+                )
+                if field_object.description
+                else "",
+                is_primary_key=field_object.pk,
+                default=self._get_default(model, field_object),
+            ),
+        )
     def add_index(self, model: "Type[Model]", field_names: List[str], unique=False):
         return self._ADD_INDEX_TEMPLATE.format(
             unique="UNIQUE" if unique else "",

aerich/ddl/mysql/__init__.py

@@ -6,3 +6,14 @@ from aerich.ddl import BaseDDL
 class MysqlDDL(BaseDDL):
     schema_generator_cls = MySQLSchemaGenerator
     DIALECT = MySQLSchemaGenerator.DIALECT
+    _DROP_TABLE_TEMPLATE = "DROP TABLE IF EXISTS `{table_name}`"
+    _ADD_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` ADD {column}"
+    _DROP_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` DROP COLUMN `{column_name}`"
+    _ADD_INDEX_TEMPLATE = (
+        "ALTER TABLE `{table_name}` ADD {unique} INDEX `{index_name}` ({column_names})"
+    )
+    _DROP_INDEX_TEMPLATE = "ALTER TABLE `{table_name}` DROP INDEX `{index_name}`"
+    _ADD_FK_TEMPLATE = "ALTER TABLE `{table_name}` ADD CONSTRAINT `{fk_name}` FOREIGN KEY (`{db_column}`) REFERENCES `{table}` (`{field}`) ON DELETE {on_delete}"
+    _DROP_FK_TEMPLATE = "ALTER TABLE `{table_name}` DROP FOREIGN KEY `{fk_name}`"
+    _M2M_TABLE_TEMPLATE = "CREATE TABLE `{table_name}` (`{backward_key}` {backward_type} NOT NULL REFERENCES `{backward_table}` (`{backward_field}`) ON DELETE CASCADE,`{forward_key}` {forward_type} NOT NULL REFERENCES `{forward_table}` (`{forward_field}`) ON DELETE CASCADE){extra}{comment};"
+    _MODIFY_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` MODIFY COLUMN {column}"
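As a rough illustration of the new MySQL templates, the modify-column one renders to a plain ALTER TABLE ... MODIFY COLUMN statement; the table and column definition below are invented example values, not taken from this changeset.
# Sketch only: formatting the new _MODIFY_COLUMN_TEMPLATE with example values.
_MODIFY_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` MODIFY COLUMN {column}"

sql = _MODIFY_COLUMN_TEMPLATE.format(
    table_name="user",                       # hypothetical table
    column="`age` INT NOT NULL",             # hypothetical column definition string
)
print(sql)  # ALTER TABLE `user` MODIFY COLUMN `age` INT NOT NULL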

aerich/ddl/postgres/__init__.py (new file)

@@ -0,0 +1,8 @@
from tortoise.backends.asyncpg.schema_generator import AsyncpgSchemaGenerator
from aerich.ddl import BaseDDL
class PostgresDDL(BaseDDL):
    schema_generator_cls = AsyncpgSchemaGenerator
    DIALECT = AsyncpgSchemaGenerator.DIALECT

aerich/ddl/sqlite/__init__.py (new file)

@@ -0,0 +1,8 @@
from tortoise.backends.sqlite.schema_generator import SqliteSchemaGenerator
from aerich.ddl import BaseDDL
class SqliteDDL(BaseDDL):
    schema_generator_cls = SqliteSchemaGenerator
    DIALECT = SqliteSchemaGenerator.DIALECT

aerich/migrate.py

@@ -3,7 +3,8 @@ import os
 import re
 from copy import deepcopy
 from datetime import datetime
-from typing import Dict, List, Type
+from importlib import import_module
+from typing import Dict, List, Tuple, Type
 from tortoise import (
     BackwardFKRelation,
@@ -13,12 +14,10 @@ from tortoise import (
     Model,
     Tortoise,
 )
-from tortoise.backends.mysql.schema_generator import MySQLSchemaGenerator
 from tortoise.fields import Field
 from aerich.ddl import BaseDDL
-from aerich.ddl.mysql import MysqlDDL
-from aerich.exceptions import ConfigurationError
+from aerich.models import Aerich
 from aerich.utils import get_app_connection
@@ -29,6 +28,7 @@ class Migrate:
     _downgrade_fk_m2m_index_operators: List[str] = []
     _upgrade_m2m: List[str] = []
     _downgrade_m2m: List[str] = []
+    _aerich = Aerich.__name__
     ddl: BaseDDL
     migrate_config: dict
@@ -42,26 +42,12 @@ class Migrate:
         return cls.old_models + ".py"
     @classmethod
-    def _get_all_migrate_files(cls):
+    def get_all_version_files(cls) -> List[str]:
         return sorted(filter(lambda x: x.endswith("json"), os.listdir(cls.migrate_location)))
     @classmethod
-    def _get_latest_version(cls) -> int:
-        ret = cls._get_all_migrate_files()
-        if ret:
-            return int(ret[-1].split("_")[0])
-        return 0
-    @classmethod
-    def get_all_version_files(cls, is_all=True):
-        files = cls._get_all_migrate_files()
-        ret = []
-        for file in files:
-            with open(os.path.join(cls.migrate_location, file), "r") as f:
-                content = json.load(f)
-                if is_all or not content.get("migrate"):
-                    ret.append(file)
-        return ret
+    async def get_last_version(cls) -> Aerich:
+        return await Aerich.filter(app=cls.app).first()
     @classmethod
     async def init_with_old_models(cls, config: dict, app: str, location: str):
@@ -74,46 +60,66 @@ class Migrate:
         await Tortoise.init(config=migrate_config)
         connection = get_app_connection(config, app)
-        if connection.schema_generator is MySQLSchemaGenerator:
-            cls.ddl = MysqlDDL(connection)
-        else:
-            raise NotImplementedError("Current only support MySQL")
+        if connection.schema_generator.DIALECT == "mysql":
+            from aerich.ddl.mysql import MysqlDDL
+            cls.ddl = MysqlDDL(connection)
+        elif connection.schema_generator.DIALECT == "sqlite":
+            from aerich.ddl.sqlite import SqliteDDL
+            cls.ddl = SqliteDDL(connection)
+        elif connection.schema_generator.DIALECT == "postgres":
+            from aerich.ddl.postgres import PostgresDDL
+            cls.ddl = PostgresDDL(connection)
     @classmethod
-    def _generate_diff_sql(cls, name):
+    async def _get_last_version_num(cls):
+        last_version = await cls.get_last_version()
+        if not last_version:
+            return None
+        version = last_version.version
+        return int(version.split("_")[0])
+    @classmethod
+    async def generate_version(cls, name=None):
         now = datetime.now().strftime("%Y%M%D%H%M%S").replace("/", "")
-        filename = f"{cls._get_latest_version() + 1}_{now}_{name}.json"
+        last_version_num = await cls._get_last_version_num()
+        if last_version_num is None:
+            return f"0_{now}_init.json"
+        return f"{last_version_num + 1}_{now}_{name}.json"
+    @classmethod
+    async def _generate_diff_sql(cls, name):
+        version = await cls.generate_version(name)
         content = {
             "upgrade": cls.upgrade_operators,
             "downgrade": cls.downgrade_operators,
-            "migrate": False,
         }
-        with open(os.path.join(cls.migrate_location, filename), "w") as f:
+        with open(os.path.join(cls.migrate_location, version), "w") as f:
             json.dump(content, f, indent=2, ensure_ascii=False)
-        return filename
+        return version
     @classmethod
-    def migrate(cls, name):
+    async def migrate(cls, name) -> str:
         """
        diff old models and new models to generate diff content
        :param name:
        :return:
        """
-        if not cls.migrate_config:
-            raise ConfigurationError("You must call init_with_old_models() first!")
         apps = Tortoise.apps
         diff_models = apps.get(cls.diff_app)
         app_models = apps.get(cls.app)
-        cls._diff_models(diff_models, app_models)
-        cls._diff_models(app_models, diff_models, False)
+        cls.diff_models(diff_models, app_models)
+        cls.diff_models(app_models, diff_models, False)
         cls._merge_operators()
         if not cls.upgrade_operators:
-            return False
+            return ""
-        return cls._generate_diff_sql(name)
+        return await cls._generate_diff_sql(name)
     @classmethod
     def _add_operator(cls, operator: str, upgrade=True, fk=False):
@@ -146,12 +152,13 @@ class Migrate:
        :param old_model_file:
        :return:
        """
-        pattern = rf"\(('|\")({app})(.\w+)('|\")"
+        pattern = rf"(\n)?('|\")({app})(.\w+)('|\")"
         for i, model_file in enumerate(model_files):
             with open(model_file, "r") as f:
                 content = f.read()
-            ret = re.sub(pattern, rf"(\1{cls.diff_app}\3\4", content)
-            with open(old_model_file, "w" if i == 0 else "w+a") as f:
+            ret = re.sub(pattern, rf"\2{cls.diff_app}\4\5", content)
+            mode = "w" if i == 0 else "a"
+            with open(old_model_file, mode) as f:
                 f.write(ret)
     @classmethod
@@ -166,7 +173,10 @@ class Migrate:
         temp_config = deepcopy(config)
         path = os.path.join(location, app, cls.old_models)
         path = path.replace("/", ".").lstrip(".")
-        temp_config["apps"][cls.diff_app] = {"models": [path]}
+        temp_config["apps"][cls.diff_app] = {
+            "models": [path],
+            "default_connection": config.get("apps").get(app).get("default_connection", "default"),
+        }
         return temp_config
     @classmethod
@@ -178,15 +188,17 @@ class Migrate:
        :param location:
        :return:
        """
+        cls.app = app
         old_model_files = []
         models = config.get("apps").get(app).get("models")
         for model in models:
-            old_model_files.append(model.replace(".", "/") + ".py")
+            old_model_files.append(import_module(model).__file__)
         cls.cp_models(app, old_model_files, os.path.join(location, app, cls.get_old_model_file()))
     @classmethod
-    def _diff_models(
+    def diff_models(
         cls, old_models: Dict[str, Type[Model]], new_models: Dict[str, Type[Model]], upgrade=True
     ):
         """
@@ -196,6 +208,9 @@ class Migrate:
        :param upgrade:
        :return:
        """
+        old_models.pop(cls._aerich, None)
+        new_models.pop(cls._aerich, None)
         for new_model_str, new_model in new_models.items():
             if new_model_str not in old_models.keys():
                 cls._add_operator(cls.add_model(new_model), upgrade)
@@ -206,6 +221,10 @@ class Migrate:
             if old_model not in new_models.keys():
                 cls._add_operator(cls.remove_model(old_models.get(old_model)), upgrade)
+    @classmethod
+    def _is_fk_m2m(cls, field: Field):
+        return isinstance(field, (ForeignKeyFieldInstance, ManyToManyFieldInstance))
     @classmethod
     def add_model(cls, model: Type[Model]):
         return cls.ddl.create_table(model)
@@ -246,32 +265,38 @@ class Migrate:
                     )
             else:
                 old_field = old_fields_map.get(new_key)
+                new_field_dict = new_field.describe(serializable=True)
+                new_field_dict.pop("unique")
+                new_field_dict.pop("indexed")
+                old_field_dict = old_field.describe(serializable=True)
+                old_field_dict.pop("unique")
+                old_field_dict.pop("indexed")
+                if not cls._is_fk_m2m(new_field) and new_field_dict != old_field_dict:
+                    cls._add_operator(cls._modify_field(new_model, new_field), upgrade=upgrade)
                 if (old_field.index and not new_field.index) or (
                     old_field.unique and not new_field.unique
                 ):
                     cls._add_operator(
                         cls._remove_index(
-                            old_model, [old_field.model_field_name], old_field.unique
+                            old_model, (old_field.model_field_name,), old_field.unique
                         ),
                         upgrade,
-                        isinstance(old_field, (ForeignKeyFieldInstance, ManyToManyFieldInstance)),
+                        cls._is_fk_m2m(old_field),
                     )
                 elif (new_field.index and not old_field.index) or (
                     new_field.unique and not old_field.unique
                 ):
                     cls._add_operator(
-                        cls._add_index(new_model, [new_field.model_field_name], new_field.unique),
+                        cls._add_index(new_model, (new_field.model_field_name,), new_field.unique),
                         upgrade,
-                        isinstance(new_field, (ForeignKeyFieldInstance, ManyToManyFieldInstance)),
+                        cls._is_fk_m2m(new_field),
                     )
         for old_key in old_keys:
             field = old_fields_map.get(old_key)
             if old_key not in new_keys and not cls._exclude_field(field, upgrade):
                 cls._add_operator(
-                    cls._remove_field(old_model, field),
-                    upgrade,
-                    isinstance(field, (ForeignKeyFieldInstance, ManyToManyFieldInstance)),
+                    cls._remove_field(old_model, field), upgrade, cls._is_fk_m2m(field),
                 )
         for new_index in new_indexes:
@@ -290,7 +315,7 @@ class Migrate:
         cls._add_operator(cls._remove_index(old_model, old_unique, unique=True), upgrade)
     @classmethod
-    def _resolve_fk_fields_name(cls, model: Type[Model], fields_name: List[str]):
+    def _resolve_fk_fields_name(cls, model: Type[Model], fields_name: Tuple[str]):
         ret = []
         for field_name in fields_name:
             if field_name in model._meta.fk_fields:
@@ -300,12 +325,12 @@ class Migrate:
         return ret
     @classmethod
-    def _remove_index(cls, model: Type[Model], fields_name: List[str], unique=False):
+    def _remove_index(cls, model: Type[Model], fields_name: Tuple[str], unique=False):
         fields_name = cls._resolve_fk_fields_name(model, fields_name)
         return cls.ddl.drop_index(model, fields_name, unique)
     @classmethod
-    def _add_index(cls, model: Type[Model], fields_name: List[str], unique=False):
+    def _add_index(cls, model: Type[Model], fields_name: Tuple[str], unique=False):
         fields_name = cls._resolve_fk_fields_name(model, fields_name)
         return cls.ddl.add_index(model, fields_name, unique)
@@ -340,6 +365,10 @@ class Migrate:
             return cls.ddl.create_m2m_table(model, field)
         return cls.ddl.add_column(model, field)
+    @classmethod
+    def _modify_field(cls, model: Type[Model], field: Field):
+        return cls.ddl.modify_column(model, field)
     @classmethod
     def _remove_field(cls, model: Type[Model], field: Field):
         if isinstance(field, ForeignKeyFieldInstance):

aerich/models.py

@@ -0,0 +1,9 @@
from tortoise import Model, fields
class Aerich(Model):
    version = fields.CharField(max_length=50)
    app = fields.CharField(max_length=20)
    class Meta:
        ordering = ["-id"]
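This new Aerich model is what upgrade, heads, and Migrate.get_last_version query to decide which version files still need to run. Below is a minimal sketch of that bookkeeping under the same assumptions; the helper names are invented, but the ORM calls mirror the cli.py and migrate.py hunks above.
from aerich.models import Aerich

# Hypothetical wrappers around the calls used in this changeset.
async def is_applied(version: str, app: str) -> bool:
    # One row per successfully applied version file (see the upgrade command).
    return await Aerich.exists(version=version, app=app)

async def mark_applied(version: str, app: str) -> None:
    await Aerich.create(version=version, app=app)

async def last_applied(app: str):
    # Meta.ordering = ["-id"] makes .first() return the most recent record,
    # which is how Migrate.get_last_version picks the downgrade target.
    return await Aerich.filter(app=app).first()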

aerich/utils.py

@@ -1,10 +1,10 @@
 import importlib
 from asyncclick import BadOptionUsage, Context
-from tortoise import Tortoise
+from tortoise import BaseDBAsyncClient, Tortoise
-def get_app_connection_name(config, app):
+def get_app_connection_name(config, app) -> str:
     """
     get connection name
     :param config:
@@ -14,7 +14,7 @@ def get_app_connection_name(config, app):
     return config.get("apps").get(app).get("default_connection")
-def get_app_connection(config, app):
+def get_app_connection(config, app) -> BaseDBAsyncClient:
     """
     get connection name
     :param config:

conftest.py

@@ -0,0 +1,61 @@
import asyncio
import os
import pytest
from tortoise import Tortoise, expand_db_url, generate_schema_for_client
from tortoise.backends.asyncpg.schema_generator import AsyncpgSchemaGenerator
from tortoise.backends.mysql.schema_generator import MySQLSchemaGenerator
from tortoise.backends.sqlite.schema_generator import SqliteSchemaGenerator
from aerich.ddl.mysql import MysqlDDL
from aerich.ddl.postgres import PostgresDDL
from aerich.ddl.sqlite import SqliteDDL
from aerich.migrate import Migrate
db_url = os.getenv("TEST_DB", "sqlite://:memory:")
tortoise_orm = {
    "connections": {"default": expand_db_url(db_url, True)},
    "apps": {
        "models": {"models": ["tests.models", "aerich.models"], "default_connection": "default",},
    },
}
@pytest.fixture(scope="function", autouse=True)
def reset_migrate():
    Migrate.upgrade_operators = []
    Migrate.downgrade_operators = []
    Migrate._upgrade_fk_m2m_index_operators = []
    Migrate._downgrade_fk_m2m_index_operators = []
    Migrate._upgrade_m2m = []
    Migrate._downgrade_m2m = []
@pytest.fixture(scope="session")
def loop():
    loop = asyncio.get_event_loop()
    return loop
@pytest.fixture(scope="session", autouse=True)
def initialize_tests(loop, request):
    tortoise_orm["connections"]["diff_models"] = "sqlite://:memory:"
    tortoise_orm["apps"]["diff_models"] = {
        "models": ["tests.diff_models"],
        "default_connection": "diff_models",
    }
    loop.run_until_complete(Tortoise.init(config=tortoise_orm, _create_db=True))
    loop.run_until_complete(
        generate_schema_for_client(Tortoise.get_connection("default"), safe=True)
    )
    client = Tortoise.get_connection("default")
    if client.schema_generator is MySQLSchemaGenerator:
        Migrate.ddl = MysqlDDL(client)
    elif client.schema_generator is SqliteSchemaGenerator:
        Migrate.ddl = SqliteDDL(client)
    elif client.schema_generator is AsyncpgSchemaGenerator:
        Migrate.ddl = PostgresDDL(client)
    request.addfinalizer(lambda: loop.run_until_complete(Tortoise._drop_databases()))

poetry.lock

@@ -2,7 +2,7 @@
category = "main" category = "main"
description = "MySQL driver for asyncio." description = "MySQL driver for asyncio."
name = "aiomysql" name = "aiomysql"
optional = false optional = true
python-versions = "*" python-versions = "*"
version = "0.0.20" version = "0.0.20"
@@ -26,7 +26,7 @@ description = "High level compatibility layer for multiple asynchronous event lo
name = "anyio" name = "anyio"
optional = false optional = false
python-versions = ">=3.5.3" python-versions = ">=3.5.3"
version = "1.3.0" version = "1.3.1"
[package.dependencies] [package.dependencies]
async-generator = "*" async-generator = "*"
@@ -38,6 +38,14 @@ doc = ["sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"]
test = ["coverage (>=4.5)", "hypothesis (>=4.0)", "pytest (>=3.7.2)", "uvloop"] test = ["coverage (>=4.5)", "hypothesis (>=4.0)", "pytest (>=3.7.2)", "uvloop"]
trio = ["trio (>=0.12)"] trio = ["trio (>=0.12)"]
[[package]]
category = "dev"
description = "apipkg: namespace control and lazy-import mechanism"
name = "apipkg"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
version = "1.5"
[[package]] [[package]]
category = "dev" category = "dev"
description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
@@ -69,13 +77,27 @@ anyio = "*"
dev = ["coverage", "pytest-runner", "pytest-trio", "pytest (>=3)", "sphinx", "tox"] dev = ["coverage", "pytest-runner", "pytest-trio", "pytest (>=3)", "sphinx", "tox"]
docs = ["sphinx"] docs = ["sphinx"]
[[package]]
category = "main"
description = "An asyncio PostgreSQL driver"
name = "asyncpg"
optional = true
python-versions = ">=3.5.0"
version = "0.20.1"
[package.extras]
dev = ["Cython (0.29.14)", "pytest (>=3.6.0)", "Sphinx (>=1.7.3,<1.8.0)", "sphinxcontrib-asyncio (>=0.2.0,<0.3.0)", "sphinx-rtd-theme (>=0.2.4,<0.3.0)", "pycodestyle (>=2.5.0,<2.6.0)", "flake8 (>=3.7.9,<3.8.0)", "uvloop (>=0.14.0,<0.15.0)"]
docs = ["Sphinx (>=1.7.3,<1.8.0)", "sphinxcontrib-asyncio (>=0.2.0,<0.3.0)", "sphinx-rtd-theme (>=0.2.4,<0.3.0)"]
test = ["pycodestyle (>=2.5.0,<2.6.0)", "flake8 (>=3.7.9,<3.8.0)", "uvloop (>=0.14.0,<0.15.0)"]
[[package]] [[package]]
category = "dev" category = "dev"
description = "Enhance the standard unittest package with features for testing asyncio libraries" description = "Atomic file writes."
name = "asynctest" marker = "sys_platform == \"win32\""
name = "atomicwrites"
optional = false optional = false
python-versions = ">=3.5" python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
version = "0.13.0" version = "1.4.0"
[[package]] [[package]]
category = "dev" category = "dev"
@@ -115,7 +137,7 @@ d = ["aiohttp (>=3.3.2)", "aiohttp-cors"]
category = "main" category = "main"
description = "Foreign Function Interface for Python calling C code." description = "Foreign Function Interface for Python calling C code."
name = "cffi" name = "cffi"
optional = false optional = true
python-versions = "*" python-versions = "*"
version = "1.14.0" version = "1.14.0"
@@ -139,11 +161,20 @@ optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
version = "7.1.2" version = "7.1.2"
[[package]]
category = "dev"
description = "Cross-platform colored terminal text."
marker = "sys_platform == \"win32\""
name = "colorama"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
version = "0.4.3"
[[package]] [[package]]
category = "main" category = "main"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
name = "cryptography" name = "cryptography"
optional = false optional = true
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"
version = "2.9.2" version = "2.9.2"
@@ -158,13 +189,27 @@ idna = ["idna (>=2.1)"]
pep8test = ["flake8", "flake8-import-order", "pep8-naming"] pep8test = ["flake8", "flake8-import-order", "pep8-naming"]
test = ["pytest (>=3.6.0,<3.9.0 || >3.9.0,<3.9.1 || >3.9.1,<3.9.2 || >3.9.2)", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,<3.79.2 || >3.79.2)"] test = ["pytest (>=3.6.0,<3.9.0 || >3.9.0,<3.9.1 || >3.9.1,<3.9.2 || >3.9.2)", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,<3.79.2 || >3.79.2)"]
[[package]]
category = "dev"
description = "execnet: rapid multi-Python deployment"
name = "execnet"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
version = "1.7.1"
[package.dependencies]
apipkg = ">=1.4"
[package.extras]
testing = ["pre-commit"]
[[package]] [[package]]
category = "dev" category = "dev"
description = "the modular source code checker: pep8 pyflakes and co" description = "the modular source code checker: pep8 pyflakes and co"
name = "flake8" name = "flake8"
optional = false optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"
version = "3.8.1" version = "3.8.2"
[package.dependencies] [package.dependencies]
mccabe = ">=0.6.0,<0.7.0" mccabe = ">=0.6.0,<0.7.0"
@@ -202,6 +247,50 @@ optional = false
python-versions = "*" python-versions = "*"
version = "0.6.1" version = "0.6.1"
[[package]]
category = "dev"
description = "More routines for operating on iterables, beyond itertools"
name = "more-itertools"
optional = false
python-versions = ">=3.5"
version = "8.3.0"
[[package]]
category = "dev"
description = "Optional static typing for Python"
name = "mypy"
optional = false
python-versions = ">=3.5"
version = "0.770"
[package.dependencies]
mypy-extensions = ">=0.4.3,<0.5.0"
typed-ast = ">=1.4.0,<1.5.0"
typing-extensions = ">=3.7.4"
[package.extras]
dmypy = ["psutil (>=4.0)"]
[[package]]
category = "dev"
description = "Experimental type system extensions for programs checked with the mypy typechecker."
name = "mypy-extensions"
optional = false
python-versions = "*"
version = "0.4.3"
[[package]]
category = "dev"
description = "Core utilities for Python packages"
name = "packaging"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
version = "20.4"
[package.dependencies]
pyparsing = ">=2.0.2"
six = "*"
[[package]] [[package]]
category = "dev" category = "dev"
description = "Utility library for gitignore style pattern matching of file paths." description = "Utility library for gitignore style pattern matching of file paths."
@@ -210,6 +299,25 @@ optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
version = "0.8.0" version = "0.8.0"
[[package]]
category = "dev"
description = "plugin and hook calling mechanisms for python"
name = "pluggy"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
version = "0.13.1"
[package.extras]
dev = ["pre-commit", "tox"]
[[package]]
category = "dev"
description = "library with cross-python path, ini-parsing, io, code, log facilities"
name = "py"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
version = "1.8.1"
[[package]] [[package]]
category = "dev" category = "dev"
description = "Python style guide checker" description = "Python style guide checker"
@@ -222,10 +330,23 @@ version = "2.6.0"
category = "main" category = "main"
description = "C parser in Python" description = "C parser in Python"
name = "pycparser" name = "pycparser"
optional = false optional = true
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
version = "2.20" version = "2.20"
[[package]]
category = "main"
description = "Data validation and settings management using python 3.6 type hinting"
name = "pydantic"
optional = false
python-versions = ">=3.6"
version = "1.5.1"
[package.extras]
dotenv = ["python-dotenv (>=0.10.4)"]
email = ["email-validator (>=1.0.3)"]
typing_extensions = ["typing-extensions (>=3.7.2)"]
[[package]] [[package]]
category = "dev" category = "dev"
description = "passive checker of Python programs" description = "passive checker of Python programs"
@@ -238,20 +359,92 @@ version = "2.2.0"
category = "main" category = "main"
description = "Pure Python MySQL Driver" description = "Pure Python MySQL Driver"
name = "pymysql" name = "pymysql"
optional = false optional = true
python-versions = "*" python-versions = "*"
version = "0.9.2" version = "0.9.2"
[package.dependencies] [package.dependencies]
cryptography = "*" cryptography = "*"
[[package]]
category = "dev"
description = "Python parsing module"
name = "pyparsing"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
version = "2.4.7"
[[package]] [[package]]
category = "main" category = "main"
description = "A SQL query builder API for Python" description = "A SQL query builder API for Python"
name = "pypika" name = "pypika"
optional = false optional = false
python-versions = "*" python-versions = "*"
version = "0.37.6" version = "0.37.7"
[[package]]
category = "dev"
description = "pytest: simple powerful testing with Python"
name = "pytest"
optional = false
python-versions = ">=3.5"
version = "5.4.2"
[package.dependencies]
atomicwrites = ">=1.0"
attrs = ">=17.4.0"
colorama = "*"
more-itertools = ">=4.0.0"
packaging = "*"
pluggy = ">=0.12,<1.0"
py = ">=1.5.0"
wcwidth = "*"
[package.extras]
checkqa-mypy = ["mypy (v0.761)"]
testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"]
[[package]]
category = "dev"
description = "Pytest support for asyncio."
name = "pytest-asyncio"
optional = false
python-versions = ">= 3.5"
version = "0.12.0"
[package.dependencies]
pytest = ">=5.4.0"
[package.extras]
testing = ["async_generator (>=1.3)", "coverage", "hypothesis (>=5.7.1)"]
[[package]]
category = "dev"
description = "run tests in isolated forked subprocesses"
name = "pytest-forked"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
version = "1.1.3"
[package.dependencies]
pytest = ">=3.1.0"
[[package]]
category = "dev"
description = "pytest xdist plugin for distributed testing and loop-on-failing modes"
name = "pytest-xdist"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
version = "1.32.0"
[package.dependencies]
execnet = ">=1.1"
pytest = ">=4.4.0"
pytest-forked = "*"
six = "*"
[package.extras]
testing = ["filelock"]
[[package]] [[package]]
category = "dev" category = "dev"
@@ -267,7 +460,7 @@ description = "Python 2 and 3 compatibility utilities"
name = "six" name = "six"
optional = false optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
version = "1.14.0" version = "1.15.0"
[[package]] [[package]]
category = "main" category = "main"
@@ -302,7 +495,7 @@ description = "Easy async ORM for python, built with relations in mind"
name = "tortoise-orm" name = "tortoise-orm"
optional = false optional = false
python-versions = "*" python-versions = "*"
version = "0.16.11" version = "0.16.12"
[package.dependencies] [package.dependencies]
aiosqlite = ">=0.11.0" aiosqlite = ">=0.11.0"
@@ -311,10 +504,6 @@ iso8601 = ">=0.1.12"
pypika = ">=0.36.5" pypika = ">=0.36.5"
typing-extensions = ">=3.7" typing-extensions = ">=3.7"
[package.source]
reference = "95c384a4742ee5980f8e4ae934bfdb0d8137bb40"
type = "git"
url = "https://github.com/tortoise/tortoise-orm.git"
[[package]] [[package]]
category = "dev" category = "dev"
description = "a fork of Python 2 and 3 ast modules with type comment support" description = "a fork of Python 2 and 3 ast modules with type comment support"
@@ -331,8 +520,19 @@ optional = false
python-versions = "*" python-versions = "*"
version = "3.7.4.2" version = "3.7.4.2"
[[package]]
category = "dev"
description = "Measures the displayed width of unicode strings in a terminal"
name = "wcwidth"
optional = false
python-versions = "*"
version = "0.2.2"
[extras]
dbdrivers = ["aiomysql", "asyncpg"]
[metadata]
-content-hash = "9ce51215dcc82924bab54fca5ee46097cb5ccc2f6ecd455994b9b9f37b801523"
+content-hash = "6b1f30cb32cf5915f1ee1f6c6b0e52130bc8f7af92f1a9703dc9632ebce2a977"
python-versions = "^3.8"
[metadata.files]
@@ -345,8 +545,12 @@ aiosqlite = [
{file = "aiosqlite-0.13.0.tar.gz", hash = "sha256:6e92961ae9e606b43b05e29b129e346b29e400fcbd63e3c0c564d89230257645"},
]
anyio = [
-{file = "anyio-1.3.0-py3-none-any.whl", hash = "sha256:db2c3d21576870b95d4fd0b8f4a0f9c64057f777c578f3a8127179a17c8c067e"},
-{file = "anyio-1.3.0.tar.gz", hash = "sha256:7deae0315dd10aa41c21528b83352e4b52f44e6153a21081a3d1cd8c03728e46"},
+{file = "anyio-1.3.1-py3-none-any.whl", hash = "sha256:f21b4fafeec1b7db81e09a907e44e374a1e39718d782a488fdfcdcf949c8950c"},
+{file = "anyio-1.3.1.tar.gz", hash = "sha256:a46bb2b7743455434afd9adea848a3c4e0b7321aee3e9d08844b11d348d3b5a0"},
]
apipkg = [
{file = "apipkg-1.5-py2.py3-none-any.whl", hash = "sha256:58587dd4dc3daefad0487f6d9ae32b4542b185e1c36db6993290e7c41ca2b47c"},
{file = "apipkg-1.5.tar.gz", hash = "sha256:37228cda29411948b422fae072f57e31d3396d2ee1c9783775980ee9c9990af6"},
]
appdirs = [
{file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"},
@@ -359,9 +563,32 @@ async-generator = [
asyncclick = [
{file = "asyncclick-7.0.9.tar.gz", hash = "sha256:62cebf3eca36d973802e2dd521ca1db11c5bf4544e9795e093d1a53cb688a8c2"},
]
-asynctest = [
-{file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"},
-{file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"},
asyncpg = [
{file = "asyncpg-0.20.1-cp35-cp35m-macosx_10_13_x86_64.whl", hash = "sha256:f7184689177eeb5a11fa1b2baf3f6f2e26bfd7a85acf4de1a3adbd0867d7c0e2"},
{file = "asyncpg-0.20.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:f0c9719ac00615f097fe91082b785bce36dbf02a5ec4115ede0ebfd2cd9500cb"},
{file = "asyncpg-0.20.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:1388caa456070dab102be874205e3ae8fd1de2577d5de9fa22e65ba5c0f8b110"},
{file = "asyncpg-0.20.1-cp35-cp35m-win32.whl", hash = "sha256:ec6e7046c98730cb2ba4df41387e10cb8963a3ac2918f69ae416f8aab9ca7b1b"},
{file = "asyncpg-0.20.1-cp35-cp35m-win_amd64.whl", hash = "sha256:25edb0b947eb632b6b53e5a4b36cba5677297bb34cbaba270019714d0a5fed76"},
{file = "asyncpg-0.20.1-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:95cd2df61ee00b789bdcd04a080e6d9188693b841db2bf9a87ebaed9e53147e0"},
{file = "asyncpg-0.20.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:058baec9d6b75612412baa872a1aa47317d0ff88c318a49f9c4a2389043d5a8d"},
{file = "asyncpg-0.20.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:c773c7dbe2f4d3ebc9e3030e94303e45d6742e6c2fc25da0c46a56ea3d83caeb"},
{file = "asyncpg-0.20.1-cp36-cp36m-win32.whl", hash = "sha256:5664d1bd8abe64fc60a0e701eb85fa1d8c9a4a8018a5a59164d27238f2caf395"},
{file = "asyncpg-0.20.1-cp36-cp36m-win_amd64.whl", hash = "sha256:57666dfae38f4dbf84ffbf0c5c0f78733fef0e8e083230275dcb9ccad1d5ee09"},
{file = "asyncpg-0.20.1-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:0c336903c3b08e970f8af2f606332f1738dba156bca83ed0467dc2f5c70da796"},
{file = "asyncpg-0.20.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:ad5ba062e09673b1a4b8d0facaf5a6d9719bf7b337440d10b07fe994d90a9552"},
{file = "asyncpg-0.20.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ba90d3578bc6dddcbce461875672fd9bdb34f0b8215b68612dd3b65a956ff51c"},
{file = "asyncpg-0.20.1-cp37-cp37m-win32.whl", hash = "sha256:da238592235717419a6a7b5edc8564da410ebfd056ca4ecc41e70b1b5df86fba"},
{file = "asyncpg-0.20.1-cp37-cp37m-win_amd64.whl", hash = "sha256:74510234c294c6a6767089ba9c938f09a491426c24405634eb357bd91dffd734"},
{file = "asyncpg-0.20.1-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:391aea89871df8c1560750af6c7170f2772c2d133b34772acf3637e3cf4db93e"},
{file = "asyncpg-0.20.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:a981500bf6947926e53c48f4d60ae080af1b4ad7fa78e363465a5b5ad4f2b65e"},
{file = "asyncpg-0.20.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:a9e6fd6f0f9e8bd77e9a4e1ef9a4f83a80674d9136a754ae3603e915da96b627"},
{file = "asyncpg-0.20.1-cp38-cp38-win32.whl", hash = "sha256:e39aac2b3a2f839ce65aa255ce416de899c58b7d38d601d24ca35558e13b48e3"},
{file = "asyncpg-0.20.1-cp38-cp38-win_amd64.whl", hash = "sha256:2af6a5a705accd36e13292ea43d08c20b15e52d684beb522cb3a7d3c9c8f3f48"},
{file = "asyncpg-0.20.1.tar.gz", hash = "sha256:394bf19bdddbba07a38cd6fb526ebf66e120444d6b3097332b78efd5b26495b0"},
]
atomicwrites = [
{file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
{file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
]
attrs = [
{file = "attrs-19.3.0-py2.py3-none-any.whl", hash = "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c"},
@@ -408,6 +635,10 @@ click = [
{file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"},
{file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"},
] ]
colorama = [
{file = "colorama-0.4.3-py2.py3-none-any.whl", hash = "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff"},
{file = "colorama-0.4.3.tar.gz", hash = "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"},
]
cryptography = [
{file = "cryptography-2.9.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:daf54a4b07d67ad437ff239c8a4080cfd1cc7213df57d33c97de7b4738048d5e"},
{file = "cryptography-2.9.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:3b3eba865ea2754738616f87292b7f29448aec342a7c720956f8083d252bf28b"},
@@ -429,9 +660,13 @@ cryptography = [
{file = "cryptography-2.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:1dfa985f62b137909496e7fc182dac687206d8d089dd03eaeb28ae16eec8e7d5"}, {file = "cryptography-2.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:1dfa985f62b137909496e7fc182dac687206d8d089dd03eaeb28ae16eec8e7d5"},
{file = "cryptography-2.9.2.tar.gz", hash = "sha256:a0c30272fb4ddda5f5ffc1089d7405b7a71b0b0f51993cb4e5dbb4590b2fc229"}, {file = "cryptography-2.9.2.tar.gz", hash = "sha256:a0c30272fb4ddda5f5ffc1089d7405b7a71b0b0f51993cb4e5dbb4590b2fc229"},
] ]
execnet = [
{file = "execnet-1.7.1-py2.py3-none-any.whl", hash = "sha256:d4efd397930c46415f62f8a31388d6be4f27a91d7550eb79bc64a756e0056547"},
{file = "execnet-1.7.1.tar.gz", hash = "sha256:cacb9df31c9680ec5f95553976c4da484d407e85e41c83cb812aa014f0eddc50"},
]
flake8 = [
-{file = "flake8-3.8.1-py2.py3-none-any.whl", hash = "sha256:6c1193b0c3f853ef763969238f6c81e9e63ace9d024518edc020d5f1d6d93195"},
-{file = "flake8-3.8.1.tar.gz", hash = "sha256:ea6623797bf9a52f4c9577d780da0bb17d65f870213f7b5bcc9fca82540c31d5"},
+{file = "flake8-3.8.2-py2.py3-none-any.whl", hash = "sha256:ccaa799ef9893cebe69fdfefed76865aeaefbb94cb8545617b2298786a4de9a5"},
+{file = "flake8-3.8.2.tar.gz", hash = "sha256:c69ac1668e434d37a2d2880b3ca9aafd54b3a10a3ac1ab101d22f29e29cf8634"},
]
iso8601 = [
{file = "iso8601-0.1.12-py2.py3-none-any.whl", hash = "sha256:210e0134677cc0d02f6028087fee1df1e1d76d372ee1db0bf30bf66c5c1c89a3"},
@@ -446,10 +681,46 @@ mccabe = [
{file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
{file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
] ]
more-itertools = [
{file = "more-itertools-8.3.0.tar.gz", hash = "sha256:558bb897a2232f5e4f8e2399089e35aecb746e1f9191b6584a151647e89267be"},
{file = "more_itertools-8.3.0-py3-none-any.whl", hash = "sha256:7818f596b1e87be009031c7653d01acc46ed422e6656b394b0f765ce66ed4982"},
]
mypy = [
{file = "mypy-0.770-cp35-cp35m-macosx_10_6_x86_64.whl", hash = "sha256:a34b577cdf6313bf24755f7a0e3f3c326d5c1f4fe7422d1d06498eb25ad0c600"},
{file = "mypy-0.770-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:86c857510a9b7c3104cf4cde1568f4921762c8f9842e987bc03ed4f160925754"},
{file = "mypy-0.770-cp35-cp35m-win_amd64.whl", hash = "sha256:a8ffcd53cb5dfc131850851cc09f1c44689c2812d0beb954d8138d4f5fc17f65"},
{file = "mypy-0.770-cp36-cp36m-macosx_10_6_x86_64.whl", hash = "sha256:7687f6455ec3ed7649d1ae574136835a4272b65b3ddcf01ab8704ac65616c5ce"},
{file = "mypy-0.770-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3beff56b453b6ef94ecb2996bea101a08f1f8a9771d3cbf4988a61e4d9973761"},
{file = "mypy-0.770-cp36-cp36m-win_amd64.whl", hash = "sha256:15b948e1302682e3682f11f50208b726a246ab4e6c1b39f9264a8796bb416aa2"},
{file = "mypy-0.770-cp37-cp37m-macosx_10_6_x86_64.whl", hash = "sha256:b90928f2d9eb2f33162405f32dde9f6dcead63a0971ca8a1b50eb4ca3e35ceb8"},
{file = "mypy-0.770-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:c56ffe22faa2e51054c5f7a3bc70a370939c2ed4de308c690e7949230c995913"},
{file = "mypy-0.770-cp37-cp37m-win_amd64.whl", hash = "sha256:8dfb69fbf9f3aeed18afffb15e319ca7f8da9642336348ddd6cab2713ddcf8f9"},
{file = "mypy-0.770-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:219a3116ecd015f8dca7b5d2c366c973509dfb9a8fc97ef044a36e3da66144a1"},
{file = "mypy-0.770-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7ec45a70d40ede1ec7ad7f95b3c94c9cf4c186a32f6bacb1795b60abd2f9ef27"},
{file = "mypy-0.770-cp38-cp38-win_amd64.whl", hash = "sha256:f91c7ae919bbc3f96cd5e5b2e786b2b108343d1d7972ea130f7de27fdd547cf3"},
{file = "mypy-0.770-py3-none-any.whl", hash = "sha256:3b1fc683fb204c6b4403a1ef23f0b1fac8e4477091585e0c8c54cbdf7d7bb164"},
{file = "mypy-0.770.tar.gz", hash = "sha256:8a627507ef9b307b46a1fea9513d5c98680ba09591253082b4c48697ba05a4ae"},
]
mypy-extensions = [
{file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
{file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
]
packaging = [
{file = "packaging-20.4-py2.py3-none-any.whl", hash = "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"},
{file = "packaging-20.4.tar.gz", hash = "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8"},
]
pathspec = [
{file = "pathspec-0.8.0-py2.py3-none-any.whl", hash = "sha256:7d91249d21749788d07a2d0f94147accd8f845507400749ea19c1ec9054a12b0"},
{file = "pathspec-0.8.0.tar.gz", hash = "sha256:da45173eb3a6f2a5a487efba21f050af2b41948be6ab52b6a1e3ff22bb8b7061"},
]
pluggy = [
{file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"},
{file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"},
]
py = [
{file = "py-1.8.1-py2.py3-none-any.whl", hash = "sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0"},
{file = "py-1.8.1.tar.gz", hash = "sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa"},
]
pycodestyle = [
{file = "pycodestyle-2.6.0-py2.py3-none-any.whl", hash = "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367"},
{file = "pycodestyle-2.6.0.tar.gz", hash = "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e"},
@@ -458,6 +729,25 @@ pycparser = [
{file = "pycparser-2.20-py2.py3-none-any.whl", hash = "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"}, {file = "pycparser-2.20-py2.py3-none-any.whl", hash = "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"},
{file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"}, {file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"},
] ]
pydantic = [
{file = "pydantic-1.5.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2a6904e9f18dea58f76f16b95cba6a2f20b72d787abd84ecd67ebc526e61dce6"},
{file = "pydantic-1.5.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:da8099fca5ee339d5572cfa8af12cf0856ae993406f0b1eb9bb38c8a660e7416"},
{file = "pydantic-1.5.1-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:68dece67bff2b3a5cc188258e46b49f676a722304f1c6148ae08e9291e284d98"},
{file = "pydantic-1.5.1-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:ab863853cb502480b118187d670f753be65ec144e1654924bec33d63bc8b3ce2"},
{file = "pydantic-1.5.1-cp36-cp36m-win_amd64.whl", hash = "sha256:2007eb062ed0e57875ce8ead12760a6e44bf5836e6a1a7ea81d71eeecf3ede0f"},
{file = "pydantic-1.5.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:20a15a303ce1e4d831b4e79c17a4a29cb6740b12524f5bba3ea363bff65732bc"},
{file = "pydantic-1.5.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:473101121b1bd454c8effc9fe66d54812fdc128184d9015c5aaa0d4e58a6d338"},
{file = "pydantic-1.5.1-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:9be755919258d5d168aeffbe913ed6e8bd562e018df7724b68cabdee3371e331"},
{file = "pydantic-1.5.1-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:b96ce81c4b5ca62ab81181212edfd057beaa41411cd9700fbcb48a6ba6564b4e"},
{file = "pydantic-1.5.1-cp37-cp37m-win_amd64.whl", hash = "sha256:93b9f265329d9827f39f0fca68f5d72cc8321881cdc519a1304fa73b9f8a75bd"},
{file = "pydantic-1.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e2c753d355126ddd1eefeb167fa61c7037ecd30b98e7ebecdc0d1da463b4ea09"},
{file = "pydantic-1.5.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:8433dbb87246c0f562af75d00fa80155b74e4f6924b0db6a2078a3cd2f11c6c4"},
{file = "pydantic-1.5.1-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:0a1cdf24e567d42dc762d3fed399bd211a13db2e8462af9dfa93b34c41648efb"},
{file = "pydantic-1.5.1-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:8be325fc9da897029ee48d1b5e40df817d97fe969f3ac3fd2434ba7e198c55d5"},
{file = "pydantic-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:3714a4056f5bdbecf3a41e0706ec9b228c9513eee2ad884dc2c568c4dfa540e9"},
{file = "pydantic-1.5.1-py36.py37.py38-none-any.whl", hash = "sha256:70f27d2f0268f490fe3de0a9b6fca7b7492b8fd6623f9fecd25b221ebee385e3"},
{file = "pydantic-1.5.1.tar.gz", hash = "sha256:f0018613c7a0d19df3240c2a913849786f21b6539b9f23d85ce4067489dfacfa"},
]
pyflakes = [
{file = "pyflakes-2.2.0-py2.py3-none-any.whl", hash = "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92"},
{file = "pyflakes-2.2.0.tar.gz", hash = "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8"},
@@ -466,8 +756,27 @@ pymysql = [
{file = "PyMySQL-0.9.2-py2.py3-none-any.whl", hash = "sha256:95f057328357e0e13a30e67857a8c694878b0175797a9a203ee7adbfb9b1ec5f"}, {file = "PyMySQL-0.9.2-py2.py3-none-any.whl", hash = "sha256:95f057328357e0e13a30e67857a8c694878b0175797a9a203ee7adbfb9b1ec5f"},
{file = "PyMySQL-0.9.2.tar.gz", hash = "sha256:9ec760cbb251c158c19d6c88c17ca00a8632bac713890e465b2be01fdc30713f"}, {file = "PyMySQL-0.9.2.tar.gz", hash = "sha256:9ec760cbb251c158c19d6c88c17ca00a8632bac713890e465b2be01fdc30713f"},
] ]
pyparsing = [
{file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"},
{file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"},
]
pypika = [
-{file = "PyPika-0.37.6.tar.gz", hash = "sha256:64510fa36667e8bb654bdc1be5a3a77bac1dbc2f03d4848efac08e39d9cac6f5"},
+{file = "PyPika-0.37.7.tar.gz", hash = "sha256:20bebc05983cd401d428e3beb62d037e5f0271daab2bb5aba82f4e092d4a3694"},
]
pytest = [
{file = "pytest-5.4.2-py3-none-any.whl", hash = "sha256:95c710d0a72d91c13fae35dce195633c929c3792f54125919847fdcdf7caa0d3"},
{file = "pytest-5.4.2.tar.gz", hash = "sha256:eb2b5e935f6a019317e455b6da83dd8650ac9ffd2ee73a7b657a30873d67a698"},
]
pytest-asyncio = [
{file = "pytest-asyncio-0.12.0.tar.gz", hash = "sha256:475bd2f3dc0bc11d2463656b3cbaafdbec5a47b47508ea0b329ee693040eebd2"},
]
pytest-forked = [
{file = "pytest-forked-1.1.3.tar.gz", hash = "sha256:1805699ed9c9e60cb7a8179b8d4fa2b8898098e82d229b0825d8095f0f261100"},
{file = "pytest_forked-1.1.3-py2.py3-none-any.whl", hash = "sha256:1ae25dba8ee2e56fb47311c9638f9e58552691da87e82d25b0ce0e4bf52b7d87"},
]
pytest-xdist = [
{file = "pytest-xdist-1.32.0.tar.gz", hash = "sha256:1d4166dcac69adb38eeaedb88c8fada8588348258a3492ab49ba9161f2971129"},
{file = "pytest_xdist-1.32.0-py2.py3-none-any.whl", hash = "sha256:ba5ec9fde3410bd9a116ff7e4f26c92e02fa3d27975ef3ad03f330b3d4b54e91"},
]
regex = [
{file = "regex-2020.5.14-cp27-cp27m-win32.whl", hash = "sha256:e565569fc28e3ba3e475ec344d87ed3cd8ba2d575335359749298a0899fe122e"},
@@ -493,8 +802,8 @@ regex = [
{file = "regex-2020.5.14.tar.gz", hash = "sha256:ce450ffbfec93821ab1fea94779a8440e10cf63819be6e176eb1973a6017aff5"}, {file = "regex-2020.5.14.tar.gz", hash = "sha256:ce450ffbfec93821ab1fea94779a8440e10cf63819be6e176eb1973a6017aff5"},
] ]
six = [ six = [
{file = "six-1.14.0-py2.py3-none-any.whl", hash = "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c"}, {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"},
{file = "six-1.14.0.tar.gz", hash = "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a"}, {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"},
] ]
sniffio = [ sniffio = [
{file = "sniffio-1.1.0-py3-none-any.whl", hash = "sha256:20ed6d5b46f8ae136d00b9dcb807615d83ed82ceea6b2058cecb696765246da5"}, {file = "sniffio-1.1.0-py3-none-any.whl", hash = "sha256:20ed6d5b46f8ae136d00b9dcb807615d83ed82ceea6b2058cecb696765246da5"},
@@ -508,7 +817,9 @@ toml = [
{file = "toml-0.10.1-py2.py3-none-any.whl", hash = "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"}, {file = "toml-0.10.1-py2.py3-none-any.whl", hash = "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"},
{file = "toml-0.10.1.tar.gz", hash = "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f"}, {file = "toml-0.10.1.tar.gz", hash = "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f"},
] ]
tortoise-orm = [] tortoise-orm = [
{file = "tortoise-orm-0.16.12.tar.gz", hash = "sha256:170e4bbfe1c98223ad1fba33d7fded7923e4bb49c9d74c78bd173a0ebc861658"},
]
typed-ast = [
{file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3"},
{file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb"},
@@ -537,3 +848,7 @@ typing-extensions = [
{file = "typing_extensions-3.7.4.2-py3-none-any.whl", hash = "sha256:6e95524d8a547a91e08f404ae485bbb71962de46967e1b71a0cb89af24e761c5"}, {file = "typing_extensions-3.7.4.2-py3-none-any.whl", hash = "sha256:6e95524d8a547a91e08f404ae485bbb71962de46967e1b71a0cb89af24e761c5"},
{file = "typing_extensions-3.7.4.2.tar.gz", hash = "sha256:79ee589a3caca649a9bfd2a8de4709837400dfa00b6cc81962a1e6a1815969ae"}, {file = "typing_extensions-3.7.4.2.tar.gz", hash = "sha256:79ee589a3caca649a9bfd2a8de4709837400dfa00b6cc81962a1e6a1815969ae"},
] ]
wcwidth = [
{file = "wcwidth-0.2.2-py2.py3-none-any.whl", hash = "sha256:b651b6b081476420e4e9ae61239ac4c1b49d0c5ace42b2e81dc2ff49ed50c566"},
{file = "wcwidth-0.2.2.tar.gz", hash = "sha256:3de2e41158cb650b91f9654cbf9a3e053cee0719c9df4ddc11e4b568669e9829"},
]


@@ -1,27 +1,43 @@
[tool.poetry]
name = "aerich"
-version = "0.1.1"
+version = "0.2.0"
description = "A database migrations tool for Tortoise ORM."
authors = ["long2ice <long2ice@gmail.com>"]
license = "MIT"
readme = "README.rst"
homepage = "https://github.com/long2ice/aerich"
repository = "git@github.com:long2ice/aerich.git"
documentation = "https://github.com/long2ice/aerich"
keywords = ["migrate", "Tortoise-ORM", "mysql"]
packages = [
{ include = "aerich" }
]
include = ["CHANGELOG.rst", "LICENSE", "README.rst"]
[tool.poetry.dependencies]
python = "^3.8"
-tortoise-orm = {git = "https://github.com/tortoise/tortoise-orm.git", branch = "develop"}
-aiomysql = "*"
+tortoise-orm = "*"
asyncclick = "*"
pydantic = "*"
aiomysql = {version = "*", optional = true}
asyncpg = {version = "*", optional = true}
[tool.poetry.dev-dependencies]
taskipy = "*"
-asynctest = "*"
flake8 = "*"
isort = "*"
black = "^19.10b0"
pytest = "*"
pytest-xdist = "*"
mypy = "*"
pytest-asyncio = "*"
-[tool.taskipy.tasks]
-export = "poetry export -f requirements.txt --without-hashes > requirements.txt"
-export-dev = "poetry export -f requirements.txt --dev --without-hashes > requirements-dev.txt"
+[tool.poetry.extras]
+dbdrivers = ["aiomysql", "asyncpg"]
[build-system]
requires = ["poetry>=0.12"]
build-backend = "poetry.masonry.api"
[tool.poetry.scripts]
aerich = "aerich.cli:main"

pytest.ini Normal file

@@ -0,0 +1,2 @@
[pytest]
addopts = -p no:warnings --ignore=src
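A second aside, not taken from the diff: with pytest, pytest-asyncio, and pytest-xdist added as dev dependencies and this pytest.ini in place, the suite would typically be run in parallel via pytest-xdist's standard -n option, for example:
# distribute tests across all available CPU cores
pytest -n auto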


@@ -1,31 +0,0 @@
aiomysql==0.0.20
aiosqlite==0.13.0
anyio==1.3.0
appdirs==1.4.4
async-generator==1.10
asyncclick==7.0.9
asynctest==0.13.0
attrs==19.3.0
black==19.10b0
cffi==1.14.0
ciso8601==2.1.3; sys_platform != "win32" and implementation_name == "cpython"
click==7.1.2
cryptography==2.9.2
flake8==3.8.1
iso8601==0.1.12; sys_platform == "win32" or implementation_name != "cpython"
isort==4.3.21
mccabe==0.6.1
pathspec==0.8.0
pycodestyle==2.6.0
pycparser==2.20
pyflakes==2.2.0
pymysql==0.9.2
pypika==0.37.6
regex==2020.5.14
six==1.14.0
sniffio==1.1.0
taskipy==1.2.1
toml==0.10.1
-e git+https://github.com/tortoise/tortoise-orm.git@95c384a4742ee5980f8e4ae934bfdb0d8137bb40#egg=tortoise-orm
typed-ast==1.4.1
typing-extensions==3.7.4.2


@@ -1,15 +0,0 @@
aiomysql==0.0.20
aiosqlite==0.13.0
anyio==1.3.0
async-generator==1.10
asyncclick==7.0.9
cffi==1.14.0
ciso8601==2.1.3; sys_platform != "win32" and implementation_name == "cpython"
cryptography==2.9.2
iso8601==0.1.12; sys_platform == "win32" or implementation_name != "cpython"
pycparser==2.20
pymysql==0.9.2
pypika==0.37.6
six==1.14.0
sniffio==1.1.0
typing-extensions==3.7.4.2


@@ -1,18 +0,0 @@
[flake8]
max-line-length = 100
exclude =
ignore = E501,W503,DAR101,DAR201,DAR402
[darglint]
docstring_style=sphinx
[isort]
not_skip=__init__.py
multi_line_output=3
include_trailing_comma=True
force_grid_wrap=0
use_parentheses=True
line_length=100


@@ -1,44 +0,0 @@
import os
import re
from setuptools import find_packages, setup
def version():
ver_str_line = open('aerich/__init__.py', 'rt').read()
mob = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", ver_str_line, re.M)
if not mob:
raise RuntimeError("Unable to find version string")
return mob.group(1)
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as f:
long_description = f.read()
def requirements():
return open('requirements.txt', 'rt').read().splitlines()
setup(
name='aerich',
version=version(),
description='A database migrations tool for Tortoise-ORM.',
author='long2ice',
long_description_content_type='text/x-rst',
long_description=long_description,
author_email='long2ice@gmail.com',
url='https://github.com/long2ice/aerich',
license='MIT License',
packages=find_packages(include=['aerich*']),
include_package_data=True,
zip_safe=True,
entry_points={
'console_scripts': ['aerich = aerich.cli:main'],
},
platforms='any',
keywords=(
'migrate Tortoise-ORM mysql'
),
dependency_links=['https://github.com/tortoise/tortoise-orm.git@branch#egg=tortoise-orm'],
install_requires=requirements(),
)


@@ -1,19 +0,0 @@
from asynctest import TestCase
from tortoise import Tortoise
from aerich.ddl.mysql import MysqlDDL
TORTOISE_ORM = {
"connections": {"default": "mysql://root:123456@127.0.0.1:3306/test",},
"apps": {"models": {"models": ["tests.models"], "default_connection": "default",},},
}
class DBTestCase(TestCase):
async def setUp(self) -> None:
await Tortoise.init(config=TORTOISE_ORM)
self.client = Tortoise.get_connection("default")
self.ddl = MysqlDDL(self.client)
async def tearDown(self) -> None:
await Tortoise.close_connections()


@@ -1,55 +0,0 @@
from tests.backends.mysql import DBTestCase
from tests.models import Category
class TestDDL(DBTestCase):
def test_create_table(self):
ret = self.ddl.create_table(Category)
self.assertEqual(
ret,
"""CREATE TABLE IF NOT EXISTS `category` (
`id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
`slug` VARCHAR(200) NOT NULL,
`name` VARCHAR(200) NOT NULL,
`created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6),
`user_id` INT NOT NULL COMMENT 'User',
CONSTRAINT `fk_category_user_e2e3874c` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE
) CHARACTER SET utf8mb4;""",
)
def test_drop_table(self):
ret = self.ddl.drop_table(Category)
self.assertEqual(ret, "DROP TABLE category IF EXISTS")
def test_add_column(self):
ret = self.ddl.add_column(Category, Category._meta.fields_map.get("name"))
self.assertEqual(ret, "ALTER TABLE category ADD `name` VARCHAR(200) NOT NULL")
def test_drop_column(self):
ret = self.ddl.drop_column(Category, "name")
self.assertEqual(ret, "ALTER TABLE category DROP COLUMN name")
def test_add_index(self):
ret = self.ddl.add_index(Category, ["name"])
self.assertEqual(ret, "ALTER TABLE category ADD INDEX idx_category_name_8b0cb9 (`name`)")
ret = self.ddl.add_index(Category, ["name"], True)
self.assertEqual(
ret, "ALTER TABLE category ADD UNIQUE INDEX uid_category_name_8b0cb9 (`name`)"
)
def test_drop_index(self):
ret = self.ddl.drop_index(Category, ["name"])
self.assertEqual(ret, "ALTER TABLE category DROP INDEX idx_category_name_8b0cb9")
ret = self.ddl.drop_index(Category, ["name"], True)
self.assertEqual(ret, "ALTER TABLE category DROP INDEX uid_category_name_8b0cb9")
def test_add_fk(self):
ret = self.ddl.add_fk(Category, Category._meta.fields_map.get("user"))
self.assertEqual(
ret,
"ALTER TABLE category ADD CONSTRAINT `fk_category_user_366ffa6f` FOREIGN KEY (`user`) REFERENCES `user` (`id`) ON DELETE CASCADE",
)
def test_drop_fk(self):
ret = self.ddl.drop_fk(Category, Category._meta.fields_map.get("user"))
self.assertEqual(ret, "ALTER TABLE category DROP FOREIGN KEY fk_category_user_366ffa6f")


@@ -1,17 +0,0 @@
from asynctest import TestCase
from tortoise import Tortoise
from aerich.migrate import Migrate
from tests.backends.mysql import TORTOISE_ORM
class TestMigrate(TestCase):
async def setUp(self) -> None:
await Migrate.init_with_old_models(TORTOISE_ORM, "models", "./migrations")
async def test_migrate(self):
Migrate.diff_model(
Tortoise.apps.get("models").get("Category"),
Tortoise.apps.get("diff_models").get("Category"),
)
print(Migrate.upgrade_operators)

tests/diff_models.py Normal file

@@ -0,0 +1,56 @@
import datetime
from enum import IntEnum
from tortoise import Model, fields
class ProductType(IntEnum):
article = 1
page = 2
class PermissionAction(IntEnum):
create = 1
delete = 2
update = 3
read = 4
class Status(IntEnum):
on = 1
off = 0
class User(Model):
username = fields.CharField(max_length=20,)
password = fields.CharField(max_length=200)
last_login = fields.DatetimeField(description="Last Login", default=datetime.datetime.now)
is_active = fields.BooleanField(default=True, description="Is Active")
is_superuser = fields.BooleanField(default=False, description="Is SuperUser")
avatar = fields.CharField(max_length=200, default="")
intro = fields.TextField(default="")
class Category(Model):
slug = fields.CharField(max_length=200)
user = fields.ForeignKeyField("diff_models.User", description="User")
created_at = fields.DatetimeField(auto_now_add=True)
class Product(Model):
categories = fields.ManyToManyField("diff_models.Category")
name = fields.CharField(max_length=50)
view_num = fields.IntField(description="View Num")
sort = fields.IntField()
is_reviewed = fields.BooleanField(description="Is Reviewed")
type = fields.IntEnumField(ProductType, description="Product Type")
image = fields.CharField(max_length=200)
body = fields.TextField()
created_at = fields.DatetimeField(auto_now_add=True)
class Config(Model):
label = fields.CharField(max_length=200)
key = fields.CharField(max_length=20)
value = fields.JSONField()
status: Status = fields.IntEnumField(Status, default=Status.on)


@@ -30,9 +30,6 @@ class User(Model):
avatar = fields.CharField(max_length=200, default="")
intro = fields.TextField(default="")
-def __str__(self):
-return f"{self.pk}#{self.username}"
class Category(Model):
slug = fields.CharField(max_length=200)
@@ -40,9 +37,6 @@ class Category(Model):
user = fields.ForeignKeyField("models.User", description="User")
created_at = fields.DatetimeField(auto_now_add=True)
-def __str__(self):
-return f"{self.pk}#{self.name}"
class Product(Model):
categories = fields.ManyToManyField("models.Category")
@@ -55,15 +49,9 @@ class Product(Model):
body = fields.TextField()
created_at = fields.DatetimeField(auto_now_add=True)
-def __str__(self):
-return f"{self.pk}#{self.name}"
class Config(Model):
label = fields.CharField(max_length=200)
key = fields.CharField(max_length=20)
value = fields.JSONField()
status: Status = fields.IntEnumField(Status, default=Status.on)
-def __str__(self):
-return f"{self.pk}#{self.label}"

tests/test_ddl.py Normal file

@@ -0,0 +1,123 @@
from aerich.ddl.mysql import MysqlDDL
from aerich.ddl.postgres import PostgresDDL
from aerich.ddl.sqlite import SqliteDDL
from aerich.migrate import Migrate
from tests.models import Category
def test_create_table():
ret = Migrate.ddl.create_table(Category)
if isinstance(Migrate.ddl, MysqlDDL):
assert (
ret
== """CREATE TABLE IF NOT EXISTS `category` (
`id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
`slug` VARCHAR(200) NOT NULL,
`name` VARCHAR(200) NOT NULL,
`created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6),
`user_id` INT NOT NULL COMMENT 'User',
CONSTRAINT `fk_category_user_e2e3874c` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE
) CHARACTER SET utf8mb4;"""
)
elif isinstance(Migrate.ddl, SqliteDDL):
assert (
ret
== """CREATE TABLE IF NOT EXISTS "category" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
"slug" VARCHAR(200) NOT NULL,
"name" VARCHAR(200) NOT NULL,
"created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
"user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE /* User */
);"""
)
elif isinstance(Migrate.ddl, PostgresDDL):
assert (
ret
== """CREATE TABLE IF NOT EXISTS "category" (
"id" SERIAL NOT NULL PRIMARY KEY,
"slug" VARCHAR(200) NOT NULL,
"name" VARCHAR(200) NOT NULL,
"created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
"user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE
);
COMMENT ON COLUMN "category"."user_id" IS 'User';"""
)
def test_drop_table():
ret = Migrate.ddl.drop_table(Category)
if isinstance(Migrate.ddl, MysqlDDL):
assert ret == "DROP TABLE IF EXISTS `category`"
else:
assert ret == 'DROP TABLE IF EXISTS "category"'
def test_add_column():
ret = Migrate.ddl.add_column(Category, Category._meta.fields_map.get("name"))
if isinstance(Migrate.ddl, MysqlDDL):
assert ret == "ALTER TABLE `category` ADD `name` VARCHAR(200) NOT NULL"
else:
assert ret == 'ALTER TABLE "category" ADD "name" VARCHAR(200) NOT NULL'
def test_modify_column():
ret = Migrate.ddl.modify_column(Category, Category._meta.fields_map.get("name"))
if isinstance(Migrate.ddl, MysqlDDL):
assert ret == "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200) NOT NULL"
else:
assert ret == 'ALTER TABLE "category" MODIFY COLUMN "name" VARCHAR(200) NOT NULL'
def test_drop_column():
ret = Migrate.ddl.drop_column(Category, "name")
if isinstance(Migrate.ddl, MysqlDDL):
assert ret == "ALTER TABLE `category` DROP COLUMN `name`"
else:
assert ret == 'ALTER TABLE "category" DROP COLUMN "name"'
def test_add_index():
index = Migrate.ddl.add_index(Category, ["name"])
index_u = Migrate.ddl.add_index(Category, ["name"], True)
if isinstance(Migrate.ddl, MysqlDDL):
assert index == "ALTER TABLE `category` ADD INDEX `idx_category_name_8b0cb9` (`name`)"
assert (
index_u == "ALTER TABLE `category` ADD UNIQUE INDEX `uid_category_name_8b0cb9` (`name`)"
)
else:
assert index == 'ALTER TABLE "category" ADD INDEX "idx_category_name_8b0cb9" ("name")'
assert (
index_u == 'ALTER TABLE "category" ADD UNIQUE INDEX "uid_category_name_8b0cb9" ("name")'
)
def test_drop_index():
ret = Migrate.ddl.drop_index(Category, ["name"])
if isinstance(Migrate.ddl, MysqlDDL):
assert ret == "ALTER TABLE `category` DROP INDEX `idx_category_name_8b0cb9`"
else:
assert ret == 'ALTER TABLE "category" DROP INDEX "idx_category_name_8b0cb9"'
def test_add_fk():
ret = Migrate.ddl.add_fk(Category, Category._meta.fields_map.get("user"))
if isinstance(Migrate.ddl, MysqlDDL):
assert (
ret
== "ALTER TABLE `category` ADD CONSTRAINT `fk_category_user_e2e3874c` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE"
)
else:
assert (
ret
== 'ALTER TABLE "category" ADD CONSTRAINT "fk_category_user_e2e3874c" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE'
)
def test_drop_fk():
ret = Migrate.ddl.drop_fk(Category, Category._meta.fields_map.get("user"))
if isinstance(Migrate.ddl, MysqlDDL):
assert ret == "ALTER TABLE `category` DROP FOREIGN KEY `fk_category_user_e2e3874c`"
else:
assert ret == 'ALTER TABLE "category" DROP FOREIGN KEY "fk_category_user_e2e3874c"'

tests/test_migrate.py Normal file

@@ -0,0 +1,30 @@
from tortoise import Tortoise
from aerich.ddl.mysql import MysqlDDL
from aerich.migrate import Migrate
def test_migrate():
apps = Tortoise.apps
models = apps.get("models")
diff_models = apps.get("diff_models")
Migrate.diff_models(diff_models, models)
Migrate.diff_models(models, diff_models, False)
if isinstance(Migrate.ddl, MysqlDDL):
assert Migrate.upgrade_operators == [
"ALTER TABLE `category` ADD `name` VARCHAR(200) NOT NULL",
"ALTER TABLE `user` ADD UNIQUE INDEX `uid_user_usernam_9987ab` (`username`)",
]
assert Migrate.downgrade_operators == [
"ALTER TABLE `category` DROP COLUMN `name`",
"ALTER TABLE `user` DROP INDEX `uid_user_usernam_9987ab`",
]
else:
assert Migrate.upgrade_operators == [
'ALTER TABLE "category" ADD "name" VARCHAR(200) NOT NULL',
'ALTER TABLE "user" ADD UNIQUE INDEX "uid_user_usernam_9987ab" ("username")',
]
assert Migrate.downgrade_operators == [
'ALTER TABLE "category" DROP COLUMN "name"',
'ALTER TABLE "user" DROP INDEX "uid_user_usernam_9987ab"',
]


@@ -1,6 +0,0 @@
from unittest import TestCase
class TestUtils(TestCase):
def test_get_app_connection(self):
pass

tox.ini

@@ -1,11 +0,0 @@
[tox]
envlist = py{37,38,39}
skip_missing_interpreters = True
[testenv]
whitelist_externals=
make
commands=
make ci
deps =
-r requirements-dev.txt