25 Commits

Author SHA1 Message Date
long2ice  73b75349ee  update version 0.2.0  2020-06-12 17:53:17 +08:00
long2ice  7bc553221a  raise NotImplementedError  2020-06-12 09:31:01 +08:00
long2ice  7413a05e19  set --safe bool  2020-06-08 18:07:41 +08:00
long2ice  bf194ca8ce  Update model file find method  2020-06-03 18:42:35 +08:00
long2ice  b06da0223a  add --build  2020-06-03 09:39:52 +08:00
long2ice  83554cdc5d  Merge remote-tracking branch 'origin/dev' into dev  2020-06-03 09:38:20 +08:00
                      # Conflicts:
                      #	.github/workflows/pypi.yml
                      #	.github/workflows/test.yml
                      #	Makefile
long2ice  6c76bfccad  Merge remote-tracking branch 'origin/dev' into dev  2020-06-02 22:23:26 +08:00
long2ice  a1746e457c  update github actions  2020-06-02 22:23:15 +08:00
long2ice  2a0435dea9  update github actions  2020-06-02 22:14:44 +08:00
long2ice  e87f67f1e1  update github actions  2020-06-02 22:02:46 +08:00
long2ice  7b4b7ac749  update github actions  2020-06-02 18:58:59 +08:00
long2ice  5b9b51db3f  update github actions  2020-06-02 18:38:39 +08:00
long2ice  ffeee3c901  update github actions  2020-06-02 18:28:50 +08:00
long2ice  b4366d2427  update github actions  2020-06-02 18:20:55 +08:00
long2ice  ec1c80f3a9  remove requirements  2020-06-01 14:57:29 +08:00
long2ice  d2083632eb  add cli -V  2020-05-27 12:44:49 +08:00
long2ice  125389461f  check tortoise add aerich.models  2020-05-26 14:44:55 +08:00
long2ice  c09c878eaf  add support modify column  2020-05-26 10:22:02 +08:00
                      diff mysql ddl
long2ice  ef3e0c11d5  update version  2020-05-25 23:46:35 +08:00
long2ice  881f70f748  Fix default_connection when upgrade  2020-05-25 23:44:42 +08:00
long2ice  6ffca1a0c7  add support modify column  2020-05-25 22:39:39 +08:00
long2ice  95e41720cb  Fix init db sql error  2020-05-25 18:53:34 +08:00
long2ice  40c0008e6e  Fix upgrade error when migrate  2020-05-25 18:02:56 +08:00
long2ice  ce75e55d60  update README.rst  2020-05-25 16:36:18 +08:00
long2ice  4d4f951e09  update README.rst  2020-05-25 16:33:56 +08:00
20 changed files with 232 additions and 277 deletions

View File

@@ -8,12 +8,12 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 - uses: actions/checkout@v2
-- uses: actions/setup-python@v1
+- uses: actions/setup-python@v2
 with:
 python-version: '3.x'
+- uses: dschep/install-poetry-action@v1.3
 - name: Build dists
-run: |
-python3 setup.py sdist
+run: make build
 - name: Pypi Publish
 uses: pypa/gh-action-pypi-publish@master
 with:

View File

@@ -19,10 +19,7 @@ jobs:
 - uses: actions/setup-python@v2
 with:
 python-version: '3.x'
-- name: Install dependencies
-run: |
-python -m pip install --upgrade pip
-pip install -r requirements-dev.txt
+- uses: dschep/install-poetry-action@v1.3
 - name: CI
 env:
 MYSQL_PASS: root
@@ -31,4 +28,4 @@ jobs:
 POSTGRES_PASS: 123456
 POSTGRES_HOST: 127.0.0.1
 POSTGRES_PORT: 5432
-run: make testall
+run: make ci

View File

@@ -1,9 +1,28 @@
 =========
 ChangeLog
 =========
+0.2
+===
+0.2.0
+-----
+- Update model file find method.
+- Set ``--safe`` bool.
 0.1
 ===
+0.1.9
+-----
+- Fix default_connection when upgrade
+- Find default app instead of default.
+- Diff MySQL ddl.
+- Check tortoise config.
+0.1.8
+-----
+- Fix upgrade error when migrate.
+- Fix init db sql error.
+- Support change column.
 0.1.7
 -----
 - Exclude models.Aerich.

View File

@@ -1,3 +0,0 @@
include LICENSE
include README.rst
include requirements.txt

View File

@@ -12,15 +12,17 @@ help:
 @echo "usage: make <target>"
 @echo "Targets:"
 @echo " up Updates dev/test dependencies"
 @echo " deps Ensure dev/test dependencies are installed"
 @echo " check Checks that build is sane"
 @echo " lint Reports all linter violations"
 @echo " test Runs all tests"
 @echo " style Auto-formats the code"
+up:
+@poetry update
 deps:
-@which pip-sync > /dev/null || pip install -q pip-tools
-@pip install -r requirements-dev.txt
+@poetry install -E dbdrivers
 style: deps
 isort -rc $(checkfiles)
@@ -50,10 +52,10 @@ test_postgres:
 testall: deps test_sqlite test_postgres test_mysql
-publish: deps
-rm -fR dist/
-python setup.py sdist
-twine upload dist/*
-ci:
-@act -P ubuntu-latest=nektos/act-environments-ubuntu:18.04 -b
+build: deps
+@poetry build
+publish: deps
+@poetry publish --build
+ci: testall

View File

@@ -159,6 +159,11 @@ Show heads to be migrated
 1_202029051520102929_drop_column.json
+Limitations
+===========
+* Not support ``rename column`` now.
+* ``Sqlite`` and ``Postgres`` may not work as expected because I don't use those in my work.
 License
 =======
 This project is licensed under the `MIT <https://github.com/long2ice/aerich/blob/master/LICENSE>`_ License.
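Related note: the cli.py change further below now refuses to run unless ``aerich.models`` is registered in the Tortoise config. A minimal config that satisfies that check might look like the sketch here; the MySQL URL and the app name "models" are illustrative, only the presence of "aerich.models" in the models list matters.

# A minimal Tortoise ORM config assumed for illustration; mirrors the shape used in conftest.py.
TORTOISE_ORM = {
    "connections": {"default": "mysql://root:123456@127.0.0.1:3306/test"},
    "apps": {
        "models": {
            "models": ["tests.models", "aerich.models"],
            "default_connection": "default",
        },
    },
}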

View File

@@ -1 +1 @@
-__version__ = "0.1.7"
+__version__ = "0.1.9"

View File

@@ -7,6 +7,7 @@ from enum import Enum
 import asyncclick as click
 from asyncclick import Context, UsageError
 from tortoise import Tortoise, generate_schema_for_client
+from tortoise.exceptions import OperationalError
 from tortoise.transactions import in_transaction
 from tortoise.utils import get_schema_sql
@@ -27,11 +28,11 @@ parser = ConfigParser()
 @click.group(context_settings={"help_option_names": ["-h", "--help"]})
-@click.version_option(__version__)
+@click.version_option(__version__, "-V", "--version")
 @click.option(
 "-c", "--config", default="aerich.ini", show_default=True, help="Config file.",
 )
-@click.option("--app", default="models", show_default=True, help="Tortoise-ORM app name.")
+@click.option("--app", required=False, help="Tortoise-ORM app name.")
 @click.option(
 "-n",
 "--name",
@@ -44,7 +45,6 @@ async def cli(ctx: Context, config, app, name):
 ctx.ensure_object(dict)
 ctx.obj["config_file"] = config
 ctx.obj["name"] = name
-ctx.obj["app"] = app
 invoked_subcommand = ctx.invoked_subcommand
 if invoked_subcommand != "init":
@@ -56,9 +56,12 @@
 tortoise_orm = parser[name]["tortoise_orm"]
 tortoise_config = get_tortoise_config(ctx, tortoise_orm)
+app = app or list(tortoise_config.get("apps").keys())[0]
+if "aerich.models" not in tortoise_config.get("apps").get(app).get("models"):
+raise UsageError("Check your tortoise config and add aerich.models to it.", ctx=ctx)
 ctx.obj["config"] = tortoise_config
 ctx.obj["location"] = location
+ctx.obj["app"] = app
 if invoked_subcommand != "init-db":
 await Migrate.init_with_old_models(tortoise_config, app, location)
@@ -86,7 +89,11 @@ async def upgrade(ctx: Context):
 app = ctx.obj["app"]
 migrated = False
 for version in Migrate.get_all_version_files():
-if not await Aerich.exists(version=version, app=app):
+try:
+exists = await Aerich.exists(version=version, app=app)
+except OperationalError:
+exists = False
+if not exists:
 async with in_transaction(get_app_connection_name(config, app)) as conn:
 file_path = os.path.join(Migrate.migrate_location, version)
 with open(file_path, "r") as f:
@@ -183,7 +190,7 @@ async def init(
 @cli.command(help="Generate schema and generate app migrate location.")
 @click.option(
 "--safe",
-is_flag=True,
+type=bool,
 default=True,
 help="When set to true, creates the table only when it does not already exist.",
 show_default=True,
@@ -213,7 +220,7 @@ async def init_db(ctx: Context, safe):
 await Aerich.create(version=version, app=app)
 with open(os.path.join(dirname, version), "w") as f:
 content = {
-"upgrade": schema,
+"upgrade": [schema],
 }
 json.dump(content, f, ensure_ascii=False, indent=2)
 return click.secho(f'Success generate schema for app "{app}"', fg=Color.green)
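For reference, the last hunk means the version file written by ``init-db`` now stores ``upgrade`` as a list of statements rather than a single string. A rough sketch of what gets written, where the file name and the schema SQL are placeholders and only the {"upgrade": [...]} shape comes from the diff:

import json

# Hypothetical version file produced by init-db after this change.
content = {"upgrade": ['CREATE TABLE "category" ("id" SERIAL NOT NULL PRIMARY KEY);']}
with open("0_20200612000000_init.json", "w") as f:
    json.dump(content, f, ensure_ascii=False, indent=2)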

View File

@@ -8,16 +8,17 @@ from tortoise.fields import Field, JSONField, TextField, UUIDField
 class BaseDDL:
 schema_generator_cls: Type[BaseSchemaGenerator] = BaseSchemaGenerator
 DIALECT = "sql"
-_DROP_TABLE_TEMPLATE = "DROP TABLE IF EXISTS {table_name}"
-_ADD_COLUMN_TEMPLATE = "ALTER TABLE {table_name} ADD {column}"
-_DROP_COLUMN_TEMPLATE = "ALTER TABLE {table_name} DROP COLUMN {column_name}"
+_DROP_TABLE_TEMPLATE = 'DROP TABLE IF EXISTS "{table_name}"'
+_ADD_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" ADD {column}'
+_DROP_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" DROP COLUMN "{column_name}"'
 _ADD_INDEX_TEMPLATE = (
-"ALTER TABLE {table_name} ADD {unique} INDEX {index_name} ({column_names})"
+'ALTER TABLE "{table_name}" ADD {unique} INDEX "{index_name}" ({column_names})'
 )
-_DROP_INDEX_TEMPLATE = "ALTER TABLE {table_name} DROP INDEX {index_name}"
-_ADD_FK_TEMPLATE = "ALTER TABLE {table_name} ADD CONSTRAINT `{fk_name}` FOREIGN KEY (`{db_column}`) REFERENCES `{table}` (`{field}`) ON DELETE {on_delete}"
-_DROP_FK_TEMPLATE = "ALTER TABLE {table_name} DROP FOREIGN KEY {fk_name}"
-_M2M_TABLE_TEMPLATE = "CREATE TABLE {table_name} ({backward_key} {backward_type} NOT NULL REFERENCES {backward_table} ({backward_field}) ON DELETE CASCADE,{forward_key} {forward_type} NOT NULL REFERENCES {forward_table} ({forward_field}) ON DELETE CASCADE){extra}{comment};"
+_DROP_INDEX_TEMPLATE = 'ALTER TABLE "{table_name}" DROP INDEX "{index_name}"'
+_ADD_FK_TEMPLATE = 'ALTER TABLE "{table_name}" ADD CONSTRAINT "{fk_name}" FOREIGN KEY ("{db_column}") REFERENCES "{table}" ("{field}") ON DELETE {on_delete}'
+_DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP FOREIGN KEY "{fk_name}"'
+_M2M_TABLE_TEMPLATE = 'CREATE TABLE "{table_name}" ("{backward_key}" {backward_type} NOT NULL REFERENCES "{backward_table}" ("{backward_field}") ON DELETE CASCADE,"{forward_key}" {forward_type} NOT NULL REFERENCES "{forward_table}" ("{forward_field}") ON DELETE CASCADE){extra}{comment};'
+_MODIFY_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" MODIFY COLUMN {column}'
 def __init__(self, client: "BaseDBAsyncClient"):
 self.client = client
@@ -51,7 +52,7 @@
 def drop_m2m(self, field: ManyToManyFieldInstance):
 return self._DROP_TABLE_TEMPLATE.format(table_name=field.through)
-def add_column(self, model: "Type[Model]", field_object: Field):
+def _get_default(self, model: "Type[Model]", field_object: Field):
 db_table = model._meta.db_table
 default = field_object.default
 db_column = field_object.model_field_name
@@ -74,6 +75,11 @@
 default = ""
 else:
 default = ""
+return default
+def add_column(self, model: "Type[Model]", field_object: Field):
+db_table = model._meta.db_table
 return self._ADD_COLUMN_TEMPLATE.format(
 table_name=db_table,
 column=self.schema_generator._create_string(
@@ -89,7 +95,7 @@
 if field_object.description
 else "",
 is_primary_key=field_object.pk,
-default=default,
+default=self._get_default(model, field_object),
 ),
 )
@@ -98,6 +104,27 @@
 table_name=model._meta.db_table, column_name=column_name
 )
+def modify_column(self, model: "Type[Model]", field_object: Field):
+db_table = model._meta.db_table
+return self._MODIFY_COLUMN_TEMPLATE.format(
+table_name=db_table,
+column=self.schema_generator._create_string(
+db_column=field_object.model_field_name,
+field_type=field_object.get_for_dialect(self.DIALECT, "SQL_TYPE"),
+nullable="NOT NULL" if not field_object.null else "",
+unique="",
+comment=self.schema_generator._column_comment_generator(
+table=db_table,
+column=field_object.model_field_name,
+comment=field_object.description,
+)
+if field_object.description
+else "",
+is_primary_key=field_object.pk,
+default=self._get_default(model, field_object),
+),
+)
 def add_index(self, model: "Type[Model]", field_names: List[str], unique=False):
 return self._ADD_INDEX_TEMPLATE.format(
 unique="UNIQUE" if unique else "",

View File

@@ -6,3 +6,14 @@ from aerich.ddl import BaseDDL
 class MysqlDDL(BaseDDL):
 schema_generator_cls = MySQLSchemaGenerator
 DIALECT = MySQLSchemaGenerator.DIALECT
+_DROP_TABLE_TEMPLATE = "DROP TABLE IF EXISTS `{table_name}`"
+_ADD_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` ADD {column}"
+_DROP_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` DROP COLUMN `{column_name}`"
+_ADD_INDEX_TEMPLATE = (
+"ALTER TABLE `{table_name}` ADD {unique} INDEX `{index_name}` ({column_names})"
+)
+_DROP_INDEX_TEMPLATE = "ALTER TABLE `{table_name}` DROP INDEX `{index_name}`"
+_ADD_FK_TEMPLATE = "ALTER TABLE `{table_name}` ADD CONSTRAINT `{fk_name}` FOREIGN KEY (`{db_column}`) REFERENCES `{table}` (`{field}`) ON DELETE {on_delete}"
+_DROP_FK_TEMPLATE = "ALTER TABLE `{table_name}` DROP FOREIGN KEY `{fk_name}`"
+_M2M_TABLE_TEMPLATE = "CREATE TABLE `{table_name}` (`{backward_key}` {backward_type} NOT NULL REFERENCES `{backward_table}` (`{backward_field}`) ON DELETE CASCADE,`{forward_key}` {forward_type} NOT NULL REFERENCES `{forward_table}` (`{forward_field}`) ON DELETE CASCADE){extra}{comment};"
+_MODIFY_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` MODIFY COLUMN {column}"
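A quick sketch of what the dialect split above produces for the same operation: the base templates quote identifiers with double quotes, while the MySQL overrides use backticks. The table name "category" is only an example; the rendered strings match the templates in the diff and the assertions in tests/test_ddl.py further below.

# Rendering the drop-table template by hand for both dialects (illustrative only).
base_sql = 'DROP TABLE IF EXISTS "{table_name}"'.format(table_name="category")
mysql_sql = "DROP TABLE IF EXISTS `{table_name}`".format(table_name="category")
assert base_sql == 'DROP TABLE IF EXISTS "category"'
assert mysql_sql == "DROP TABLE IF EXISTS `category`"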

View File

@@ -3,6 +3,7 @@ import os
 import re
 from copy import deepcopy
 from datetime import datetime
+from importlib import import_module
 from typing import Dict, List, Tuple, Type
 from tortoise import (
@@ -71,8 +72,6 @@
 from aerich.ddl.postgres import PostgresDDL
 cls.ddl = PostgresDDL(connection)
-else:
-raise NotImplementedError("Current only support MySQL")
 @classmethod
 async def _get_last_version_num(cls):
@@ -174,7 +173,10 @@
 temp_config = deepcopy(config)
 path = os.path.join(location, app, cls.old_models)
 path = path.replace("/", ".").lstrip(".")
-temp_config["apps"][cls.diff_app] = {"models": [path]}
+temp_config["apps"][cls.diff_app] = {
+"models": [path],
+"default_connection": config.get("apps").get(app).get("default_connection", "default"),
+}
 return temp_config
 @classmethod
@@ -191,8 +193,7 @@
 old_model_files = []
 models = config.get("apps").get(app).get("models")
 for model in models:
-if model != "aerich.models":
-old_model_files.append(model.replace(".", "/") + ".py")
+old_model_files.append(import_module(model).__file__)
 cls.cp_models(app, old_model_files, os.path.join(location, app, cls.get_old_model_file()))
@@ -220,6 +221,10 @@
 if old_model not in new_models.keys():
 cls._add_operator(cls.remove_model(old_models.get(old_model)), upgrade)
+@classmethod
+def _is_fk_m2m(cls, field: Field):
+return isinstance(field, (ForeignKeyFieldInstance, ManyToManyFieldInstance))
 @classmethod
 def add_model(cls, model: Type[Model]):
 return cls.ddl.create_table(model)
@@ -260,6 +265,14 @@
 )
 else:
 old_field = old_fields_map.get(new_key)
+new_field_dict = new_field.describe(serializable=True)
+new_field_dict.pop("unique")
+new_field_dict.pop("indexed")
+old_field_dict = old_field.describe(serializable=True)
+old_field_dict.pop("unique")
+old_field_dict.pop("indexed")
+if not cls._is_fk_m2m(new_field) and new_field_dict != old_field_dict:
+cls._add_operator(cls._modify_field(new_model, new_field), upgrade=upgrade)
 if (old_field.index and not new_field.index) or (
 old_field.unique and not new_field.unique
 ):
@@ -268,7 +281,7 @@
 old_model, (old_field.model_field_name,), old_field.unique
 ),
 upgrade,
-isinstance(old_field, (ForeignKeyFieldInstance, ManyToManyFieldInstance)),
+cls._is_fk_m2m(old_field),
 )
 elif (new_field.index and not old_field.index) or (
 new_field.unique and not old_field.unique
@@ -276,16 +289,14 @@
 cls._add_operator(
 cls._add_index(new_model, (new_field.model_field_name,), new_field.unique),
 upgrade,
-isinstance(new_field, (ForeignKeyFieldInstance, ManyToManyFieldInstance)),
+cls._is_fk_m2m(new_field),
 )
 for old_key in old_keys:
 field = old_fields_map.get(old_key)
 if old_key not in new_keys and not cls._exclude_field(field, upgrade):
 cls._add_operator(
-cls._remove_field(old_model, field),
-upgrade,
-isinstance(field, (ForeignKeyFieldInstance, ManyToManyFieldInstance)),
+cls._remove_field(old_model, field), upgrade, cls._is_fk_m2m(field),
 )
 for new_index in new_indexes:
@@ -354,6 +365,10 @@
 return cls.ddl.create_m2m_table(model, field)
 return cls.ddl.add_column(model, field)
+@classmethod
+def _modify_field(cls, model: Type[Model], field: Field):
+return cls.ddl.modify_column(model, field)
 @classmethod
 def _remove_field(cls, model: Type[Model], field: Field):
 if isinstance(field, ForeignKeyFieldInstance):
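The modify-column detection added above boils down to comparing two field descriptions with the index-related keys stripped out, since index changes are handled by the separate add_index/drop_index paths. A minimal standalone sketch of that comparison, not aerich's actual code:

from typing import Any, Dict

def fields_differ(old_desc: Dict[str, Any], new_desc: Dict[str, Any]) -> bool:
    # The inputs stand in for Field.describe(serializable=True) results; "unique" and
    # "indexed" are ignored here because index changes are diffed separately.
    old = {k: v for k, v in old_desc.items() if k not in ("unique", "indexed")}
    new = {k: v for k, v in new_desc.items() if k not in ("unique", "indexed")}
    return old != new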

View File

@@ -16,10 +16,7 @@ db_url = os.getenv("TEST_DB", "sqlite://:memory:")
 tortoise_orm = {
 "connections": {"default": expand_db_url(db_url, True)},
 "apps": {
-"models": {
-"models": ["tests.models", "aerich.models"],
-"default_connection": "default",
-},
+"models": {"models": ["tests.models", "aerich.models"], "default_connection": "default",},
 },
 }
@@ -42,8 +39,11 @@
 @pytest.fixture(scope="session", autouse=True)
 def initialize_tests(loop, request):
-tortoise_orm['connections']['diff_models'] = "sqlite://:memory:"
-tortoise_orm['apps']['diff_models'] = {"models": ["tests.diff_models"], "default_connection": "diff_models"}
+tortoise_orm["connections"]["diff_models"] = "sqlite://:memory:"
+tortoise_orm["apps"]["diff_models"] = {
+"models": ["tests.diff_models"],
+"default_connection": "diff_models",
+}
 loop.run_until_complete(Tortoise.init(config=tortoise_orm, _create_db=True))
 loop.run_until_complete(

poetry.lock (generated; 49 lines changed)
View File

@@ -1,8 +1,8 @@
 [[package]]
-category = "dev"
+category = "main"
 description = "MySQL driver for asyncio."
 name = "aiomysql"
-optional = false
+optional = true
 python-versions = "*"
 version = "0.0.20"
@@ -26,7 +26,7 @@ description = "High level compatibility layer for multiple asynchronous event lo
 name = "anyio"
 optional = false
 python-versions = ">=3.5.3"
-version = "1.3.0"
+version = "1.3.1"
 [package.dependencies]
 async-generator = "*"
@@ -78,10 +78,10 @@ dev = ["coverage", "pytest-runner", "pytest-trio", "pytest (>=3)", "sphinx", "to
 docs = ["sphinx"]
 [[package]]
-category = "dev"
+category = "main"
 description = "An asyncio PostgreSQL driver"
 name = "asyncpg"
-optional = false
+optional = true
 python-versions = ">=3.5.0"
 version = "0.20.1"
@@ -134,10 +134,10 @@ typed-ast = ">=1.4.0"
 d = ["aiohttp (>=3.3.2)", "aiohttp-cors"]
 [[package]]
-category = "dev"
+category = "main"
 description = "Foreign Function Interface for Python calling C code."
 name = "cffi"
-optional = false
+optional = true
 python-versions = "*"
 version = "1.14.0"
@@ -171,10 +171,10 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
 version = "0.4.3"
 [[package]]
-category = "dev"
+category = "main"
 description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
 name = "cryptography"
-optional = false
+optional = true
 python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"
 version = "2.9.2"
@@ -327,10 +327,10 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
 version = "2.6.0"
 [[package]]
-category = "dev"
+category = "main"
 description = "C parser in Python"
 name = "pycparser"
-optional = false
+optional = true
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
 version = "2.20"
@@ -356,10 +356,10 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
 version = "2.2.0"
 [[package]]
-category = "dev"
+category = "main"
 description = "Pure Python MySQL Driver"
 name = "pymysql"
-optional = false
+optional = true
 python-versions = "*"
 version = "0.9.2"
@@ -380,7 +380,7 @@ description = "A SQL query builder API for Python"
 name = "pypika"
 optional = false
 python-versions = "*"
-version = "0.37.6"
+version = "0.37.7"
 [[package]]
 category = "dev"
@@ -455,7 +455,7 @@ python-versions = "*"
 version = "2020.5.14"
 [[package]]
-category = "dev"
+category = "main"
 description = "Python 2 and 3 compatibility utilities"
 name = "six"
 optional = false
@@ -522,14 +522,17 @@ version = "3.7.4.2"
 [[package]]
 category = "dev"
-description = "Measures number of Terminal column cells of wide-character codes"
+description = "Measures the displayed width of unicode strings in a terminal"
 name = "wcwidth"
 optional = false
 python-versions = "*"
-version = "0.1.9"
+version = "0.2.2"
+[extras]
+dbdrivers = ["aiomysql", "asyncpg"]
 [metadata]
-content-hash = "35274e9622d359af475f573760ba687b31756b1b1de70bc4d75dab5ddbc5a93d"
+content-hash = "6b1f30cb32cf5915f1ee1f6c6b0e52130bc8f7af92f1a9703dc9632ebce2a977"
 python-versions = "^3.8"
 [metadata.files]
@@ -542,8 +545,8 @@ aiosqlite = [
 {file = "aiosqlite-0.13.0.tar.gz", hash = "sha256:6e92961ae9e606b43b05e29b129e346b29e400fcbd63e3c0c564d89230257645"},
 ]
 anyio = [
-{file = "anyio-1.3.0-py3-none-any.whl", hash = "sha256:db2c3d21576870b95d4fd0b8f4a0f9c64057f777c578f3a8127179a17c8c067e"},
-{file = "anyio-1.3.0.tar.gz", hash = "sha256:7deae0315dd10aa41c21528b83352e4b52f44e6153a21081a3d1cd8c03728e46"},
+{file = "anyio-1.3.1-py3-none-any.whl", hash = "sha256:f21b4fafeec1b7db81e09a907e44e374a1e39718d782a488fdfcdcf949c8950c"},
+{file = "anyio-1.3.1.tar.gz", hash = "sha256:a46bb2b7743455434afd9adea848a3c4e0b7321aee3e9d08844b11d348d3b5a0"},
 ]
 apipkg = [
 {file = "apipkg-1.5-py2.py3-none-any.whl", hash = "sha256:58587dd4dc3daefad0487f6d9ae32b4542b185e1c36db6993290e7c41ca2b47c"},
@@ -758,7 +761,7 @@ pyparsing = [
 {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"},
 ]
 pypika = [
-{file = "PyPika-0.37.6.tar.gz", hash = "sha256:64510fa36667e8bb654bdc1be5a3a77bac1dbc2f03d4848efac08e39d9cac6f5"},
+{file = "PyPika-0.37.7.tar.gz", hash = "sha256:20bebc05983cd401d428e3beb62d037e5f0271daab2bb5aba82f4e092d4a3694"},
 ]
 pytest = [
 {file = "pytest-5.4.2-py3-none-any.whl", hash = "sha256:95c710d0a72d91c13fae35dce195633c929c3792f54125919847fdcdf7caa0d3"},
@@ -846,6 +849,6 @@ typing-extensions = [
 {file = "typing_extensions-3.7.4.2.tar.gz", hash = "sha256:79ee589a3caca649a9bfd2a8de4709837400dfa00b6cc81962a1e6a1815969ae"},
 ]
 wcwidth = [
-{file = "wcwidth-0.1.9-py2.py3-none-any.whl", hash = "sha256:cafe2186b3c009a04067022ce1dcd79cb38d8d65ee4f4791b8888d6599d1bbe1"},
-{file = "wcwidth-0.1.9.tar.gz", hash = "sha256:ee73862862a156bf77ff92b09034fc4825dd3af9cf81bc5b360668d425f3c5f1"},
+{file = "wcwidth-0.2.2-py2.py3-none-any.whl", hash = "sha256:b651b6b081476420e4e9ae61239ac4c1b49d0c5ace42b2e81dc2ff49ed50c566"},
+{file = "wcwidth-0.2.2.tar.gz", hash = "sha256:3de2e41158cb650b91f9654cbf9a3e053cee0719c9df4ddc11e4b568669e9829"},
 ]

View File

@@ -1,14 +1,26 @@
 [tool.poetry]
 name = "aerich"
-version = "0.1.7"
+version = "0.2.0"
 description = "A database migrations tool for Tortoise ORM."
 authors = ["long2ice <long2ice@gmail.com>"]
+license = "MIT"
+readme = "README.rst"
+homepage = "https://github.com/long2ice/aerich"
+repository = "git@github.com:long2ice/aerich.git"
+documentation = "https://github.com/long2ice/aerich"
+keywords = ["migrate", "Tortoise-ORM", "mysql"]
+packages = [
+{ include = "aerich" }
+]
+include = ["CHANGELOG.rst", "LICENSE", "README.rst"]
 [tool.poetry.dependencies]
 python = "^3.8"
 tortoise-orm = "*"
 asyncclick = "*"
 pydantic = "*"
+aiomysql = {version = "*", optional = true}
+asyncpg = {version = "*", optional = true}
 [tool.poetry.dev-dependencies]
 taskipy = "*"
@@ -16,17 +28,16 @@ flake8 = "*"
 isort = "*"
 black = "^19.10b0"
 pytest = "*"
-aiomysql = "*"
-asyncpg = "*"
 pytest-xdist = "*"
 mypy = "*"
 pytest-asyncio = "*"
-[tool.taskipy.tasks]
-export = "poetry export -f requirements.txt --without-hashes > requirements.txt"
-export-dev = "poetry export -f requirements.txt --dev --without-hashes > requirements-dev.txt"
+[tool.poetry.extras]
+dbdrivers = ["aiomysql", "asyncpg"]
 [build-system]
 requires = ["poetry>=0.12"]
 build-backend = "poetry.masonry.api"
+[tool.poetry.scripts]
+aerich = "aerich.cli:main"

View File

@@ -1,48 +0,0 @@
aiomysql==0.0.20
aiosqlite==0.13.0
anyio==1.3.0
apipkg==1.5
appdirs==1.4.4
async-generator==1.10
asyncclick==7.0.9
asyncpg==0.20.1
atomicwrites==1.4.0; sys_platform == "win32"
attrs==19.3.0
black==19.10b0
cffi==1.14.0
ciso8601==2.1.3; sys_platform != "win32" and implementation_name == "cpython"
click==7.1.2
colorama==0.4.3; sys_platform == "win32"
cryptography==2.9.2
execnet==1.7.1
flake8==3.8.2
iso8601==0.1.12; sys_platform == "win32" or implementation_name != "cpython"
isort==4.3.21
mccabe==0.6.1
more-itertools==8.3.0
mypy==0.770
mypy-extensions==0.4.3
packaging==20.4
pathspec==0.8.0
pluggy==0.13.1
py==1.8.1
pycodestyle==2.6.0
pycparser==2.20
pydantic==1.5.1
pyflakes==2.2.0
pymysql==0.9.2
pyparsing==2.4.7
pypika==0.37.6
pytest==5.4.2
pytest-asyncio==0.12.0
pytest-forked==1.1.3
pytest-xdist==1.32.0
regex==2020.5.14
six==1.15.0
sniffio==1.1.0
taskipy==1.2.1
toml==0.10.1
tortoise-orm==0.16.12
typed-ast==1.4.1
typing-extensions==3.7.4.2
wcwidth==0.1.9

View File

@@ -1,11 +0,0 @@
aiosqlite==0.13.0
anyio==1.3.0
async-generator==1.10
asyncclick==7.0.9
ciso8601==2.1.3; sys_platform != "win32" and implementation_name == "cpython"
iso8601==0.1.12; sys_platform == "win32" or implementation_name != "cpython"
pydantic==1.5.1
pypika==0.37.6
sniffio==1.1.0
tortoise-orm==0.16.12
typing-extensions==3.7.4.2

View File

@@ -1,47 +0,0 @@
[flake8]
max-line-length = 100
exclude =
ignore = E501,W503,DAR101,DAR201,DAR402
[darglint]
docstring_style=sphinx
[isort]
not_skip=__init__.py
multi_line_output=3
include_trailing_comma=True
force_grid_wrap=0
use_parentheses=True
line_length=100
[tool:pytest]
addopts = -n auto --tb=native -q
[mypy]
pretty = True
ignore_missing_imports = True
check_untyped_defs = True
disallow_subclassing_any = True
disallow_untyped_calls = True
disallow_untyped_defs = False
disallow_incomplete_defs = False
disallow_untyped_decorators = True
no_implicit_optional = True
warn_redundant_casts = True
warn_unused_ignores = True
warn_no_return = True
warn_return_any = False
warn_unused_configs = True
warn_unreachable = True
allow_redefinition = True
strict_equality = True
show_error_context = True
[mypy-tests.*]
check_untyped_defs = False
disallow_untyped_defs = False
disallow_incomplete_defs = False
warn_unreachable = False
[mypy-conftest]
disallow_untyped_defs = False

View File

@@ -1,44 +0,0 @@
import os
import re
from setuptools import find_packages, setup
def version():
ver_str_line = open('aerich/__init__.py', 'rt').read()
mob = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", ver_str_line, re.M)
if not mob:
raise RuntimeError("Unable to find version string")
return mob.group(1)
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as f:
long_description = f.read()
def requirements():
return open('requirements.txt', 'rt').read().splitlines()
setup(
name='aerich',
version=version(),
description='A database migrations tool for Tortoise-ORM.',
author='long2ice',
long_description_content_type='text/x-rst',
long_description=long_description,
author_email='long2ice@gmail.com',
url='https://github.com/long2ice/aerich',
license='MIT License',
packages=find_packages(include=['aerich*']),
include_package_data=True,
zip_safe=True,
entry_points={
'console_scripts': ['aerich = aerich.cli:main'],
},
platforms='any',
keywords=(
'migrate Tortoise-ORM mysql'
),
dependency_links=['https://github.com/tortoise-orm/tortoise-orm.git@develop#egg=tortoise-orm'],
install_requires=requirements(),
)

View File

@@ -48,54 +48,76 @@ COMMENT ON COLUMN "category"."user_id" IS 'User';"""
 def test_drop_table():
 ret = Migrate.ddl.drop_table(Category)
-assert ret == "DROP TABLE IF EXISTS category"
+if isinstance(Migrate.ddl, MysqlDDL):
+assert ret == "DROP TABLE IF EXISTS `category`"
+else:
+assert ret == 'DROP TABLE IF EXISTS "category"'
 def test_add_column():
 ret = Migrate.ddl.add_column(Category, Category._meta.fields_map.get("name"))
 if isinstance(Migrate.ddl, MysqlDDL):
-assert ret == "ALTER TABLE category ADD `name` VARCHAR(200) NOT NULL"
-elif isinstance(Migrate.ddl, PostgresDDL):
-assert ret == 'ALTER TABLE category ADD "name" VARCHAR(200) NOT NULL'
-elif isinstance(Migrate.ddl, SqliteDDL):
-assert ret == 'ALTER TABLE category ADD "name" VARCHAR(200) NOT NULL'
+assert ret == "ALTER TABLE `category` ADD `name` VARCHAR(200) NOT NULL"
+else:
+assert ret == 'ALTER TABLE "category" ADD "name" VARCHAR(200) NOT NULL'
+def test_modify_column():
+ret = Migrate.ddl.modify_column(Category, Category._meta.fields_map.get("name"))
+if isinstance(Migrate.ddl, MysqlDDL):
+assert ret == "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200) NOT NULL"
+else:
+assert ret == 'ALTER TABLE "category" MODIFY COLUMN "name" VARCHAR(200) NOT NULL'
 def test_drop_column():
 ret = Migrate.ddl.drop_column(Category, "name")
-assert ret == "ALTER TABLE category DROP COLUMN name"
-assert ret == "ALTER TABLE category DROP COLUMN name"
+if isinstance(Migrate.ddl, MysqlDDL):
+assert ret == "ALTER TABLE `category` DROP COLUMN `name`"
+else:
+assert ret == 'ALTER TABLE "category" DROP COLUMN "name"'
 def test_add_index():
 index = Migrate.ddl.add_index(Category, ["name"])
 index_u = Migrate.ddl.add_index(Category, ["name"], True)
 if isinstance(Migrate.ddl, MysqlDDL):
-assert index == "ALTER TABLE category ADD INDEX idx_category_name_8b0cb9 (`name`)"
-assert index_u == "ALTER TABLE category ADD UNIQUE INDEX uid_category_name_8b0cb9 (`name`)"
-elif isinstance(Migrate.ddl, SqliteDDL):
-assert index_u == 'ALTER TABLE category ADD UNIQUE INDEX uid_category_name_8b0cb9 ("name")'
-assert index_u == 'ALTER TABLE category ADD UNIQUE INDEX uid_category_name_8b0cb9 ("name")'
+assert index == "ALTER TABLE `category` ADD INDEX `idx_category_name_8b0cb9` (`name`)"
+assert (
+index_u == "ALTER TABLE `category` ADD UNIQUE INDEX `uid_category_name_8b0cb9` (`name`)"
+)
+else:
+assert index == 'ALTER TABLE "category" ADD INDEX "idx_category_name_8b0cb9" ("name")'
+assert (
+index_u == 'ALTER TABLE "category" ADD UNIQUE INDEX "uid_category_name_8b0cb9" ("name")'
+)
 def test_drop_index():
 ret = Migrate.ddl.drop_index(Category, ["name"])
-assert ret == "ALTER TABLE category DROP INDEX idx_category_name_8b0cb9"
-ret = Migrate.ddl.drop_index(Category, ["name"], True)
-assert ret == "ALTER TABLE category DROP INDEX uid_category_name_8b0cb9"
+if isinstance(Migrate.ddl, MysqlDDL):
+assert ret == "ALTER TABLE `category` DROP INDEX `idx_category_name_8b0cb9`"
+else:
+assert ret == 'ALTER TABLE "category" DROP INDEX "idx_category_name_8b0cb9"'
 def test_add_fk():
 ret = Migrate.ddl.add_fk(Category, Category._meta.fields_map.get("user"))
-assert (
-ret
-== "ALTER TABLE category ADD CONSTRAINT `fk_category_user_e2e3874c` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE"
-)
+if isinstance(Migrate.ddl, MysqlDDL):
+assert (
+ret
+== "ALTER TABLE `category` ADD CONSTRAINT `fk_category_user_e2e3874c` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE"
+)
+else:
+assert (
+ret
+== 'ALTER TABLE "category" ADD CONSTRAINT "fk_category_user_e2e3874c" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE'
+)
 def test_drop_fk():
 ret = Migrate.ddl.drop_fk(Category, Category._meta.fields_map.get("user"))
-assert ret == "ALTER TABLE category DROP FOREIGN KEY fk_category_user_e2e3874c"
+if isinstance(Migrate.ddl, MysqlDDL):
+assert ret == "ALTER TABLE `category` DROP FOREIGN KEY `fk_category_user_e2e3874c`"
+else:
+assert ret == 'ALTER TABLE "category" DROP FOREIGN KEY "fk_category_user_e2e3874c"'

View File

@@ -1,8 +1,6 @@
 from tortoise import Tortoise
 from aerich.ddl.mysql import MysqlDDL
-from aerich.ddl.postgres import PostgresDDL
-from aerich.ddl.sqlite import SqliteDDL
 from aerich.migrate import Migrate
@@ -14,28 +12,19 @@ def test_migrate():
 Migrate.diff_models(models, diff_models, False)
 if isinstance(Migrate.ddl, MysqlDDL):
 assert Migrate.upgrade_operators == [
-"ALTER TABLE category ADD `name` VARCHAR(200) NOT NULL",
-"ALTER TABLE user ADD UNIQUE INDEX uid_user_usernam_9987ab (`username`)",
+"ALTER TABLE `category` ADD `name` VARCHAR(200) NOT NULL",
+"ALTER TABLE `user` ADD UNIQUE INDEX `uid_user_usernam_9987ab` (`username`)",
 ]
 assert Migrate.downgrade_operators == [
-"ALTER TABLE category DROP COLUMN name",
-"ALTER TABLE user DROP INDEX uid_user_usernam_9987ab",
+"ALTER TABLE `category` DROP COLUMN `name`",
+"ALTER TABLE `user` DROP INDEX `uid_user_usernam_9987ab`",
 ]
-elif isinstance(Migrate.ddl, SqliteDDL):
+else:
 assert Migrate.upgrade_operators == [
-'ALTER TABLE category ADD "name" VARCHAR(200) NOT NULL',
-'ALTER TABLE user ADD UNIQUE INDEX uid_user_usernam_9987ab ("username")',
+'ALTER TABLE "category" ADD "name" VARCHAR(200) NOT NULL',
+'ALTER TABLE "user" ADD UNIQUE INDEX "uid_user_usernam_9987ab" ("username")',
 ]
 assert Migrate.downgrade_operators == [
-"ALTER TABLE category DROP COLUMN name",
-"ALTER TABLE user DROP INDEX uid_user_usernam_9987ab",
-]
-elif isinstance(Migrate.ddl, PostgresDDL):
-assert Migrate.upgrade_operators == [
-'ALTER TABLE category ADD "name" VARCHAR(200) NOT NULL',
-'ALTER TABLE user ADD UNIQUE INDEX uid_user_usernam_9987ab ("username")',
-]
-assert Migrate.downgrade_operators == [
-"ALTER TABLE category DROP COLUMN name",
-"ALTER TABLE user DROP INDEX uid_user_usernam_9987ab",
+'ALTER TABLE "category" DROP COLUMN "name"',
+'ALTER TABLE "user" DROP INDEX "uid_user_usernam_9987ab"',
 ]