11 Commits

Author SHA1 Message Date
long2ice
2fc43cb0d8 Merge pull request #7 from long2ice/dev
v0.1.9
2020-05-26 20:06:56 +08:00
long2ice
125389461f check tortoise add aerich.models 2020-05-26 14:44:55 +08:00
long2ice
c09c878eaf add support modify column
diff mysql ddl
2020-05-26 10:22:02 +08:00
long2ice
ef3e0c11d5 update version 2020-05-25 23:46:35 +08:00
long2ice
881f70f748 Fix default_connection when upgrade 2020-05-25 23:44:42 +08:00
long2ice
615d9747dc Merge pull request #5 from long2ice/dev
v0.1.8
2020-05-25 22:43:36 +08:00
long2ice
6ffca1a0c7 add support modify column 2020-05-25 22:39:39 +08:00
long2ice
95e41720cb Fix init db sql error 2020-05-25 18:53:34 +08:00
long2ice
40c0008e6e Fix upgrade error when migrate 2020-05-25 18:02:56 +08:00
long2ice
ce75e55d60 update README.rst 2020-05-25 16:36:18 +08:00
long2ice
4d4f951e09 update README.rst 2020-05-25 16:33:56 +08:00
12 changed files with 165 additions and 74 deletions

View File

@@ -4,6 +4,19 @@ ChangeLog
 0.1
 ===
 
+0.1.9
+-----
+- Fix default_connection when upgrade
+- Find default app instead of default.
+- Diff MySQL ddl.
+- Check tortoise config.
+
+0.1.8
+-----
+- Fix upgrade error when migrate.
+- Fix init db sql error.
+- Support change column.
+
 0.1.7
 -----
 - Exclude models.Aerich.

View File

@@ -159,6 +159,11 @@ Show heads to be migrated
 1_202029051520102929_drop_column.json
 
+Limitations
+===========
+* Not support ``rename column`` now.
+* ``Sqlite`` and ``Postgres`` may not work as expected because I don't use those in my work.
+
 License
 =======
 This project is licensed under the `MIT <https://github.com/long2ice/aerich/blob/master/LICENSE>`_ License.

View File

@@ -1 +1 @@
-__version__ = "0.1.7"
+__version__ = "0.1.9"

View File

@@ -7,6 +7,7 @@ from enum import Enum
 import asyncclick as click
 from asyncclick import Context, UsageError
 from tortoise import Tortoise, generate_schema_for_client
+from tortoise.exceptions import OperationalError
 from tortoise.transactions import in_transaction
 from tortoise.utils import get_schema_sql
@@ -31,7 +32,7 @@ parser = ConfigParser()
 @click.option(
     "-c", "--config", default="aerich.ini", show_default=True, help="Config file.",
 )
-@click.option("--app", default="models", show_default=True, help="Tortoise-ORM app name.")
+@click.option("--app", required=False, help="Tortoise-ORM app name.")
 @click.option(
     "-n",
     "--name",
@@ -44,7 +45,6 @@ async def cli(ctx: Context, config, app, name):
     ctx.ensure_object(dict)
     ctx.obj["config_file"] = config
     ctx.obj["name"] = name
-    ctx.obj["app"] = app
 
     invoked_subcommand = ctx.invoked_subcommand
     if invoked_subcommand != "init":
@@ -56,9 +56,12 @@ async def cli(ctx: Context, config, app, name):
         tortoise_orm = parser[name]["tortoise_orm"]
         tortoise_config = get_tortoise_config(ctx, tortoise_orm)
+        app = app or list(tortoise_config.get("apps").keys())[0]
+        if "aerich.models" not in tortoise_config.get("apps").get(app).get("models"):
+            raise UsageError("Check your tortoise config and add aerich.models to it.", ctx=ctx)
         ctx.obj["config"] = tortoise_config
         ctx.obj["location"] = location
+        ctx.obj["app"] = app
         if invoked_subcommand != "init-db":
             await Migrate.init_with_old_models(tortoise_config, app, location)
@@ -86,7 +89,11 @@ async def upgrade(ctx: Context):
     app = ctx.obj["app"]
     migrated = False
     for version in Migrate.get_all_version_files():
-        if not await Aerich.exists(version=version, app=app):
+        try:
+            exists = await Aerich.exists(version=version, app=app)
+        except OperationalError:
+            exists = False
+        if not exists:
             async with in_transaction(get_app_connection_name(config, app)) as conn:
                 file_path = os.path.join(Migrate.migrate_location, version)
                 with open(file_path, "r") as f:
@@ -213,7 +220,7 @@ async def init_db(ctx: Context, safe):
     await Aerich.create(version=version, app=app)
     with open(os.path.join(dirname, version), "w") as f:
         content = {
-            "upgrade": schema,
+            "upgrade": [schema],
         }
         json.dump(content, f, ensure_ascii=False, indent=2)
     return click.secho(f'Success generate schema for app "{app}"', fg=Color.green)
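Note on the new checks in cli(): "aerich.models" must now appear in the Tortoise apps config, and when --app is omitted the first app in that config is used. A minimal config that would pass the check might look like the sketch below (illustrative only; the connection URL and the "tests.models" module are placeholders, the same shape appears in the conftest.py diff further down). The version file written by init-db also now stores "upgrade" as a list of statements rather than a single string.

    # Hypothetical TORTOISE_ORM config; what the new UsageError check above looks
    # for is only the presence of "aerich.models" in the app's models list.
    TORTOISE_ORM = {
        "connections": {"default": "sqlite://db.sqlite3"},
        "apps": {
            "models": {
                "models": ["tests.models", "aerich.models"],
                "default_connection": "default",
            },
        },
    }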

View File

@@ -8,16 +8,17 @@ from tortoise.fields import Field, JSONField, TextField, UUIDField
 class BaseDDL:
     schema_generator_cls: Type[BaseSchemaGenerator] = BaseSchemaGenerator
     DIALECT = "sql"
-    _DROP_TABLE_TEMPLATE = "DROP TABLE IF EXISTS {table_name}"
-    _ADD_COLUMN_TEMPLATE = "ALTER TABLE {table_name} ADD {column}"
-    _DROP_COLUMN_TEMPLATE = "ALTER TABLE {table_name} DROP COLUMN {column_name}"
+    _DROP_TABLE_TEMPLATE = 'DROP TABLE IF EXISTS "{table_name}"'
+    _ADD_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" ADD {column}'
+    _DROP_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" DROP COLUMN "{column_name}"'
     _ADD_INDEX_TEMPLATE = (
-        "ALTER TABLE {table_name} ADD {unique} INDEX {index_name} ({column_names})"
+        'ALTER TABLE "{table_name}" ADD {unique} INDEX "{index_name}" ({column_names})'
     )
-    _DROP_INDEX_TEMPLATE = "ALTER TABLE {table_name} DROP INDEX {index_name}"
-    _ADD_FK_TEMPLATE = "ALTER TABLE {table_name} ADD CONSTRAINT `{fk_name}` FOREIGN KEY (`{db_column}`) REFERENCES `{table}` (`{field}`) ON DELETE {on_delete}"
-    _DROP_FK_TEMPLATE = "ALTER TABLE {table_name} DROP FOREIGN KEY {fk_name}"
-    _M2M_TABLE_TEMPLATE = "CREATE TABLE {table_name} ({backward_key} {backward_type} NOT NULL REFERENCES {backward_table} ({backward_field}) ON DELETE CASCADE,{forward_key} {forward_type} NOT NULL REFERENCES {forward_table} ({forward_field}) ON DELETE CASCADE){extra}{comment};"
+    _DROP_INDEX_TEMPLATE = 'ALTER TABLE "{table_name}" DROP INDEX "{index_name}"'
+    _ADD_FK_TEMPLATE = 'ALTER TABLE "{table_name}" ADD CONSTRAINT "{fk_name}" FOREIGN KEY ("{db_column}") REFERENCES "{table}" ("{field}") ON DELETE {on_delete}'
+    _DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP FOREIGN KEY "{fk_name}"'
+    _M2M_TABLE_TEMPLATE = 'CREATE TABLE "{table_name}" ("{backward_key}" {backward_type} NOT NULL REFERENCES "{backward_table}" ("{backward_field}") ON DELETE CASCADE,"{forward_key}" {forward_type} NOT NULL REFERENCES "{forward_table}" ("{forward_field}") ON DELETE CASCADE){extra}{comment};'
+    _MODIFY_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" MODIFY COLUMN {column}'
 
     def __init__(self, client: "BaseDBAsyncClient"):
         self.client = client
@@ -51,7 +52,7 @@ class BaseDDL:
     def drop_m2m(self, field: ManyToManyFieldInstance):
         return self._DROP_TABLE_TEMPLATE.format(table_name=field.through)
 
-    def add_column(self, model: "Type[Model]", field_object: Field):
+    def _get_default(self, model: "Type[Model]", field_object: Field):
         db_table = model._meta.db_table
         default = field_object.default
         db_column = field_object.model_field_name
@@ -74,6 +75,11 @@ class BaseDDL:
                 default = ""
         else:
             default = ""
+        return default
+
+    def add_column(self, model: "Type[Model]", field_object: Field):
+        db_table = model._meta.db_table
+
         return self._ADD_COLUMN_TEMPLATE.format(
             table_name=db_table,
             column=self.schema_generator._create_string(
@@ -89,7 +95,7 @@ class BaseDDL:
                 if field_object.description
                 else "",
                 is_primary_key=field_object.pk,
-                default=default,
+                default=self._get_default(model, field_object),
             ),
         )
@@ -98,6 +104,27 @@ class BaseDDL:
             table_name=model._meta.db_table, column_name=column_name
         )
 
+    def modify_column(self, model: "Type[Model]", field_object: Field):
+        db_table = model._meta.db_table
+        return self._MODIFY_COLUMN_TEMPLATE.format(
+            table_name=db_table,
+            column=self.schema_generator._create_string(
+                db_column=field_object.model_field_name,
+                field_type=field_object.get_for_dialect(self.DIALECT, "SQL_TYPE"),
+                nullable="NOT NULL" if not field_object.null else "",
+                unique="",
+                comment=self.schema_generator._column_comment_generator(
+                    table=db_table,
+                    column=field_object.model_field_name,
+                    comment=field_object.description,
+                )
+                if field_object.description
+                else "",
+                is_primary_key=field_object.pk,
+                default=self._get_default(model, field_object),
+            ),
+        )
+
     def add_index(self, model: "Type[Model]", field_names: List[str], unique=False):
         return self._ADD_INDEX_TEMPLATE.format(
             unique="UNIQUE" if unique else "",

View File

@@ -6,3 +6,14 @@ from aerich.ddl import BaseDDL
 class MysqlDDL(BaseDDL):
     schema_generator_cls = MySQLSchemaGenerator
     DIALECT = MySQLSchemaGenerator.DIALECT
+    _DROP_TABLE_TEMPLATE = "DROP TABLE IF EXISTS `{table_name}`"
+    _ADD_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` ADD {column}"
+    _DROP_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` DROP COLUMN `{column_name}`"
+    _ADD_INDEX_TEMPLATE = (
+        "ALTER TABLE `{table_name}` ADD {unique} INDEX `{index_name}` ({column_names})"
+    )
+    _DROP_INDEX_TEMPLATE = "ALTER TABLE `{table_name}` DROP INDEX `{index_name}`"
+    _ADD_FK_TEMPLATE = "ALTER TABLE `{table_name}` ADD CONSTRAINT `{fk_name}` FOREIGN KEY (`{db_column}`) REFERENCES `{table}` (`{field}`) ON DELETE {on_delete}"
+    _DROP_FK_TEMPLATE = "ALTER TABLE `{table_name}` DROP FOREIGN KEY `{fk_name}`"
+    _M2M_TABLE_TEMPLATE = "CREATE TABLE `{table_name}` (`{backward_key}` {backward_type} NOT NULL REFERENCES `{backward_table}` (`{backward_field}`) ON DELETE CASCADE,`{forward_key}` {forward_type} NOT NULL REFERENCES `{forward_table}` (`{forward_field}`) ON DELETE CASCADE){extra}{comment};"
+    _MODIFY_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` MODIFY COLUMN {column}"

View File

@@ -174,7 +174,10 @@ class Migrate:
         temp_config = deepcopy(config)
         path = os.path.join(location, app, cls.old_models)
         path = path.replace("/", ".").lstrip(".")
-        temp_config["apps"][cls.diff_app] = {"models": [path]}
+        temp_config["apps"][cls.diff_app] = {
+            "models": [path],
+            "default_connection": config.get("apps").get(app).get("default_connection", "default"),
+        }
         return temp_config
 
     @classmethod
@@ -220,6 +223,10 @@ class Migrate:
             if old_model not in new_models.keys():
                 cls._add_operator(cls.remove_model(old_models.get(old_model)), upgrade)
 
+    @classmethod
+    def _is_fk_m2m(cls, field: Field):
+        return isinstance(field, (ForeignKeyFieldInstance, ManyToManyFieldInstance))
+
     @classmethod
     def add_model(cls, model: Type[Model]):
         return cls.ddl.create_table(model)
@@ -260,6 +267,14 @@ class Migrate:
                 )
             else:
                 old_field = old_fields_map.get(new_key)
+                new_field_dict = new_field.describe(serializable=True)
+                new_field_dict.pop("unique")
+                new_field_dict.pop("indexed")
+                old_field_dict = old_field.describe(serializable=True)
+                old_field_dict.pop("unique")
+                old_field_dict.pop("indexed")
+                if not cls._is_fk_m2m(new_field) and new_field_dict != old_field_dict:
+                    cls._add_operator(cls._modify_field(new_model, new_field), upgrade=upgrade)
                 if (old_field.index and not new_field.index) or (
                     old_field.unique and not new_field.unique
                 ):
@@ -268,7 +283,7 @@ class Migrate:
                             old_model, (old_field.model_field_name,), old_field.unique
                         ),
                         upgrade,
-                        isinstance(old_field, (ForeignKeyFieldInstance, ManyToManyFieldInstance)),
+                        cls._is_fk_m2m(old_field),
                     )
                 elif (new_field.index and not old_field.index) or (
                     new_field.unique and not old_field.unique
@@ -276,16 +291,14 @@ class Migrate:
                     cls._add_operator(
                         cls._add_index(new_model, (new_field.model_field_name,), new_field.unique),
                         upgrade,
-                        isinstance(new_field, (ForeignKeyFieldInstance, ManyToManyFieldInstance)),
+                        cls._is_fk_m2m(new_field),
                     )
 
         for old_key in old_keys:
             field = old_fields_map.get(old_key)
             if old_key not in new_keys and not cls._exclude_field(field, upgrade):
                 cls._add_operator(
-                    cls._remove_field(old_model, field),
-                    upgrade,
-                    isinstance(field, (ForeignKeyFieldInstance, ManyToManyFieldInstance)),
+                    cls._remove_field(old_model, field), upgrade, cls._is_fk_m2m(field),
                 )
 
         for new_index in new_indexes:
@@ -354,6 +367,10 @@ class Migrate:
             return cls.ddl.create_m2m_table(model, field)
         return cls.ddl.add_column(model, field)
 
+    @classmethod
+    def _modify_field(cls, model: Type[Model], field: Field):
+        return cls.ddl.modify_column(model, field)
+
     @classmethod
     def _remove_field(cls, model: Type[Model], field: Field):
         if isinstance(field, ForeignKeyFieldInstance):
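The shape of the new field comparison in diff_models, sketched with plain dicts (illustrative only; the keys shown are hypothetical stand-ins for what Tortoise's Field.describe(serializable=True) returns):

    # Sketch: "unique" and "indexed" are popped so index changes keep their own
    # add_index/drop_index path; anything else that still differs (type,
    # nullability, default, comment, ...) becomes one MODIFY COLUMN operator.
    old = {"db_column": "name", "field_type": "VARCHAR(200)", "nullable": True, "unique": False, "indexed": False}
    new = {"db_column": "name", "field_type": "VARCHAR(200)", "nullable": False, "unique": False, "indexed": False}

    for d in (old, new):
        d.pop("unique")
        d.pop("indexed")

    if old != new:
        print("emit MODIFY COLUMN for", new["db_column"])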

View File

@@ -16,10 +16,7 @@ db_url = os.getenv("TEST_DB", "sqlite://:memory:")
 tortoise_orm = {
     "connections": {"default": expand_db_url(db_url, True)},
     "apps": {
-        "models": {
-            "models": ["tests.models", "aerich.models"],
-            "default_connection": "default",
-        },
+        "models": {"models": ["tests.models", "aerich.models"], "default_connection": "default",},
     },
 }
@@ -42,8 +39,11 @@ def loop():
 @pytest.fixture(scope="session", autouse=True)
 def initialize_tests(loop, request):
-    tortoise_orm['connections']['diff_models'] = "sqlite://:memory:"
-    tortoise_orm['apps']['diff_models'] = {"models": ["tests.diff_models"], "default_connection": "diff_models"}
+    tortoise_orm["connections"]["diff_models"] = "sqlite://:memory:"
+    tortoise_orm["apps"]["diff_models"] = {
+        "models": ["tests.diff_models"],
+        "default_connection": "diff_models",
+    }
     loop.run_until_complete(Tortoise.init(config=tortoise_orm, _create_db=True))
     loop.run_until_complete(

View File

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "aerich"
-version = "0.1.7"
+version = "0.1.9"
 description = "A database migrations tool for Tortoise ORM."
 authors = ["long2ice <long2ice@gmail.com>"]

View File

@@ -48,54 +48,76 @@ COMMENT ON COLUMN "category"."user_id" IS 'User';"""
 def test_drop_table():
     ret = Migrate.ddl.drop_table(Category)
-    assert ret == "DROP TABLE IF EXISTS category"
+    if isinstance(Migrate.ddl, MysqlDDL):
+        assert ret == "DROP TABLE IF EXISTS `category`"
+    else:
+        assert ret == 'DROP TABLE IF EXISTS "category"'
 
 
 def test_add_column():
     ret = Migrate.ddl.add_column(Category, Category._meta.fields_map.get("name"))
     if isinstance(Migrate.ddl, MysqlDDL):
-        assert ret == "ALTER TABLE category ADD `name` VARCHAR(200) NOT NULL"
-    elif isinstance(Migrate.ddl, PostgresDDL):
-        assert ret == 'ALTER TABLE category ADD "name" VARCHAR(200) NOT NULL'
-    elif isinstance(Migrate.ddl, SqliteDDL):
-        assert ret == 'ALTER TABLE category ADD "name" VARCHAR(200) NOT NULL'
+        assert ret == "ALTER TABLE `category` ADD `name` VARCHAR(200) NOT NULL"
+    else:
+        assert ret == 'ALTER TABLE "category" ADD "name" VARCHAR(200) NOT NULL'
+
+
+def test_modify_column():
+    ret = Migrate.ddl.modify_column(Category, Category._meta.fields_map.get("name"))
+    if isinstance(Migrate.ddl, MysqlDDL):
+        assert ret == "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200) NOT NULL"
+    else:
+        assert ret == 'ALTER TABLE "category" MODIFY COLUMN "name" VARCHAR(200) NOT NULL'
 
 
 def test_drop_column():
     ret = Migrate.ddl.drop_column(Category, "name")
-    assert ret == "ALTER TABLE category DROP COLUMN name"
-    assert ret == "ALTER TABLE category DROP COLUMN name"
+    if isinstance(Migrate.ddl, MysqlDDL):
+        assert ret == "ALTER TABLE `category` DROP COLUMN `name`"
+    else:
+        assert ret == 'ALTER TABLE "category" DROP COLUMN "name"'
 
 
 def test_add_index():
     index = Migrate.ddl.add_index(Category, ["name"])
     index_u = Migrate.ddl.add_index(Category, ["name"], True)
     if isinstance(Migrate.ddl, MysqlDDL):
-        assert index == "ALTER TABLE category ADD INDEX idx_category_name_8b0cb9 (`name`)"
-        assert index_u == "ALTER TABLE category ADD UNIQUE INDEX uid_category_name_8b0cb9 (`name`)"
-    elif isinstance(Migrate.ddl, SqliteDDL):
-        assert index_u == 'ALTER TABLE category ADD UNIQUE INDEX uid_category_name_8b0cb9 ("name")'
-        assert index_u == 'ALTER TABLE category ADD UNIQUE INDEX uid_category_name_8b0cb9 ("name")'
+        assert index == "ALTER TABLE `category` ADD INDEX `idx_category_name_8b0cb9` (`name`)"
+        assert (
+            index_u == "ALTER TABLE `category` ADD UNIQUE INDEX `uid_category_name_8b0cb9` (`name`)"
+        )
+    else:
+        assert index == 'ALTER TABLE "category" ADD INDEX "idx_category_name_8b0cb9" ("name")'
+        assert (
+            index_u == 'ALTER TABLE "category" ADD UNIQUE INDEX "uid_category_name_8b0cb9" ("name")'
+        )
 
 
 def test_drop_index():
     ret = Migrate.ddl.drop_index(Category, ["name"])
-    assert ret == "ALTER TABLE category DROP INDEX idx_category_name_8b0cb9"
-    ret = Migrate.ddl.drop_index(Category, ["name"], True)
-    assert ret == "ALTER TABLE category DROP INDEX uid_category_name_8b0cb9"
+    if isinstance(Migrate.ddl, MysqlDDL):
+        assert ret == "ALTER TABLE `category` DROP INDEX `idx_category_name_8b0cb9`"
+    else:
+        assert ret == 'ALTER TABLE "category" DROP INDEX "idx_category_name_8b0cb9"'
 
 
 def test_add_fk():
     ret = Migrate.ddl.add_fk(Category, Category._meta.fields_map.get("user"))
-    assert (
-        ret
-        == "ALTER TABLE category ADD CONSTRAINT `fk_category_user_e2e3874c` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE"
-    )
+    if isinstance(Migrate.ddl, MysqlDDL):
+        assert (
+            ret
+            == "ALTER TABLE `category` ADD CONSTRAINT `fk_category_user_e2e3874c` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE"
+        )
+    else:
+        assert (
+            ret
+            == 'ALTER TABLE "category" ADD CONSTRAINT "fk_category_user_e2e3874c" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE'
+        )
 
 
 def test_drop_fk():
     ret = Migrate.ddl.drop_fk(Category, Category._meta.fields_map.get("user"))
-    assert ret == "ALTER TABLE category DROP FOREIGN KEY fk_category_user_e2e3874c"
+    if isinstance(Migrate.ddl, MysqlDDL):
+        assert ret == "ALTER TABLE `category` DROP FOREIGN KEY `fk_category_user_e2e3874c`"
+    else:
+        assert ret == 'ALTER TABLE "category" DROP FOREIGN KEY "fk_category_user_e2e3874c"'

View File

@@ -1,8 +1,6 @@
 from tortoise import Tortoise
 
 from aerich.ddl.mysql import MysqlDDL
-from aerich.ddl.postgres import PostgresDDL
-from aerich.ddl.sqlite import SqliteDDL
 from aerich.migrate import Migrate
@@ -14,28 +12,19 @@ def test_migrate():
     Migrate.diff_models(models, diff_models, False)
     if isinstance(Migrate.ddl, MysqlDDL):
         assert Migrate.upgrade_operators == [
-            "ALTER TABLE category ADD `name` VARCHAR(200) NOT NULL",
-            "ALTER TABLE user ADD UNIQUE INDEX uid_user_usernam_9987ab (`username`)",
+            "ALTER TABLE `category` ADD `name` VARCHAR(200) NOT NULL",
+            "ALTER TABLE `user` ADD UNIQUE INDEX `uid_user_usernam_9987ab` (`username`)",
         ]
         assert Migrate.downgrade_operators == [
-            "ALTER TABLE category DROP COLUMN name",
-            "ALTER TABLE user DROP INDEX uid_user_usernam_9987ab",
+            "ALTER TABLE `category` DROP COLUMN `name`",
+            "ALTER TABLE `user` DROP INDEX `uid_user_usernam_9987ab`",
         ]
-    elif isinstance(Migrate.ddl, SqliteDDL):
+    else:
         assert Migrate.upgrade_operators == [
-            'ALTER TABLE category ADD "name" VARCHAR(200) NOT NULL',
-            'ALTER TABLE user ADD UNIQUE INDEX uid_user_usernam_9987ab ("username")',
+            'ALTER TABLE "category" ADD "name" VARCHAR(200) NOT NULL',
+            'ALTER TABLE "user" ADD UNIQUE INDEX "uid_user_usernam_9987ab" ("username")',
         ]
         assert Migrate.downgrade_operators == [
-            "ALTER TABLE category DROP COLUMN name",
-            "ALTER TABLE user DROP INDEX uid_user_usernam_9987ab",
-        ]
-    elif isinstance(Migrate.ddl, PostgresDDL):
-        assert Migrate.upgrade_operators == [
-            'ALTER TABLE category ADD "name" VARCHAR(200) NOT NULL',
-            'ALTER TABLE user ADD UNIQUE INDEX uid_user_usernam_9987ab ("username")',
-        ]
-        assert Migrate.downgrade_operators == [
-            "ALTER TABLE category DROP COLUMN name",
-            "ALTER TABLE user DROP INDEX uid_user_usernam_9987ab",
+            'ALTER TABLE "category" DROP COLUMN "name"',
+            'ALTER TABLE "user" DROP INDEX "uid_user_usernam_9987ab"',
         ]