4 Commits

Author SHA1 Message Date
long2ice 3f52ac348b Support rename table. (#139) 2021-03-25 21:21:49 +08:00
long2ice f8aa7a8f34 Fix inspectdb for FloatField. (#138) 2021-03-22 14:16:59 +08:00
long2ice 44d520cc82 Fix postgres field type change error. (#135) 2021-03-21 21:18:08 +08:00
long2ice 364735f804 Fix rename field on the field add. (#134) 2021-03-21 20:43:05 +08:00
15 changed files with 98 additions and 58 deletions

View File

@@ -2,6 +2,13 @@
## 0.5
### 0.5.2
- Fix rename field on the field add. (#134)
- Fix postgres field type change error. (#135)
- Fix inspectdb for `FloatField`. (#138)
- Support `rename table`. (#139)
### 0.5.1
- Fix tortoise connections not being closed properly. (#120)

View File

@@ -8,11 +8,7 @@
## Introduction
Aerich is a database migrations tool for Tortoise-ORM, which is like alembic for SQLAlchemy, or like Django ORM with
it\'s own migrations solution. it\'s own migration solution.
~~**Important: You can only use absolutely import in your `models.py` to make `aerich` work.**~~
From version `v0.5.0`, there is no such limitation now.
## Install
@@ -97,8 +93,8 @@ Success create app migrate location ./migrations/models
Success generate schema for app "models"
```
If your Tortoise-ORM app is not the default `models`, you must specify the correct app via `--app`, e.g. `aerich --app other_models init-db`.
### Update models and make migrate
@@ -193,8 +189,7 @@ Inspect a specified table in the default app and redirect to `models.py`:
aerich inspectdb -t user > models.py
```
Note that this command is limited and cannot infer some fields, such as `IntEnumField`, `ForeignKeyField`, and others.
### Multiple databases
@@ -213,12 +208,6 @@ tortoise_orm = {
You only need to specify `aerich.models` in one app, and must specify `--app` when running `aerich migrate` and so on.
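To make the multi-app setup concrete, here is a minimal sketch of such a configuration; the app names, model modules, and connection URLs are illustrative, not taken from the diff:

```python
# Hypothetical settings module with two apps; only "models" lists aerich.models.
tortoise_orm = {
    "connections": {
        "default": "mysql://root:123456@127.0.0.1:3306/test",
        "second": "sqlite://db_second.sqlite3",
    },
    "apps": {
        "models": {
            "models": ["app.models", "aerich.models"],
            "default_connection": "default",
        },
        "models_second": {
            "models": ["app.models_second"],
            "default_connection": "second",
        },
    },
}
```

With a layout like this, the second app would be migrated with `aerich --app models_second migrate`.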
## Support this project
| AliPay | WeChatPay | PayPal |
| -------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------- | ---------------------------------------------------------------- |
| <img width="200" src="https://github.com/long2ice/aerich/raw/dev/images/alipay.jpeg"/> | <img width="200" src="https://github.com/long2ice/aerich/raw/dev/images/wechatpay.jpeg"/> | [PayPal](https://www.paypal.me/long2ice) to my account long2ice. |
## License
This project is licensed under the

View File

@@ -1 +1 @@
__version__ = "0.5.1" __version__ = "0.5.2"

View File

@@ -26,6 +26,7 @@ class BaseDDL:
_CHANGE_COLUMN_TEMPLATE = (
'ALTER TABLE "{table_name}" CHANGE {old_column_name} {new_column_name} {new_column_type}'
)
_RENAME_TABLE_TEMPLATE = 'ALTER TABLE "{old_table_name}" RENAME TO "{new_table_name}"'
def __init__(self, client: "BaseDBAsyncClient"):
self.client = client
@@ -230,3 +231,9 @@ class BaseDDL:
def set_comment(self, model: "Type[Model]", field_describe: dict):
raise NotImplementedError
def rename_table(self, model: "Type[Model]", old_table_name: str, new_table_name: str):
db_table = model._meta.db_table
return self._RENAME_TABLE_TEMPLATE.format(
table_name=db_table, old_table_name=old_table_name, new_table_name=new_table_name
)
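As a quick illustration (not part of the diff), the new base template renders like this; the `config`/`configs` names are the same hypothetical pair the tests use later:

```python
# Standalone rendering of the rename-table template added above.
RENAME_TABLE_TEMPLATE = 'ALTER TABLE "{old_table_name}" RENAME TO "{new_table_name}"'

sql = RENAME_TABLE_TEMPLATE.format(old_table_name="config", new_table_name="configs")
print(sql)  # ALTER TABLE "config" RENAME TO "configs"
```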

View File

@@ -28,6 +28,7 @@ class MysqlDDL(BaseDDL):
_DROP_FK_TEMPLATE = "ALTER TABLE `{table_name}` DROP FOREIGN KEY `{fk_name}`"
_M2M_TABLE_TEMPLATE = "CREATE TABLE `{table_name}` (`{backward_key}` {backward_type} NOT NULL REFERENCES `{backward_table}` (`{backward_field}`) ON DELETE CASCADE,`{forward_key}` {forward_type} NOT NULL REFERENCES `{forward_table}` (`{forward_field}`) ON DELETE CASCADE){extra}{comment}"
_MODIFY_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` MODIFY COLUMN {column}"
_RENAME_TABLE_TEMPLATE = "ALTER TABLE `{old_table_name}` RENAME TO `{new_table_name}`"
def alter_column_null(self, model: "Type[Model]", field_describe: dict):
raise NotSupportError("Alter column null is unsupported in MySQL.")
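For comparison, a sketch of the MySQL override rendering the same rename with backtick quoting (table names again illustrative):

```python
# Standalone rendering of the MySQL rename-table template added above.
MYSQL_RENAME_TABLE_TEMPLATE = "ALTER TABLE `{old_table_name}` RENAME TO `{new_table_name}`"

print(MYSQL_RENAME_TABLE_TEMPLATE.format(old_table_name="config", new_table_name="configs"))
# ALTER TABLE `config` RENAME TO `configs`
```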

View File

@@ -12,7 +12,9 @@ class PostgresDDL(BaseDDL):
_ADD_INDEX_TEMPLATE = 'CREATE {unique}INDEX "{index_name}" ON "{table_name}" ({column_names})'
_DROP_INDEX_TEMPLATE = 'DROP INDEX "{index_name}"'
_ALTER_NULL_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" {set_drop} NOT NULL'
_MODIFY_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" TYPE {datatype}' _MODIFY_COLUMN_TEMPLATE = (
'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" TYPE {datatype}{using}'
)
_SET_COMMENT_TEMPLATE = 'COMMENT ON COLUMN "{table_name}"."{column}" IS {comment}'
_DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP CONSTRAINT "{fk_name}"'
@@ -27,10 +29,13 @@ class PostgresDDL(BaseDDL):
def modify_column(self, model: "Type[Model]", field_describe: dict, is_pk: bool = False):
db_table = model._meta.db_table
db_field_types = field_describe.get("db_field_types")
db_column = field_describe.get("db_column")
datatype = db_field_types.get(self.DIALECT) or db_field_types.get("")
return self._MODIFY_COLUMN_TEMPLATE.format(
table_name=db_table,
column=field_describe.get("db_column"), column=db_column,
datatype=db_field_types.get(self.DIALECT) or db_field_types.get(""), datatype=datatype,
using=f' USING "{db_column}"::{datatype}',
)
def set_comment(self, model: "Type[Model]", field_describe: dict):
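The new `{using}` part is what fixes the Postgres type-change error: the existing column is explicitly cast to the target type. A minimal sketch of the rendered statement, using a hypothetical `category.name` column widened to `VARCHAR(200)`:

```python
# Standalone rendering of the updated Postgres modify-column template.
MODIFY_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" TYPE {datatype}{using}'

db_column, datatype = "name", "VARCHAR(200)"
sql = MODIFY_COLUMN_TEMPLATE.format(
    table_name="category",
    column=db_column,
    datatype=datatype,
    using=f' USING "{db_column}"::{datatype}',
)
print(sql)
# ALTER TABLE "category" ALTER COLUMN "name" TYPE VARCHAR(200) USING "name"::VARCHAR(200)
```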

View File

@@ -15,6 +15,7 @@ class InspectDb:
"LONGTEXT": " {field} = fields.TextField({null}{default}{comment})",
"TEXT": " {field} = fields.TextField({null}{default}{comment})",
"DATETIME": " {field} = fields.DatetimeField({null}{default}{comment})",
"FLOAT": " {field} = fields.FloatField({null}{default}{comment})",
}
def __init__(self, conn: BaseDBAsyncClient, tables: Optional[List[str]] = None):
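A small sketch of what the new `FLOAT` mapping emits; the `price` column name and the empty `null`/`default`/`comment` parts are hypothetical:

```python
# Rendering the FloatField template added above for a hypothetical FLOAT column.
FLOAT_TEMPLATE = "    {field} = fields.FloatField({null}{default}{comment})"

print(FLOAT_TEMPLATE.format(field="price", null="", default="", comment=""))
# "    price = fields.FloatField()"
```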

View File

@@ -176,7 +176,11 @@ class Migrate:
pass
else:
old_model_describe = old_models.get(new_model_str)
# rename table
new_table = new_model_describe.get("table")
old_table = old_model_describe.get("table")
if new_table != old_table:
cls._add_operator(cls.rename_table(model, old_table, new_table), upgrade)
old_unique_together = set(
map(lambda x: tuple(x), old_model_describe.get("unique_together"))
)
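The check added above simply compares the `table` entry of the old and new model descriptions. An illustrative sketch with trimmed, hypothetical describe dicts:

```python
# Hypothetical (heavily trimmed) model descriptions as aerich would compare them.
old_model_describe = {"table": "config"}
new_model_describe = {"table": "configs"}

old_table = old_model_describe.get("table")
new_table = new_model_describe.get("table")
if new_table != old_table:
    # The resulting upgrade operator: ALTER TABLE "config" RENAME TO "configs"
    print(f"rename table: {old_table} -> {new_table}")
```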
@@ -257,14 +261,23 @@ class Migrate:
old_data_field_name = old_data_field.get("name")
if len(changes) == 2:
# rename field
if changes[0] == ( if (
changes[0]
== (
"change",
"name",
(old_data_field_name, new_data_field_name),
) and changes[1] == ( )
and changes[1]
== (
"change",
"db_column",
(old_data_field.get("db_column"), new_data_field.get("db_column")), (
old_data_field.get("db_column"),
new_data_field.get("db_column"),
),
)
and old_data_field_name not in new_data_fields_name
):
if upgrade:
is_rename = click.prompt(
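The widened condition still matches a two-entry change list for `name` and `db_column`, but now also requires that the old field name is absent from the new model, so adding a field no longer looks like a rename. A sketch with hypothetical field names in the dictdiffer-style tuples aerich compares:

```python
# Hypothetical diff entries for renaming Product.image to Product.pic.
old_data_field_name, new_data_field_name = "image", "pic"
old_db_column, new_db_column = "image", "pic"
new_data_fields_name = ["pic"]  # field names present in the new model

changes = [
    ("change", "name", (old_data_field_name, new_data_field_name)),
    ("change", "db_column", (old_db_column, new_db_column)),
]

looks_like_rename = (
    len(changes) == 2
    and changes[0] == ("change", "name", (old_data_field_name, new_data_field_name))
    and changes[1] == ("change", "db_column", (old_db_column, new_db_column))
    and old_data_field_name not in new_data_fields_name  # the new guard from #134
)
print(looks_like_rename)  # True
```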
@@ -383,6 +396,9 @@ class Migrate:
elif option == "default":
# change column default
cls._add_operator(cls._alter_default(model, new_data_field), upgrade)
elif option == "unique":
# because indexed includes it
pass
else:
# modify column
cls._add_operator(
@@ -394,6 +410,10 @@ class Migrate:
if old_model not in new_models.keys():
cls._add_operator(cls.drop_model(old_models.get(old_model).get("table")), upgrade)
@classmethod
def rename_table(cls, model: Type[Model], old_table_name: str, new_table_name: str):
return cls.ddl.rename_table(model, old_table_name, new_table_name)
@classmethod
def add_model(cls, model: Type[Model]):
return cls.ddl.create_table(model)

Binary file not shown. (before: 75 KiB)

Binary file not shown. (before: 76 KiB)

poetry.lock generated
View File

@@ -269,7 +269,7 @@ python-versions = "*"
[[package]]
name = "isort"
version = "5.7.0" version = "5.8.0"
description = "A Python utility / library to sort Python imports."
category = "dev"
optional = false
@@ -404,12 +404,12 @@ optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
name = "pypika" name = "pypika-tortoise"
version = "0.44.1" version = "0.1.0"
description = "A SQL query builder API for Python" description = "Forked from pypika and streamline just for tortoise-orm"
category = "main"
optional = false
python-versions = "*" python-versions = ">=3.7,<4.0"
[[package]]
name = "pytest"
@@ -552,7 +552,7 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
name = "tortoise-orm"
version = "0.16.21" version = "0.17.0"
description = "Easy async ORM for python, built with relations in mind"
category = "main"
optional = false
@@ -561,12 +561,13 @@ python-versions = ">=3.7,<4.0"
[package.dependencies]
aiosqlite = ">=0.16.0,<0.17.0"
iso8601 = ">=0.1.13,<0.2.0"
pypika = ">=0.44.0,<0.45.0" pypika-tortoise = ">=0.1.0,<0.2.0"
pytz = ">=2020.4,<2021.0"
[package.extras]
docs = ["pygments", "cloud-sptheme", "docutils", "sphinx"]
aiomysql = ["aiomysql"]
asyncmy = ["asyncmy"]
asyncpg = ["asyncpg"]
accel = ["ciso8601 (>=2.1.2,<3.0.0)", "python-rapidjson", "uvloop (>=0.14.0,<0.15.0)"]
@@ -606,7 +607,7 @@ asyncpg = ["asyncpg"]
[metadata]
lock-version = "1.1"
python-versions = "^3.7"
content-hash = "27f12254888e96c2b78ba5d786091fbb0bc98efcff2d71b59f7165b5b6edb1a3" content-hash = "3b8e107e2fd01362244f27f783c8dcfd792e4ea3c98a0579e4b987e0c5681b5d"
[metadata.files]
aiomysql = [
@@ -755,8 +756,8 @@ iso8601 = [
{file = "iso8601-0.1.14.tar.gz", hash = "sha256:8aafd56fa0290496c5edbb13c311f78fa3a241f0853540da09d9363eae3ebd79"},
]
isort = [
{file = "isort-5.7.0-py3-none-any.whl", hash = "sha256:fff4f0c04e1825522ce6949973e83110a6e907750cd92d128b0d14aaaadbffdc"}, {file = "isort-5.8.0-py3-none-any.whl", hash = "sha256:2bb1680aad211e3c9944dbce1d4ba09a989f04e238296c87fe2139faa26d655d"},
{file = "isort-5.7.0.tar.gz", hash = "sha256:c729845434366216d320e936b8ad6f9d681aab72dc7cbc2d51bedc3582f3ad1e"}, {file = "isort-5.8.0.tar.gz", hash = "sha256:0a943902919f65c5684ac4e0154b1ad4fac6dcaa5d9f3426b732f1c8b5419be6"},
]
mccabe = [
{file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
@@ -830,8 +831,9 @@ pyparsing = [
{file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"},
{file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"},
]
pypika = [ pypika-tortoise = [
{file = "pypika-0.44.1.tar.gz", hash = "sha256:316839144d3ad7656405a10cdd26d2181f16bb8ff7e256d616ffb50335ca1fcb"}, {file = "pypika-tortoise-0.1.0.tar.gz", hash = "sha256:7176e98ff0cf7c311d4ba58f28f1755956265dee2f9781e65e1304a67a3e5aa5"},
{file = "pypika_tortoise-0.1.0-py3-none-any.whl", hash = "sha256:ec83b0b2964be01ef563f5f019b0332a18177604e841c47ad39d798798c6dfe9"},
]
pytest = [
{file = "pytest-6.2.2-py3-none-any.whl", hash = "sha256:b574b57423e818210672e07ca1fa90aaf194a4f63f3ab909a2c67ebb22913839"},
@@ -940,8 +942,8 @@ toml = [
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
]
tortoise-orm = [
{file = "tortoise-orm-0.16.21.tar.gz", hash = "sha256:9729eac3eb58e59c32e2815d5ff1941a71e1eecd63834ae7199b6084c1a454b5"}, {file = "tortoise-orm-0.17.0.tar.gz", hash = "sha256:d59c9ba2e8f3bb165f6ba7880ae0f0127659aa143750de419850a06329ca27ff"},
{file = "tortoise_orm-0.16.21-py3-none-any.whl", hash = "sha256:f36aa16d07bab69b141e91f8791c0f878fbcc0acfffa3c671ea10a6ad73c54ed"}, {file = "tortoise_orm-0.17.0-py3-none-any.whl", hash = "sha256:39da891a61871a9e5e980007776c7d1f9b61e6b8601eb6482c90f89b30334638"},
]
typed-ast = [
{file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:7703620125e4fb79b64aa52427ec192822e9f45d37d4b6625ab37ef403e1df70"},

View File

@@ -1,6 +1,6 @@
[tool.poetry]
name = "aerich"
version = "0.5.1" version = "0.5.2"
description = "A database migrations tool for Tortoise ORM."
authors = ["long2ice <long2ice@gmail.com>"]
license = "Apache-2.0"
@@ -16,7 +16,7 @@ include = ["CHANGELOG.md", "LICENSE", "README.md"]
[tool.poetry.dependencies]
python = "^3.7"
tortoise-orm = "^0.16.21" tortoise-orm = "*"
click = "*"
pydantic = "*"
aiomysql = { version = "*", optional = true }

View File

@@ -61,3 +61,6 @@ class Config(Model):
key = fields.CharField(max_length=20)
value = fields.JSONField()
status: Status = fields.IntEnumField(Status, default=Status.on)
class Meta:
table = "configs"
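For context, a standalone sketch of the user-facing pattern this test model exercises: giving an existing model an explicit `Meta.table` is what aerich now detects as a table rename (the implicit table name for this class would be `config`). The `Status` enum field is omitted to keep the sketch self-contained:

```python
from tortoise import fields
from tortoise.models import Model


class Config(Model):
    key = fields.CharField(max_length=20)
    value = fields.JSONField()

    class Meta:
        table = "configs"  # previously the implicit "config"
```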

View File

@@ -76,7 +76,10 @@ def test_modify_column():
if isinstance(Migrate.ddl, MysqlDDL):
assert ret0 == "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200)"
elif isinstance(Migrate.ddl, PostgresDDL):
assert ret0 == 'ALTER TABLE "category" ALTER COLUMN "name" TYPE VARCHAR(200)' assert (
ret0
== 'ALTER TABLE "category" ALTER COLUMN "name" TYPE VARCHAR(200) USING "name"::VARCHAR(200)'
)
if isinstance(Migrate.ddl, MysqlDDL):
assert (
@@ -84,7 +87,9 @@ def test_modify_column():
== "ALTER TABLE `user` MODIFY COLUMN `is_active` BOOL NOT NULL COMMENT 'Is Active' DEFAULT 1"
)
elif isinstance(Migrate.ddl, PostgresDDL):
assert ret1 == 'ALTER TABLE "user" ALTER COLUMN "is_active" TYPE BOOL' assert (
ret1 == 'ALTER TABLE "user" ALTER COLUMN "is_active" TYPE BOOL USING "is_active"::BOOL'
)
def test_alter_column_default():

View File

@@ -146,7 +146,7 @@ old_models_describe = {
"models.Config": {
"name": "models.Config",
"app": "models",
"table": "config", "table": "configs",
"abstract": False,
"description": None,
"docstring": None,
@@ -768,7 +768,7 @@ def test_migrate(mocker: MockerFixture):
- alter default: Config.status
- rename column: Product.image -> Product.pic
"""
mocker.patch("click.prompt", side_effect=(False, True)) mocker.patch("click.prompt", side_effect=(True,))
models_describe = get_models_describe("models")
Migrate.app = "models"
@@ -790,6 +790,7 @@ def test_migrate(mocker: MockerFixture):
"ALTER TABLE `config` ALTER COLUMN `status` DROP DEFAULT",
"ALTER TABLE `email` ADD `address` VARCHAR(200) NOT NULL",
"ALTER TABLE `email` DROP COLUMN `user_id`",
"ALTER TABLE `configs` RENAME TO `config`",
"ALTER TABLE `product` RENAME COLUMN `image` TO `pic`",
"ALTER TABLE `email` RENAME COLUMN `id` TO `email_id`",
"ALTER TABLE `email` DROP FOREIGN KEY `fk_email_user_5b58673d`",
@@ -798,7 +799,6 @@ def test_migrate(mocker: MockerFixture):
"ALTER TABLE `product` ALTER COLUMN `view_num` SET DEFAULT 0",
"ALTER TABLE `user` DROP COLUMN `avatar`",
"ALTER TABLE `user` MODIFY COLUMN `password` VARCHAR(100) NOT NULL",
"ALTER TABLE `user` MODIFY COLUMN `username` VARCHAR(20) NOT NULL",
"CREATE TABLE IF NOT EXISTS `newmodel` (\n `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT,\n `name` VARCHAR(50) NOT NULL\n) CHARACTER SET utf8mb4;",
"ALTER TABLE `user` ADD UNIQUE INDEX `uid_user_usernam_9987ab` (`username`)",
"CREATE TABLE `email_user` (`email_id` INT NOT NULL REFERENCES `email` (`email_id`) ON DELETE CASCADE,`user_id` INT NOT NULL REFERENCES `user` (`id`) ON DELETE CASCADE) CHARACTER SET utf8mb4",
@@ -814,6 +814,7 @@ def test_migrate(mocker: MockerFixture):
"ALTER TABLE `config` ALTER COLUMN `status` SET DEFAULT 1",
"ALTER TABLE `email` ADD `user_id` INT NOT NULL",
"ALTER TABLE `email` DROP COLUMN `address`",
"ALTER TABLE `config` RENAME TO `configs`",
"ALTER TABLE `product` RENAME COLUMN `pic` TO `image`",
"ALTER TABLE `email` RENAME COLUMN `email_id` TO `id`",
"ALTER TABLE `email` ADD CONSTRAINT `fk_email_user_5b58673d` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE",
@@ -823,7 +824,6 @@ def test_migrate(mocker: MockerFixture):
"ALTER TABLE `user` ADD `avatar` VARCHAR(200) NOT NULL DEFAULT ''",
"ALTER TABLE `user` DROP INDEX `idx_user_usernam_9987ab`",
"ALTER TABLE `user` MODIFY COLUMN `password` VARCHAR(200) NOT NULL",
"ALTER TABLE `user` MODIFY COLUMN `username` VARCHAR(20) NOT NULL",
"DROP TABLE IF EXISTS `email_user`",
"DROP TABLE IF EXISTS `newmodel`",
]
@@ -832,8 +832,8 @@ def test_migrate(mocker: MockerFixture):
elif isinstance(Migrate.ddl, PostgresDDL):
assert sorted(Migrate.upgrade_operators) == sorted(
[
'ALTER TABLE "category" ALTER COLUMN "name" TYPE VARCHAR(200)', 'ALTER TABLE "category" ALTER COLUMN "name" TYPE VARCHAR(200) USING "name"::VARCHAR(200)',
'ALTER TABLE "category" ALTER COLUMN "slug" TYPE VARCHAR(100)', 'ALTER TABLE "category" ALTER COLUMN "slug" TYPE VARCHAR(100) USING "slug"::VARCHAR(100)',
'ALTER TABLE "config" ADD "user_id" INT NOT NULL',
'ALTER TABLE "config" ADD CONSTRAINT "fk_config_user_17daa970" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE',
'ALTER TABLE "config" ALTER COLUMN "status" DROP DEFAULT',
@@ -841,13 +841,13 @@ def test_migrate(mocker: MockerFixture):
'ALTER TABLE "email" DROP COLUMN "user_id"',
'ALTER TABLE "product" RENAME COLUMN "image" TO "pic"',
'ALTER TABLE "email" RENAME COLUMN "id" TO "email_id"',
'ALTER TABLE "configs" RENAME TO "config"',
'ALTER TABLE "email" DROP CONSTRAINT "fk_email_user_5b58673d"',
'CREATE INDEX "idx_email_email_4a1a33" ON "email" ("email")',
'ALTER TABLE "user" ALTER COLUMN "username" TYPE VARCHAR(20)',
'CREATE UNIQUE INDEX "uid_product_name_f14935" ON "product" ("name", "type")',
'ALTER TABLE "product" ALTER COLUMN "view_num" SET DEFAULT 0',
'ALTER TABLE "user" DROP COLUMN "avatar"',
'ALTER TABLE "user" ALTER COLUMN "password" TYPE VARCHAR(100)', 'ALTER TABLE "user" ALTER COLUMN "password" TYPE VARCHAR(100) USING "password"::VARCHAR(100)',
'CREATE TABLE IF NOT EXISTS "newmodel" (\n "id" SERIAL NOT NULL PRIMARY KEY,\n "name" VARCHAR(50) NOT NULL\n);\nCOMMENT ON COLUMN "config"."user_id" IS \'User\';',
'CREATE UNIQUE INDEX "uid_user_usernam_9987ab" ON "user" ("username")',
'CREATE TABLE "email_user" ("email_id" INT NOT NULL REFERENCES "email" ("email_id") ON DELETE CASCADE,"user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE)',
@@ -855,15 +855,15 @@ def test_migrate(mocker: MockerFixture):
)
assert sorted(Migrate.downgrade_operators) == sorted(
[
'ALTER TABLE "category" ALTER COLUMN "name" TYPE VARCHAR(200)', 'ALTER TABLE "category" ALTER COLUMN "name" TYPE VARCHAR(200) USING "name"::VARCHAR(200)',
'ALTER TABLE "category" ALTER COLUMN "slug" TYPE VARCHAR(200)', 'ALTER TABLE "category" ALTER COLUMN "slug" TYPE VARCHAR(200) USING "slug"::VARCHAR(200)',
'ALTER TABLE "user" ALTER COLUMN "password" TYPE VARCHAR(200)', 'ALTER TABLE "user" ALTER COLUMN "password" TYPE VARCHAR(200) USING "password"::VARCHAR(200)',
'ALTER TABLE "user" ALTER COLUMN "username" TYPE VARCHAR(20)',
'ALTER TABLE "config" DROP COLUMN "user_id"',
'ALTER TABLE "config" DROP CONSTRAINT "fk_config_user_17daa970"',
'ALTER TABLE "config" ALTER COLUMN "status" SET DEFAULT 1',
'ALTER TABLE "email" ADD "user_id" INT NOT NULL',
'ALTER TABLE "email" DROP COLUMN "address"',
'ALTER TABLE "config" RENAME TO "configs"',
'ALTER TABLE "product" RENAME COLUMN "pic" TO "image"',
'ALTER TABLE "email" RENAME COLUMN "email_id" TO "id"',
'ALTER TABLE "email" ADD CONSTRAINT "fk_email_user_5b58673d" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE',