Merge remote-tracking branch 'origin/dev' into dev

# Conflicts:
#	aerich/utils.py
#	tests/test_utils.py

commit d777c9c278
@@ -99,11 +99,8 @@ class Command:
         return ret
 
     async def history(self):
-        ret = []
         versions = Migrate.get_all_version_files()
-        for version in versions:
-            ret.append(version)
-        return ret
+        return [version for version in versions]
 
     async def inspectdb(self, tables: List[str] = None) -> str:
         connection = get_app_connection(self.tortoise_config, self.app)
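Review note: history() now returns a list comprehension instead of appending inside a loop; for the same input both forms build the same list. A minimal standalone sketch of the equivalence (plain Python, no aerich imports):

    def history_loop(versions):
        # original style: accumulate into a list
        ret = []
        for version in versions:
            ret.append(version)
        return ret

    def history_comprehension(versions):
        # new style: the same result in one expression
        return [version for version in versions]

    sample = ["0_20210101000000_init.py", "1_20210102000000_update.py"]  # hypothetical file names
    assert history_loop(sample) == history_comprehension(sample)

list(versions) would be equivalent as well; the hunk keeps the comprehension form.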
@@ -54,10 +54,10 @@ async def cli(ctx: Context, config, app):
 
     invoked_subcommand = ctx.invoked_subcommand
     if invoked_subcommand != "init":
-        if not Path(config).exists():
+        config_path = Path(config)
+        if not config_path.exists():
             raise UsageError("You must exec init first", ctx=ctx)
-        with open(config, "r") as f:
-            content = f.read()
+        content = config_path.read_text()
         doc = tomlkit.parse(content)
         try:
             tool = doc["tool"]["aerich"]
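Review note: cli() swaps the explicit open()/read() pair for pathlib's read_text(). A small sketch of the pattern, using a throwaway temporary file purely for illustration:

    import tempfile
    from pathlib import Path

    with tempfile.TemporaryDirectory() as tmp:
        config_path = Path(tmp) / "aerich_config.toml"  # hypothetical file, not aerich's real config path
        config_path.write_text('[tool.aerich]\ntortoise_orm = "settings.TORTOISE_ORM"\n')

        # old style: explicit file handle
        with open(config_path, "r") as f:
            content_old = f.read()

        # new style: pathlib opens, reads, and closes the file in one call
        content_new = config_path.read_text()

        assert content_old == content_new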
@@ -192,9 +192,9 @@ async def init(ctx: Context, tortoise_orm, location, src_folder):
     # check that we can find the configuration, if not we can fail before the config file gets created
     add_src_path(src_folder)
     get_tortoise_config(ctx, tortoise_orm)
-    if Path(config_file).exists():
-        with open(config_file, "r") as f:
-            content = f.read()
+    config_path = Path(config_file)
+    if config_path.exists():
+        content = config_path.read_text()
         doc = tomlkit.parse(content)
     else:
         doc = tomlkit.parse("[tool.aerich]")
@@ -204,8 +204,7 @@ async def init(ctx: Context, tortoise_orm, location, src_folder):
     table["src_folder"] = src_folder
     doc["tool"]["aerich"] = table
 
-    with open(config_file, "w") as f:
-        f.write(tomlkit.dumps(doc))
+    config_path.write_text(tomlkit.dumps(doc))
 
     Path(location).mkdir(parents=True, exist_ok=True)
 
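Review note: init() gets the same pathlib treatment, with write_text() replacing the open(..., "w")/f.write() block. A self-contained sketch of the read-modify-write cycle around a tomlkit document, using a temporary file as a stand-in for the real config file:

    import tempfile
    from pathlib import Path

    import tomlkit

    with tempfile.TemporaryDirectory() as tmp:
        config_path = Path(tmp) / "pyproject.toml"  # hypothetical location, illustration only

        # read the existing document if present, otherwise start from an empty [tool.aerich] table
        if config_path.exists():
            doc = tomlkit.parse(config_path.read_text())
        else:
            doc = tomlkit.parse("[tool.aerich]")

        doc["tool"]["aerich"]["tortoise_orm"] = "settings.TORTOISE_ORM"
        config_path.write_text(tomlkit.dumps(doc))

        assert "tortoise_orm" in config_path.read_text()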
@@ -23,7 +23,12 @@ class BaseDDL:
     _DROP_INDEX_TEMPLATE = 'ALTER TABLE "{table_name}" DROP INDEX "{index_name}"'
     _ADD_FK_TEMPLATE = 'ALTER TABLE "{table_name}" ADD CONSTRAINT "{fk_name}" FOREIGN KEY ("{db_column}") REFERENCES "{table}" ("{field}") ON DELETE {on_delete}'
     _DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP FOREIGN KEY "{fk_name}"'
-    _M2M_TABLE_TEMPLATE = 'CREATE TABLE "{table_name}" ("{backward_key}" {backward_type} NOT NULL REFERENCES "{backward_table}" ("{backward_field}") ON DELETE CASCADE,"{forward_key}" {forward_type} NOT NULL REFERENCES "{forward_table}" ("{forward_field}") ON DELETE {on_delete}){extra}{comment}'
+    _M2M_TABLE_TEMPLATE = (
+        'CREATE TABLE "{table_name}" (\n'
+        ' "{backward_key}" {backward_type} NOT NULL REFERENCES "{backward_table}" ("{backward_field}") ON DELETE CASCADE,\n'
+        ' "{forward_key}" {forward_type} NOT NULL REFERENCES "{forward_table}" ("{forward_field}") ON DELETE {on_delete}\n'
+        "){extra}{comment}"
+    )
     _MODIFY_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" MODIFY COLUMN {column}'
     _CHANGE_COLUMN_TEMPLATE = (
        'ALTER TABLE "{table_name}" CHANGE {old_column_name} {new_column_name} {new_column_type}'
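Review note: the single-line _M2M_TABLE_TEMPLATE becomes a parenthesized group of adjacent string literals; Python concatenates them at compile time, so the rendered DDL now carries literal newlines. A sketch of the formatting behaviour, with placeholder values borrowed from the test fixtures further down in this diff:

    M2M_TEMPLATE = (
        'CREATE TABLE "{table_name}" (\n'
        ' "{backward_key}" {backward_type} NOT NULL REFERENCES "{backward_table}" ("{backward_field}") ON DELETE CASCADE,\n'
        ' "{forward_key}" {forward_type} NOT NULL REFERENCES "{forward_table}" ("{forward_field}") ON DELETE {on_delete}\n'
        "){extra}{comment}"
    )

    sql = M2M_TEMPLATE.format(
        table_name="email_user",
        backward_key="email_id",
        backward_type="INT",
        backward_table="email",
        backward_field="email_id",
        forward_key="user_id",
        forward_type="INT",
        forward_table="user",
        forward_field="id",
        on_delete="CASCADE",
        extra="",
        comment="",
    )
    # the statement spans several lines because the \n escapes are part of the template itself
    assert sql.startswith('CREATE TABLE "email_user"')
    assert sql.count("\n") == 3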
@@ -180,7 +185,7 @@ class BaseDDL:
                 "idx" if not unique else "uid", model, field_names
             ),
             table_name=model._meta.db_table,
-            column_names=", ".join([self.schema_generator.quote(f) for f in field_names]),
+            column_names=", ".join(self.schema_generator.quote(f) for f in field_names),
         )
 
     def drop_index(self, model: "Type[Model]", field_names: List[str], unique=False):
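Review note: dropping the square brackets inside ", ".join(...) turns a list comprehension into a generator expression; str.join accepts either, so the output is identical and the intermediate list is skipped. A tiny sketch with a stand-in quote function (quote_ident is illustrative, not the schema generator's real method):

    def quote_ident(name):
        # stand-in for self.schema_generator.quote(); simply wraps the name in double quotes
        return '"{}"'.format(name)

    field_names = ["name", "type_db_alias"]

    joined_from_list = ", ".join([quote_ident(f) for f in field_names])
    joined_from_genexp = ", ".join(quote_ident(f) for f in field_names)

    assert joined_from_list == joined_from_genexp == '"name", "type_db_alias"'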
@@ -22,6 +22,11 @@ class MysqlDDL(BaseDDL):
     _DROP_INDEX_TEMPLATE = "ALTER TABLE `{table_name}` DROP INDEX `{index_name}`"
     _ADD_FK_TEMPLATE = "ALTER TABLE `{table_name}` ADD CONSTRAINT `{fk_name}` FOREIGN KEY (`{db_column}`) REFERENCES `{table}` (`{field}`) ON DELETE {on_delete}"
     _DROP_FK_TEMPLATE = "ALTER TABLE `{table_name}` DROP FOREIGN KEY `{fk_name}`"
-    _M2M_TABLE_TEMPLATE = "CREATE TABLE `{table_name}` (`{backward_key}` {backward_type} NOT NULL REFERENCES `{backward_table}` (`{backward_field}`) ON DELETE CASCADE,`{forward_key}` {forward_type} NOT NULL REFERENCES `{forward_table}` (`{forward_field}`) ON DELETE CASCADE){extra}{comment}"
+    _M2M_TABLE_TEMPLATE = (
+        "CREATE TABLE `{table_name}` (\n"
+        " `{backward_key}` {backward_type} NOT NULL REFERENCES `{backward_table}` (`{backward_field}`) ON DELETE CASCADE,\n"
+        " `{forward_key}` {forward_type} NOT NULL REFERENCES `{forward_table}` (`{forward_field}`) ON DELETE CASCADE\n"
+        "){extra}{comment}"
+    )
     _MODIFY_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` MODIFY COLUMN {column}"
     _RENAME_TABLE_TEMPLATE = "ALTER TABLE `{old_table_name}` RENAME TO `{new_table_name}`"
@@ -72,18 +72,16 @@ def test_modify_column():
     ret1 = Migrate.ddl.modify_column(User, User._meta.fields_map.get("is_active").describe(False))
     if isinstance(Migrate.ddl, MysqlDDL):
         assert ret0 == "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200)"
+        assert (
+            ret1
+            == "ALTER TABLE `user` MODIFY COLUMN `is_active` BOOL NOT NULL COMMENT 'Is Active' DEFAULT 1"
+        )
     elif isinstance(Migrate.ddl, PostgresDDL):
         assert (
             ret0
             == 'ALTER TABLE "category" ALTER COLUMN "name" TYPE VARCHAR(200) USING "name"::VARCHAR(200)'
         )
 
-    if isinstance(Migrate.ddl, MysqlDDL):
-        assert (
-            ret1
-            == "ALTER TABLE `user` MODIFY COLUMN `is_active` BOOL NOT NULL COMMENT 'Is Active' DEFAULT 1"
-        )
-    elif isinstance(Migrate.ddl, PostgresDDL):
         assert (
             ret1 == 'ALTER TABLE "user" ALTER COLUMN "is_active" TYPE BOOL USING "is_active"::BOOL'
         )
@@ -802,8 +802,7 @@ def test_migrate(mocker: MockerFixture):
     Migrate.diff_models(models_describe, old_models_describe, False)
     Migrate._merge_operators()
     if isinstance(Migrate.ddl, MysqlDDL):
-        expected_upgrade_operators = set(
-            [
+        expected_upgrade_operators = {
            "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200)",
            "ALTER TABLE `category` MODIFY COLUMN `slug` VARCHAR(100) NOT NULL",
            "ALTER TABLE `config` ADD `user_id` INT NOT NULL COMMENT 'User'",
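Review note: from here on the expected-operator fixtures switch from set([...]) to set literals; the members are untouched, only the construction changes. A short sketch of the equivalence, reusing two strings from this hunk:

    expected_literal = {
        "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200)",
        "ALTER TABLE `config` ADD `user_id` INT NOT NULL COMMENT 'User'",
    }
    expected_via_constructor = set(
        [
            "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200)",
            "ALTER TABLE `config` ADD `user_id` INT NOT NULL COMMENT 'User'",
        ]
    )
    # same members either way; the literal simply avoids building a throwaway list first
    assert expected_literal == expected_via_constructor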
@@ -812,7 +811,6 @@ def test_migrate(mocker: MockerFixture):
            "ALTER TABLE `config` MODIFY COLUMN `value` JSON NOT NULL",
            "ALTER TABLE `email` ADD `address` VARCHAR(200) NOT NULL",
            "ALTER TABLE `email` DROP COLUMN `user_id`",
-           "CREATE TABLE `email_user` (`email_id` INT NOT NULL REFERENCES `email` (`email_id`) ON DELETE CASCADE,`user_id` INT NOT NULL REFERENCES `user` (`id`) ON DELETE CASCADE) CHARACTER SET utf8mb4",
            "ALTER TABLE `configs` RENAME TO `config`",
            "ALTER TABLE `product` RENAME COLUMN `image` TO `pic`",
            "ALTER TABLE `email` RENAME COLUMN `id` TO `email_id`",
@@ -830,14 +828,13 @@ def test_migrate(mocker: MockerFixture):
            "ALTER TABLE `user` MODIFY COLUMN `is_superuser` BOOL NOT NULL COMMENT 'Is SuperUser' DEFAULT 0",
            "ALTER TABLE `user` MODIFY COLUMN `longitude` DECIMAL(10,8) NOT NULL",
            "ALTER TABLE `user` ADD UNIQUE INDEX `uid_user_usernam_9987ab` (`username`)",
+           "CREATE TABLE `email_user` (\n `email_id` INT NOT NULL REFERENCES `email` (`email_id`) ON DELETE CASCADE,\n `user_id` INT NOT NULL REFERENCES `user` (`id`) ON DELETE CASCADE\n) CHARACTER SET utf8mb4",
            "CREATE TABLE IF NOT EXISTS `newmodel` (\n `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT,\n `name` VARCHAR(50) NOT NULL\n) CHARACTER SET utf8mb4;",
            "ALTER TABLE `category` MODIFY COLUMN `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6)",
            "ALTER TABLE `product` MODIFY COLUMN `body` LONGTEXT NOT NULL",
            "ALTER TABLE `email` MODIFY COLUMN `is_primary` BOOL NOT NULL DEFAULT 0",
-           ]
-        )
-        expected_downgrade_operators = set(
-            [
+        }
+        expected_downgrade_operators = {
            "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200) NOT NULL",
            "ALTER TABLE `category` MODIFY COLUMN `slug` VARCHAR(200) NOT NULL",
            "ALTER TABLE `config` DROP COLUMN `user_id`",
@@ -868,8 +865,7 @@ def test_migrate(mocker: MockerFixture):
            "ALTER TABLE `user` MODIFY COLUMN `longitude` DECIMAL(12,9) NOT NULL",
            "ALTER TABLE `product` MODIFY COLUMN `body` LONGTEXT NOT NULL",
            "ALTER TABLE `email` MODIFY COLUMN `is_primary` BOOL NOT NULL DEFAULT 0",
-           ]
-        )
+        }
         assert not set(Migrate.upgrade_operators).symmetric_difference(expected_upgrade_operators)
 
         assert not set(Migrate.downgrade_operators).symmetric_difference(
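Review note: the assertions compare operator sets via symmetric_difference, so ordering is ignored and a mismatch in either direction fails the test. A compact illustration with two operators taken from this diff:

    generated = {"ALTER TABLE `config` DROP COLUMN `user_id`", "DROP TABLE IF EXISTS `email_user`"}
    expected = {"DROP TABLE IF EXISTS `email_user`", "ALTER TABLE `config` DROP COLUMN `user_id`"}

    # an empty symmetric difference means both sets hold exactly the same operators
    assert not generated.symmetric_difference(expected)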
@@ -877,8 +873,7 @@ def test_migrate(mocker: MockerFixture):
         )
 
     elif isinstance(Migrate.ddl, PostgresDDL):
-        expected_upgrade_operators = set(
-            [
+        expected_upgrade_operators = {
            'ALTER TABLE "category" ALTER COLUMN "name" DROP NOT NULL',
            'ALTER TABLE "category" ALTER COLUMN "slug" TYPE VARCHAR(100) USING "slug"::VARCHAR(100)',
            'ALTER TABLE "category" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ',
@@ -905,14 +900,12 @@ def test_migrate(mocker: MockerFixture):
            'ALTER TABLE "user" ALTER COLUMN "longitude" TYPE DECIMAL(10,8) USING "longitude"::DECIMAL(10,8)',
            'CREATE INDEX "idx_product_name_869427" ON "product" ("name", "type_db_alias")',
            'CREATE INDEX "idx_email_email_4a1a33" ON "email" ("email")',
-           'CREATE TABLE "email_user" ("email_id" INT NOT NULL REFERENCES "email" ("email_id") ON DELETE CASCADE,"user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE)',
+           'CREATE TABLE "email_user" (\n "email_id" INT NOT NULL REFERENCES "email" ("email_id") ON DELETE CASCADE,\n "user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE\n)',
            'CREATE TABLE IF NOT EXISTS "newmodel" (\n "id" SERIAL NOT NULL PRIMARY KEY,\n "name" VARCHAR(50) NOT NULL\n);\nCOMMENT ON COLUMN "config"."user_id" IS \'User\';',
            'CREATE UNIQUE INDEX "uid_product_name_869427" ON "product" ("name", "type_db_alias")',
            'CREATE UNIQUE INDEX "uid_user_usernam_9987ab" ON "user" ("username")',
-           ]
-        )
-        expected_downgrade_operators = set(
-            [
+        }
+        expected_downgrade_operators = {
            'ALTER TABLE "category" ALTER COLUMN "name" SET NOT NULL',
            'ALTER TABLE "category" ALTER COLUMN "slug" TYPE VARCHAR(200) USING "slug"::VARCHAR(200)',
            'ALTER TABLE "category" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ',
@@ -943,8 +936,7 @@ def test_migrate(mocker: MockerFixture):
            'DROP INDEX "uid_product_name_869427"',
            'DROP TABLE IF EXISTS "email_user"',
            'DROP TABLE IF EXISTS "newmodel"',
-           ]
-        )
+        }
         assert not set(Migrate.upgrade_operators).symmetric_difference(expected_upgrade_operators)
         assert not set(Migrate.downgrade_operators).symmetric_difference(
             expected_downgrade_operators