feat: support --fake for aerich upgrade (#398)
* feat: support `--fake` for aerich upgrade * Add `--fake` to downgrade * tests: check --fake result for aerich upgrade and downgrade * Update readme * Fix unit test failures caused by the `db_field_types` change * refactor: improve type hints and documentation
This commit is contained in:
46
tests/_utils.py
Normal file
46
tests/_utils.py
Normal file
@@ -0,0 +1,46 @@
|
||||
import contextlib
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from tortoise import Tortoise, generate_schema_for_client
|
||||
from tortoise.exceptions import DBConnectionError, OperationalError
|
||||
|
||||
# Python >= 3.11 ships contextlib.chdir; provide the CPython 3.13
# implementation as a backport for older interpreters.
if sys.version_info >= (3, 11):
    from contextlib import chdir
else:

    class chdir(contextlib.AbstractContextManager):  # Copied from source code of Python3.13
        """Non thread-safe context manager to change the current working directory."""

        def __init__(self, path):
            self.path = path
            self._old_cwd = []  # stack of previous cwds, so re-entrant use unwinds correctly

        def __enter__(self):
            self._old_cwd.append(os.getcwd())
            os.chdir(self.path)

        def __exit__(self, *excinfo):
            os.chdir(self._old_cwd.pop())
|
||||
|
||||
|
||||
async def drop_db(tortoise_orm) -> None:
    """Initialize Tortoise from *tortoise_orm* and drop its databases.

    Connection/operational errors during the drop are ignored (e.g. the
    databases may not exist yet).
    """
    # Init stays outside the error-swallowing block: it does not
    # establish connections to the DB eagerly.
    await Tortoise.init(config=tortoise_orm)
    try:
        await Tortoise._drop_databases()
    except (DBConnectionError, OperationalError):
        pass
|
||||
|
||||
|
||||
async def init_db(tortoise_orm, generate_schemas=True) -> None:
    """Drop and recreate the databases described by *tortoise_orm*.

    When *generate_schemas* is true, also create tables on the default
    connection.
    """
    await drop_db(tortoise_orm)
    await Tortoise.init(config=tortoise_orm, _create_db=True)
    if not generate_schemas:
        return
    default_conn = Tortoise.get_connection("default")
    await generate_schema_for_client(default_conn, safe=True)
|
||||
|
||||
|
||||
def copy_files(*src_files: Path, target_dir: Path) -> None:
    """Copy every file in *src_files* into *target_dir*."""
    for file_path in src_files:
        shutil.copy(file_path, target_dir)
|
||||
72
tests/assets/fake/_tests.py
Normal file
72
tests/assets/fake/_tests.py
Normal file
@@ -0,0 +1,72 @@
|
||||
import pytest
|
||||
from models import NewModel
|
||||
from models_second import Config
|
||||
from settings import TORTOISE_ORM
|
||||
from tortoise import Tortoise
|
||||
from tortoise.exceptions import OperationalError
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def anyio_backend() -> str:
    """Run anyio-marked tests on the asyncio backend."""
    return "asyncio"
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
async def init_connections():
    """Initialize Tortoise before every test and always close connections after."""
    await Tortoise.init(TORTOISE_ORM)
    try:
        yield
    finally:
        await Tortoise.close_connections()
|
||||
|
||||
|
||||
@pytest.mark.anyio
async def test_init_db():
    """Both apps are queryable and writable right after init-db."""
    rows_main = await NewModel.filter(name="")
    assert isinstance(rows_main, list)
    rows_second = await Config.filter(key="")
    assert isinstance(rows_second, list)
    await NewModel.create(name="")
    await Config.create(key="", label="", value={})
|
||||
|
||||
|
||||
@pytest.mark.anyio
async def test_fake_field_1():
    # After `aerich upgrade --fake`, field_1 is registered on both models,
    # but the column was never added to the DB, so SQL touching it must fail.
    assert "field_1" in NewModel._meta.fields_map
    assert "field_1" in Config._meta.fields_map
    with pytest.raises(OperationalError):
        await NewModel.create(name="", field_1=1)
    with pytest.raises(OperationalError):
        await Config.create(key="", label="", value={}, field_1=1)

    # Saving fails whether or not the fake field is set, because the ORM
    # includes all mapped columns in the INSERT.
    obj1 = NewModel(name="", field_1=1)
    with pytest.raises(OperationalError):
        await obj1.save()
    obj1 = NewModel(name="")
    with pytest.raises(OperationalError):
        await obj1.save()
    # Full-row SELECTs fail too; selecting only real columns still works.
    with pytest.raises(OperationalError):
        obj1 = await NewModel.first()
    obj1 = await NewModel.all().first().values("id", "name")
    assert obj1 and obj1["id"]

    # Same expectations for the second app's model.
    obj2 = Config(key="", label="", value={}, field_1=1)
    with pytest.raises(OperationalError):
        await obj2.save()
    obj2 = Config(key="", label="", value={})
    with pytest.raises(OperationalError):
        await obj2.save()
    with pytest.raises(OperationalError):
        obj2 = await Config.first()
    obj2 = await Config.all().first().values("id", "key")
    assert obj2 and obj2["id"]
|
||||
|
||||
|
||||
@pytest.mark.anyio
async def test_fake_field_2():
    """After the second fake upgrade, field_2 is mapped but missing in the DB."""
    for model in (NewModel, Config):
        assert "field_2" in model._meta.fields_map
    with pytest.raises(OperationalError):
        await NewModel.create(name="")
    with pytest.raises(OperationalError):
        await Config.create(key="", label="", value={})
|
||||
28
tests/assets/fake/db.py
Normal file
28
tests/assets/fake/db.py
Normal file
@@ -0,0 +1,28 @@
|
||||
import asyncclick as click
|
||||
from settings import TORTOISE_ORM
|
||||
|
||||
from tests._utils import drop_db, init_db
|
||||
|
||||
|
||||
# Root command group for this create/drop helper script.
@click.group()
def cli(): ...
|
||||
|
||||
|
||||
@cli.command()
async def create():
    """Create the test databases without generating schemas."""
    await init_db(TORTOISE_ORM, False)
    conns = TORTOISE_ORM["connections"]
    click.echo(f"Success to create databases for {conns}")
|
||||
|
||||
|
||||
@cli.command()
async def drop():
    """Drop the test databases."""
    await drop_db(TORTOISE_ORM)
    conns = TORTOISE_ORM["connections"]
    click.echo(f"Dropped databases for {conns}")
|
||||
|
||||
|
||||
def main():
    """Script entry point: run the asyncclick command group."""
    cli()


if __name__ == "__main__":
    main()
|
||||
22
tests/assets/fake/settings.py
Normal file
22
tests/assets/fake/settings.py
Normal file
@@ -0,0 +1,22 @@
|
||||
import os
from datetime import date

from tortoise.contrib.test import MEMORY_SQLITE

# NOTE(review): TEST_DB appears to contain a literal "\{\}" placeholder that is
# substituted with a date-stamped database name — confirm against CI config.
# Without TEST_DB, fall back to in-memory sqlite.
DB_URL = (
    _u.replace("\\{\\}", f"aerich_fake_{date.today():%Y%m%d}")
    if (_u := os.getenv("TEST_DB"))
    else MEMORY_SQLITE
)
# The second app gets its own database (suffix "_second") on real backends.
DB_URL_SECOND = (DB_URL + "_second") if DB_URL != MEMORY_SQLITE else MEMORY_SQLITE

TORTOISE_ORM = {
    "connections": {
        # On sqlite fallback, use file-backed DBs so separate CLI processes share state.
        "default": DB_URL.replace(MEMORY_SQLITE, "sqlite://db.sqlite3"),
        "second": DB_URL_SECOND.replace(MEMORY_SQLITE, "sqlite://db_second.sqlite3"),
    },
    "apps": {
        "models": {"models": ["models", "aerich.models"], "default_connection": "default"},
        "models_second": {"models": ["models_second"], "default_connection": "second"},
    },
}
|
||||
75
tests/assets/sqlite_migrate/_tests.py
Normal file
75
tests/assets/sqlite_migrate/_tests.py
Normal file
@@ -0,0 +1,75 @@
|
||||
import uuid
|
||||
|
||||
import pytest
|
||||
from models import Foo
|
||||
from tortoise.exceptions import IntegrityError
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_allow_duplicate() -> None:
    """With the unique constraint removed, duplicate names are accepted."""
    await Foo.all().delete()
    await Foo.create(name="foo")
    duplicate = await Foo.create(name="foo")
    assert await Foo.all().count() == 2
    await duplicate.delete()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_unique_is_true() -> None:
    """Once `name` is unique again, a duplicate insert must be rejected."""
    with pytest.raises(IntegrityError):
        await Foo.create(name="foo")
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_add_unique_field() -> None:
    """`age` is unique: a second row with age=0 must be rejected."""
    seed_exists = await Foo.filter(age=0).exists()
    if not seed_exists:
        await Foo.create(name="0_" + uuid.uuid4().hex, age=0)
    with pytest.raises(IntegrityError):
        await Foo.create(name=uuid.uuid4().hex, age=0)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_drop_unique_field() -> None:
    """After dropping the unique constraint, another age=0 row succeeds."""
    name = "1_" + uuid.uuid4().hex
    await Foo.create(name=name, age=0)
    assert await Foo.filter(name=name).exists()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_with_age_field() -> None:
    """The `age` column exists and round-trips its value."""
    name = "2_" + uuid.uuid4().hex
    await Foo.create(name=name, age=0)
    fetched = await Foo.get(name=name)
    assert fetched.age == 0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_without_age_field() -> None:
    """After the downgrade, `age` is no longer mapped on the model."""
    name = "3_" + uuid.uuid4().hex
    await Foo.create(name=name, age=0)
    fetched = await Foo.get(name=name)
    assert getattr(fetched, "age", None) is None
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_m2m_with_custom_through() -> None:
    """The custom through model records the relation with its extra column."""
    from models import FooGroup, Group

    name = "4_" + uuid.uuid4().hex
    foo = await Foo.create(name=name)
    group = await Group.create(name=name + "1")
    await FooGroup.all().delete()
    await foo.groups.add(group)
    link = await FooGroup.get(foo=foo, group=group)
    assert not link.is_active
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_add_m2m_field_after_init_db() -> None:
    """An m2m field added after init-db works in both directions."""
    from models import Group

    name = "5_" + uuid.uuid4().hex
    foo = await Foo.create(name=name)
    group = await Group.create(name=name + "1")
    await foo.groups.add(group)
    first_user = await group.users.all().first()
    assert first_user == foo
|
||||
26
tests/assets/sqlite_migrate/conftest_.py
Normal file
26
tests/assets/sqlite_migrate/conftest_.py
Normal file
@@ -0,0 +1,26 @@
|
||||
import asyncio
|
||||
from typing import Generator
|
||||
|
||||
import pytest
|
||||
import pytest_asyncio
|
||||
import settings
|
||||
from tortoise import Tortoise, connections
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def event_loop() -> Generator:
    """Session-scoped event loop whose close() is deferred to session teardown."""
    policy = asyncio.get_event_loop_policy()
    res = policy.new_event_loop()
    asyncio.set_event_loop(res)
    # Disarm close() so per-test code cannot close the shared loop early;
    # keep the real close around for the final teardown below.
    res._close = res.close  # type:ignore[attr-defined]
    res.close = lambda: None  # type:ignore[method-assign]

    yield res

    # Actually close the loop now that the session is over.
    res._close()  # type:ignore[attr-defined]
|
||||
|
||||
|
||||
@pytest_asyncio.fixture(scope="session", autouse=True)
async def api(event_loop, request):
    """Initialize Tortoise once per session; close all connections at exit."""
    await Tortoise.init(config=settings.TORTOISE_ORM)
    # Finalizer runs on the session loop after all tests have finished.
    request.addfinalizer(lambda: event_loop.run_until_complete(connections.close_all(discard=True)))
|
||||
5
tests/assets/sqlite_migrate/models.py
Normal file
5
tests/assets/sqlite_migrate/models.py
Normal file
@@ -0,0 +1,5 @@
|
||||
from tortoise import Model, fields
|
||||
|
||||
|
||||
class Foo(Model):
    # db_index=False on purpose — presumably the sqlite_migrate tests toggle
    # unique/index on this field via generated migrations; confirm with the tests.
    name = fields.CharField(max_length=60, db_index=False)
|
||||
4
tests/assets/sqlite_migrate/settings.py
Normal file
4
tests/assets/sqlite_migrate/settings.py
Normal file
@@ -0,0 +1,4 @@
|
||||
# Minimal aerich config for the sqlite migration tests: one file-backed
# sqlite connection and a single app that includes aerich's own models.
TORTOISE_ORM = {
    "connections": {"default": "sqlite://db.sqlite3"},
    "apps": {"models": {"models": ["models", "aerich.models"]}},
}
|
||||
@@ -52,6 +52,7 @@ class Category(Model):
|
||||
user: fields.ForeignKeyRelation[User] = fields.ForeignKeyField(
|
||||
"models.User", description="User"
|
||||
)
|
||||
title = fields.CharField(max_length=20, unique=True)
|
||||
created_at = fields.DatetimeField(auto_now_add=True)
|
||||
|
||||
|
||||
|
||||
149
tests/test_fake.py
Normal file
149
tests/test_fake.py
Normal file
@@ -0,0 +1,149 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
import shlex
|
||||
import subprocess
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from aerich.ddl.sqlite import SqliteDDL
|
||||
from aerich.migrate import Migrate
|
||||
from tests._utils import chdir, copy_files
|
||||
|
||||
|
||||
def run_shell(command: str, capture_output=True, **kw) -> str:
    """Run *command* and return its decoded output.

    Returns stderr when the command failed with error output, otherwise
    stdout (or "" when there is none). Extra keyword arguments — e.g.
    ``input="y\\n"`` — are forwarded to ``subprocess.run``; previously they
    were silently dropped, so interactive answers never reached the child.
    """
    if isinstance(kw.get("input"), str):
        # subprocess.run only accepts bytes input when not in text mode,
        # and we stay in binary mode so the decode() calls below keep working.
        kw["input"] = kw["input"].encode()
    r = subprocess.run(shlex.split(command), capture_output=capture_output, **kw)
    if r.returncode != 0 and r.stderr:
        return r.stderr.decode()
    if not r.stdout:
        return ""
    return r.stdout.decode()
|
||||
|
||||
|
||||
@pytest.fixture
def new_aerich_project(tmp_path: Path):
    """Build a throwaway two-app aerich project in *tmp_path* and chdir into it.

    Copies the fake-project assets plus the shared models into tmp_path,
    creates the databases via the copied db.py script, and tears everything
    down afterwards (unless AERICH_DONT_DROP_FAKE_DB is set, for debugging).
    """
    test_dir = Path(__file__).parent
    asset_dir = test_dir / "assets" / "fake"
    settings_py = asset_dir / "settings.py"
    _tests_py = asset_dir / "_tests.py"
    db_py = asset_dir / "db.py"
    models_py = test_dir / "models.py"
    models_second_py = test_dir / "models_second.py"
    copy_files(settings_py, _tests_py, models_py, models_second_py, db_py, target_dir=tmp_path)
    # The copied db.py imports from a local `tests` package; recreate it.
    dst_dir = tmp_path / "tests"
    dst_dir.mkdir()
    dst_dir.joinpath("__init__.py").touch()
    copy_files(test_dir / "_utils.py", test_dir / "indexes.py", target_dir=dst_dir)
    # Make the project importable; remember whether we added the path so we
    # only remove what we added.
    if should_remove := str(tmp_path) not in sys.path:
        sys.path.append(str(tmp_path))
    with chdir(tmp_path):
        run_shell("python db.py create", capture_output=False)
        try:
            yield
        finally:
            if not os.getenv("AERICH_DONT_DROP_FAKE_DB"):
                run_shell("python db.py drop", capture_output=False)
            if should_remove:
                sys.path.remove(str(tmp_path))
|
||||
|
||||
|
||||
def _append_field(*files: str, name="field_1") -> None:
|
||||
for file in files:
|
||||
p = Path(file)
|
||||
field = f" {name} = fields.IntField(default=0)"
|
||||
with p.open("a") as f:
|
||||
f.write(os.linesep + field)
|
||||
|
||||
|
||||
def test_fake(new_aerich_project):
    """End-to-end check of `aerich upgrade/downgrade --fake` across two apps."""
    if (ddl := getattr(Migrate, "ddl", None)) and isinstance(ddl, SqliteDDL):
        # TODO: go ahead if sqlite alter-column supported
        return
    # --- init the aerich project and both apps ---
    output = run_shell("aerich init -t settings.TORTOISE_ORM")
    assert "Success" in output
    output = run_shell("aerich init-db")
    assert "Success" in output
    output = run_shell("aerich --app models_second init-db")
    assert "Success" in output
    output = run_shell("pytest _tests.py::test_init_db")
    assert "error" not in output.lower()
    # --- add field_1, migrate, and apply with --fake (DB left untouched) ---
    _append_field("models.py", "models_second.py")
    output = run_shell("aerich migrate")
    assert "Success" in output
    output = run_shell("aerich --app models_second migrate")
    assert "Success" in output
    output = run_shell("aerich upgrade --fake")
    assert "FAKED" in output
    output = run_shell("aerich --app models_second upgrade --fake")
    assert "FAKED" in output
    output = run_shell("pytest _tests.py::test_fake_field_1")
    assert "error" not in output.lower()
    # --- add field_2 and fake-upgrade again; heads must drain to empty ---
    _append_field("models.py", "models_second.py", name="field_2")
    output = run_shell("aerich migrate")
    assert "Success" in output
    output = run_shell("aerich --app models_second migrate")
    assert "Success" in output
    output = run_shell("aerich heads")
    assert "_update.py" in output
    output = run_shell("aerich upgrade --fake")
    assert "FAKED" in output
    output = run_shell("aerich --app models_second upgrade --fake")
    assert "FAKED" in output
    output = run_shell("pytest _tests.py::test_fake_field_2")
    assert "error" not in output.lower()
    output = run_shell("aerich heads")
    assert "No available heads." in output
    output = run_shell("aerich --app models_second heads")
    assert "No available heads." in output
    # --- add field_3 so there are three revisions, then fake-downgrade to v2:
    #     revision 2 becomes a head again, revision 1 stays applied ---
    _append_field("models.py", "models_second.py", name="field_3")
    run_shell("aerich migrate", capture_output=False)
    run_shell("aerich --app models_second migrate", capture_output=False)
    run_shell("aerich upgrade --fake", capture_output=False)
    run_shell("aerich --app models_second upgrade --fake", capture_output=False)
    output = run_shell("aerich downgrade --fake -v 2 --yes", input="y\n")
    assert "FAKED" in output
    output = run_shell("aerich --app models_second downgrade --fake -v 2 --yes", input="y\n")
    assert "FAKED" in output
    output = run_shell("aerich heads")
    assert "No available heads." not in output
    assert not re.search(r"1_\d+_update\.py", output)
    assert re.search(r"2_\d+_update\.py", output)
    output = run_shell("aerich --app models_second heads")
    assert "No available heads." not in output
    assert not re.search(r"1_\d+_update\.py", output)
    assert re.search(r"2_\d+_update\.py", output)
    # --- fake-downgrade to v1: both revision 1 and 2 become heads ---
    output = run_shell("aerich downgrade --fake -v 1 --yes", input="y\n")
    assert "FAKED" in output
    output = run_shell("aerich --app models_second downgrade --fake -v 1 --yes", input="y\n")
    assert "FAKED" in output
    output = run_shell("aerich heads")
    assert "No available heads." not in output
    assert re.search(r"1_\d+_update\.py", output)
    assert re.search(r"2_\d+_update\.py", output)
    output = run_shell("aerich --app models_second heads")
    assert "No available heads." not in output
    assert re.search(r"1_\d+_update\.py", output)
    assert re.search(r"2_\d+_update\.py", output)
    # --- fake-upgrade re-applies everything; heads drain again ---
    output = run_shell("aerich upgrade --fake")
    assert "FAKED" in output
    output = run_shell("aerich --app models_second upgrade --fake")
    assert "FAKED" in output
    output = run_shell("aerich heads")
    assert "No available heads." in output
    output = run_shell("aerich --app models_second heads")
    assert "No available heads." in output
    # --- and a final fake-downgrade round-trips back to two heads ---
    output = run_shell("aerich downgrade --fake -v 1 --yes", input="y\n")
    assert "FAKED" in output
    output = run_shell("aerich --app models_second downgrade --fake -v 1 --yes", input="y\n")
    assert "FAKED" in output
    output = run_shell("aerich heads")
    assert "No available heads." not in output
    assert re.search(r"1_\d+_update\.py", output)
    assert re.search(r"2_\d+_update\.py", output)
    output = run_shell("aerich --app models_second heads")
    assert "No available heads." not in output
    assert re.search(r"1_\d+_update\.py", output)
    assert re.search(r"2_\d+_update\.py", output)
|
||||
@@ -1,3 +1,5 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
@@ -13,6 +15,14 @@ from aerich.migrate import MIGRATE_TEMPLATE, Migrate
|
||||
from aerich.utils import get_models_describe
|
||||
from tests.indexes import CustomIndex
|
||||
|
||||
|
||||
def describe_index(idx: Index) -> Index | dict:
    """Return *idx* in the form the installed tortoise-orm uses in model describes."""
    # tortoise-orm>=0.24 changed Index.describe() output to be a dict.
    if tortoise.__version__ < "0.24":
        return idx
    return idx.describe()  # type:ignore
|
||||
|
||||
|
||||
# tortoise-orm>=0.21 changes IntField constraints
|
||||
# from {"ge": 1, "le": 2147483647} to {"ge": -2147483648, "le": 2147483647}
|
||||
MIN_INT = 1 if tortoise.__version__ < "0.21" else -2147483648
|
||||
@@ -640,7 +650,10 @@ old_models_describe = {
|
||||
"description": None,
|
||||
"docstring": None,
|
||||
"unique_together": [],
|
||||
"indexes": [Index(fields=("username", "is_active")), CustomIndex(fields=("is_superuser",))],
|
||||
"indexes": [
|
||||
describe_index(Index(fields=("username", "is_active"))),
|
||||
describe_index(CustomIndex(fields=("is_superuser",))),
|
||||
],
|
||||
"pk_field": {
|
||||
"name": "id",
|
||||
"field_type": "IntField",
|
||||
@@ -958,7 +971,6 @@ def test_migrate(mocker: MockerFixture):
|
||||
"ALTER TABLE `config` ADD `user_id` INT NOT NULL COMMENT 'User'",
|
||||
"ALTER TABLE `config` ADD CONSTRAINT `fk_config_user_17daa970` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE",
|
||||
"ALTER TABLE `config` ALTER COLUMN `status` DROP DEFAULT",
|
||||
"ALTER TABLE `config` MODIFY COLUMN `value` JSON NOT NULL",
|
||||
"ALTER TABLE `email` ADD `address` VARCHAR(200) NOT NULL",
|
||||
"ALTER TABLE `email` ADD CONSTRAINT `fk_email_config_76a9dc71` FOREIGN KEY (`config_id`) REFERENCES `config` (`id`) ON DELETE CASCADE",
|
||||
"ALTER TABLE `email` ADD `config_id` INT NOT NULL UNIQUE",
|
||||
@@ -971,23 +983,24 @@ def test_migrate(mocker: MockerFixture):
|
||||
"ALTER TABLE `email` ADD INDEX `idx_email_email_4a1a33` (`email`)",
|
||||
"ALTER TABLE `product` ADD UNIQUE INDEX `uid_product_name_869427` (`name`, `type_db_alias`)",
|
||||
"ALTER TABLE `product` ALTER COLUMN `view_num` SET DEFAULT 0",
|
||||
"ALTER TABLE `product` MODIFY COLUMN `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6)",
|
||||
"ALTER TABLE `product` RENAME COLUMN `is_delete` TO `is_deleted`",
|
||||
"ALTER TABLE `product` RENAME COLUMN `is_review` TO `is_reviewed`",
|
||||
"ALTER TABLE `user` DROP COLUMN `avatar`",
|
||||
"ALTER TABLE `user` MODIFY COLUMN `password` VARCHAR(100) NOT NULL",
|
||||
"ALTER TABLE `user` MODIFY COLUMN `intro` LONGTEXT NOT NULL",
|
||||
"ALTER TABLE `user` MODIFY COLUMN `last_login` DATETIME(6) NOT NULL COMMENT 'Last Login'",
|
||||
"ALTER TABLE `user` MODIFY COLUMN `longitude` DECIMAL(10,8) NOT NULL",
|
||||
"ALTER TABLE `user` ADD UNIQUE INDEX `username` (`username`)",
|
||||
"CREATE TABLE `email_user` (\n `email_id` INT NOT NULL REFERENCES `email` (`email_id`) ON DELETE CASCADE,\n `user_id` INT NOT NULL REFERENCES `user` (`id`) ON DELETE CASCADE\n) CHARACTER SET utf8mb4",
|
||||
"CREATE TABLE IF NOT EXISTS `newmodel` (\n `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT,\n `name` VARCHAR(50) NOT NULL\n) CHARACTER SET utf8mb4",
|
||||
"ALTER TABLE `category` MODIFY COLUMN `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6)",
|
||||
"ALTER TABLE `product` MODIFY COLUMN `body` LONGTEXT NOT NULL",
|
||||
"CREATE TABLE `product_user` (\n `product_id` INT NOT NULL REFERENCES `product` (`id`) ON DELETE CASCADE,\n `user_id` INT NOT NULL REFERENCES `user` (`id`) ON DELETE CASCADE\n) CHARACTER SET utf8mb4",
|
||||
"CREATE TABLE `config_category_map` (\n `category_id` INT NOT NULL REFERENCES `category` (`id`) ON DELETE CASCADE,\n `config_id` INT NOT NULL REFERENCES `config` (`id`) ON DELETE CASCADE\n) CHARACTER SET utf8mb4",
|
||||
"DROP TABLE IF EXISTS `config_category`",
|
||||
}
|
||||
upgrade_operators = set(Migrate.upgrade_operators)
|
||||
upgrade_more_than_expected = upgrade_operators - expected_upgrade_operators
|
||||
assert not upgrade_more_than_expected
|
||||
upgrade_less_than_expected = expected_upgrade_operators - upgrade_operators
|
||||
assert not upgrade_less_than_expected
|
||||
|
||||
expected_downgrade_operators = {
|
||||
"ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200) NOT NULL",
|
||||
"ALTER TABLE `category` MODIFY COLUMN `slug` VARCHAR(200) NOT NULL",
|
||||
@@ -1020,28 +1033,21 @@ def test_migrate(mocker: MockerFixture):
|
||||
"DROP TABLE IF EXISTS `email_user`",
|
||||
"DROP TABLE IF EXISTS `newmodel`",
|
||||
"DROP TABLE IF EXISTS `product_user`",
|
||||
"ALTER TABLE `user` MODIFY COLUMN `intro` LONGTEXT NOT NULL",
|
||||
"ALTER TABLE `config` MODIFY COLUMN `value` TEXT NOT NULL",
|
||||
"ALTER TABLE `category` MODIFY COLUMN `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6)",
|
||||
"ALTER TABLE `product` MODIFY COLUMN `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6)",
|
||||
"ALTER TABLE `user` MODIFY COLUMN `last_login` DATETIME(6) NOT NULL COMMENT 'Last Login'",
|
||||
"ALTER TABLE `user` MODIFY COLUMN `longitude` DECIMAL(12,9) NOT NULL",
|
||||
"ALTER TABLE `product` MODIFY COLUMN `body` LONGTEXT NOT NULL",
|
||||
"CREATE TABLE `config_category` (\n `config_id` INT NOT NULL REFERENCES `config` (`id`) ON DELETE CASCADE,\n `category_id` INT NOT NULL REFERENCES `category` (`id`) ON DELETE CASCADE\n) CHARACTER SET utf8mb4",
|
||||
"DROP TABLE IF EXISTS `config_category_map`",
|
||||
}
|
||||
assert not set(Migrate.upgrade_operators).symmetric_difference(expected_upgrade_operators)
|
||||
|
||||
assert not set(Migrate.downgrade_operators).symmetric_difference(
|
||||
expected_downgrade_operators
|
||||
)
|
||||
downgrade_operators = set(Migrate.downgrade_operators)
|
||||
downgrade_more_than_expected = downgrade_operators - expected_downgrade_operators
|
||||
assert not downgrade_more_than_expected
|
||||
downgrade_less_than_expected = expected_downgrade_operators - downgrade_operators
|
||||
assert not downgrade_less_than_expected
|
||||
|
||||
elif isinstance(Migrate.ddl, PostgresDDL):
|
||||
expected_upgrade_operators = {
|
||||
'DROP INDEX IF EXISTS "uid_category_title_f7fc03"',
|
||||
'ALTER TABLE "category" ALTER COLUMN "name" DROP NOT NULL',
|
||||
'ALTER TABLE "category" ALTER COLUMN "slug" TYPE VARCHAR(100) USING "slug"::VARCHAR(100)',
|
||||
'ALTER TABLE "category" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ',
|
||||
'ALTER TABLE "category" RENAME COLUMN "user_id" TO "owner_id"',
|
||||
'ALTER TABLE "category" ADD CONSTRAINT "fk_category_user_110d4c63" FOREIGN KEY ("owner_id") REFERENCES "user" ("id") ON DELETE CASCADE',
|
||||
'ALTER TABLE "config" DROP COLUMN "name"',
|
||||
@@ -1049,7 +1055,6 @@ def test_migrate(mocker: MockerFixture):
|
||||
'ALTER TABLE "config" ADD "user_id" INT NOT NULL',
|
||||
'ALTER TABLE "config" ADD CONSTRAINT "fk_config_user_17daa970" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE',
|
||||
'ALTER TABLE "config" ALTER COLUMN "status" DROP DEFAULT',
|
||||
'ALTER TABLE "config" ALTER COLUMN "value" TYPE JSONB USING "value"::JSONB',
|
||||
'ALTER TABLE "configs" RENAME TO "config"',
|
||||
'ALTER TABLE "email" ADD "address" VARCHAR(200) NOT NULL',
|
||||
'ALTER TABLE "email" RENAME COLUMN "id" TO "email_id"',
|
||||
@@ -1060,14 +1065,10 @@ def test_migrate(mocker: MockerFixture):
|
||||
'ALTER TABLE "product" DROP COLUMN "uuid"',
|
||||
'ALTER TABLE "product" ALTER COLUMN "view_num" SET DEFAULT 0',
|
||||
'ALTER TABLE "product" RENAME COLUMN "image" TO "pic"',
|
||||
'ALTER TABLE "product" ALTER COLUMN "body" TYPE TEXT USING "body"::TEXT',
|
||||
'ALTER TABLE "product" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ',
|
||||
'ALTER TABLE "product" RENAME COLUMN "is_review" TO "is_reviewed"',
|
||||
'ALTER TABLE "product" RENAME COLUMN "is_delete" TO "is_deleted"',
|
||||
'ALTER TABLE "user" ALTER COLUMN "password" TYPE VARCHAR(100) USING "password"::VARCHAR(100)',
|
||||
'ALTER TABLE "user" DROP COLUMN "avatar"',
|
||||
'ALTER TABLE "user" ALTER COLUMN "last_login" TYPE TIMESTAMPTZ USING "last_login"::TIMESTAMPTZ',
|
||||
'ALTER TABLE "user" ALTER COLUMN "intro" TYPE TEXT USING "intro"::TEXT',
|
||||
'ALTER TABLE "user" ALTER COLUMN "longitude" TYPE DECIMAL(10,8) USING "longitude"::DECIMAL(10,8)',
|
||||
'CREATE INDEX "idx_product_name_869427" ON "product" ("name", "type_db_alias")',
|
||||
'CREATE INDEX "idx_email_email_4a1a33" ON "email" ("email")',
|
||||
@@ -1079,11 +1080,16 @@ def test_migrate(mocker: MockerFixture):
|
||||
'CREATE TABLE "config_category_map" (\n "category_id" INT NOT NULL REFERENCES "category" ("id") ON DELETE CASCADE,\n "config_id" INT NOT NULL REFERENCES "config" ("id") ON DELETE CASCADE\n)',
|
||||
'DROP TABLE IF EXISTS "config_category"',
|
||||
}
|
||||
upgrade_operators = set(Migrate.upgrade_operators)
|
||||
upgrade_more_than_expected = upgrade_operators - expected_upgrade_operators
|
||||
assert not upgrade_more_than_expected
|
||||
upgrade_less_than_expected = expected_upgrade_operators - upgrade_operators
|
||||
assert not upgrade_less_than_expected
|
||||
|
||||
expected_downgrade_operators = {
|
||||
'CREATE UNIQUE INDEX "uid_category_title_f7fc03" ON "category" ("title")',
|
||||
'ALTER TABLE "category" ALTER COLUMN "name" SET NOT NULL',
|
||||
'ALTER TABLE "category" ALTER COLUMN "slug" TYPE VARCHAR(200) USING "slug"::VARCHAR(200)',
|
||||
'ALTER TABLE "category" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ',
|
||||
'ALTER TABLE "category" RENAME COLUMN "owner_id" TO "user_id"',
|
||||
'ALTER TABLE "category" DROP CONSTRAINT IF EXISTS "fk_category_user_110d4c63"',
|
||||
'ALTER TABLE "config" ADD "name" VARCHAR(100) NOT NULL UNIQUE',
|
||||
@@ -1091,7 +1097,6 @@ def test_migrate(mocker: MockerFixture):
|
||||
'ALTER TABLE "config" ALTER COLUMN "status" SET DEFAULT 1',
|
||||
'ALTER TABLE "config" DROP CONSTRAINT IF EXISTS "fk_config_user_17daa970"',
|
||||
'ALTER TABLE "config" RENAME TO "configs"',
|
||||
'ALTER TABLE "config" ALTER COLUMN "value" TYPE JSONB USING "value"::JSONB',
|
||||
'ALTER TABLE "config" DROP COLUMN "user_id"',
|
||||
'ALTER TABLE "email" ADD "user_id" INT NOT NULL',
|
||||
'ALTER TABLE "email" DROP COLUMN "address"',
|
||||
@@ -1106,11 +1111,7 @@ def test_migrate(mocker: MockerFixture):
|
||||
'ALTER TABLE "product" RENAME COLUMN "is_reviewed" TO "is_review"',
|
||||
'ALTER TABLE "user" ADD "avatar" VARCHAR(200) NOT NULL DEFAULT \'\'',
|
||||
'ALTER TABLE "user" ALTER COLUMN "password" TYPE VARCHAR(200) USING "password"::VARCHAR(200)',
|
||||
'ALTER TABLE "user" ALTER COLUMN "last_login" TYPE TIMESTAMPTZ USING "last_login"::TIMESTAMPTZ',
|
||||
'ALTER TABLE "user" ALTER COLUMN "intro" TYPE TEXT USING "intro"::TEXT',
|
||||
'ALTER TABLE "user" ALTER COLUMN "longitude" TYPE DECIMAL(12,9) USING "longitude"::DECIMAL(12,9)',
|
||||
'ALTER TABLE "product" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ',
|
||||
'ALTER TABLE "product" ALTER COLUMN "body" TYPE TEXT USING "body"::TEXT',
|
||||
'DROP TABLE IF EXISTS "product_user"',
|
||||
'DROP INDEX IF EXISTS "idx_product_name_869427"',
|
||||
'DROP INDEX IF EXISTS "idx_email_email_4a1a33"',
|
||||
@@ -1121,10 +1122,11 @@ def test_migrate(mocker: MockerFixture):
|
||||
'CREATE TABLE "config_category" (\n "config_id" INT NOT NULL REFERENCES "config" ("id") ON DELETE CASCADE,\n "category_id" INT NOT NULL REFERENCES "category" ("id") ON DELETE CASCADE\n)',
|
||||
'DROP TABLE IF EXISTS "config_category_map"',
|
||||
}
|
||||
assert not set(Migrate.upgrade_operators).symmetric_difference(expected_upgrade_operators)
|
||||
assert not set(Migrate.downgrade_operators).symmetric_difference(
|
||||
expected_downgrade_operators
|
||||
)
|
||||
downgrade_operators = set(Migrate.downgrade_operators)
|
||||
downgrade_more_than_expected = downgrade_operators - expected_downgrade_operators
|
||||
assert not downgrade_more_than_expected
|
||||
downgrade_less_than_expected = expected_downgrade_operators - downgrade_operators
|
||||
assert not downgrade_less_than_expected
|
||||
|
||||
elif isinstance(Migrate.ddl, SqliteDDL):
|
||||
assert Migrate.upgrade_operators == []
|
||||
|
||||
@@ -3,161 +3,16 @@ import os
|
||||
import shlex
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from aerich.ddl.sqlite import SqliteDDL
|
||||
from aerich.migrate import Migrate
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
from contextlib import chdir
|
||||
else:
|
||||
|
||||
class chdir(contextlib.AbstractContextManager): # Copied from source code of Python3.13
|
||||
"""Non thread-safe context manager to change the current working directory."""
|
||||
|
||||
def __init__(self, path):
|
||||
self.path = path
|
||||
self._old_cwd = []
|
||||
|
||||
def __enter__(self):
|
||||
self._old_cwd.append(os.getcwd())
|
||||
os.chdir(self.path)
|
||||
|
||||
def __exit__(self, *excinfo):
|
||||
os.chdir(self._old_cwd.pop())
|
||||
|
||||
|
||||
MODELS = """from __future__ import annotations
|
||||
|
||||
from tortoise import Model, fields
|
||||
|
||||
|
||||
class Foo(Model):
|
||||
name = fields.CharField(max_length=60, db_index=False)
|
||||
"""
|
||||
|
||||
SETTINGS = """from __future__ import annotations
|
||||
|
||||
TORTOISE_ORM = {
|
||||
"connections": {"default": "sqlite://db.sqlite3"},
|
||||
"apps": {"models": {"models": ["models", "aerich.models"]}},
|
||||
}
|
||||
"""
|
||||
|
||||
CONFTEST = """from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from typing import Generator
|
||||
|
||||
import pytest
|
||||
import pytest_asyncio
|
||||
from tortoise import Tortoise, connections
|
||||
|
||||
import settings
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def event_loop() -> Generator:
|
||||
policy = asyncio.get_event_loop_policy()
|
||||
res = policy.new_event_loop()
|
||||
asyncio.set_event_loop(res)
|
||||
res._close = res.close # type:ignore[attr-defined]
|
||||
res.close = lambda: None # type:ignore[method-assign]
|
||||
|
||||
yield res
|
||||
|
||||
res._close() # type:ignore[attr-defined]
|
||||
|
||||
|
||||
@pytest_asyncio.fixture(scope="session", autouse=True)
|
||||
async def api(event_loop, request):
|
||||
await Tortoise.init(config=settings.TORTOISE_ORM)
|
||||
request.addfinalizer(lambda: event_loop.run_until_complete(connections.close_all(discard=True)))
|
||||
"""
|
||||
|
||||
TESTS = """from __future__ import annotations
|
||||
|
||||
import uuid
|
||||
|
||||
import pytest
|
||||
from tortoise.exceptions import IntegrityError
|
||||
|
||||
from models import Foo
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_allow_duplicate() -> None:
|
||||
await Foo.all().delete()
|
||||
await Foo.create(name="foo")
|
||||
obj = await Foo.create(name="foo")
|
||||
assert (await Foo.all().count()) == 2
|
||||
await obj.delete()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_unique_is_true() -> None:
|
||||
with pytest.raises(IntegrityError):
|
||||
await Foo.create(name="foo")
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_add_unique_field() -> None:
|
||||
if not await Foo.filter(age=0).exists():
|
||||
await Foo.create(name="0_"+uuid.uuid4().hex, age=0)
|
||||
with pytest.raises(IntegrityError):
|
||||
await Foo.create(name=uuid.uuid4().hex, age=0)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_drop_unique_field() -> None:
|
||||
name = "1_" + uuid.uuid4().hex
|
||||
await Foo.create(name=name, age=0)
|
||||
assert (await Foo.filter(name=name).exists())
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_with_age_field() -> None:
|
||||
name = "2_" + uuid.uuid4().hex
|
||||
await Foo.create(name=name, age=0)
|
||||
obj = await Foo.get(name=name)
|
||||
assert obj.age == 0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_without_age_field() -> None:
|
||||
name = "3_" + uuid.uuid4().hex
|
||||
await Foo.create(name=name, age=0)
|
||||
obj = await Foo.get(name=name)
|
||||
assert getattr(obj, "age", None) is None
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_m2m_with_custom_through() -> None:
|
||||
from models import Group, FooGroup
|
||||
name = "4_" + uuid.uuid4().hex
|
||||
foo = await Foo.create(name=name)
|
||||
group = await Group.create(name=name+"1")
|
||||
await FooGroup.all().delete()
|
||||
await foo.groups.add(group)
|
||||
foo_group = await FooGroup.get(foo=foo, group=group)
|
||||
assert not foo_group.is_active
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_add_m2m_field_after_init_db() -> None:
|
||||
from models import Group
|
||||
name = "5_" + uuid.uuid4().hex
|
||||
foo = await Foo.create(name=name)
|
||||
group = await Group.create(name=name+"1")
|
||||
await foo.groups.add(group)
|
||||
assert (await group.users.all().first()) == foo
|
||||
"""
|
||||
from tests._utils import chdir, copy_files
|
||||
|
||||
|
||||
def run_aerich(cmd: str) -> None:
    """Run an aerich (or poetry-wrapped) command in a subprocess.

    ``aerich `` is prepended when *cmd* does not already start with
    ``aerich`` or ``poetry``.  The subprocess is killed after 2 seconds and
    the resulting ``TimeoutExpired`` is swallowed (best-effort invocation);
    any other error, e.g. a missing executable, still propagates.
    """
    with contextlib.suppress(subprocess.TimeoutExpired):
        # str.startswith accepts a tuple of prefixes -- one call covers both.
        if not cmd.startswith(("aerich", "poetry")):
            cmd = "aerich " + cmd
        subprocess.run(shlex.split(cmd), timeout=2)
|
||||
|
||||
@@ -170,60 +25,60 @@ def run_shell(cmd: str) -> subprocess.CompletedProcess:
|
||||
def test_sqlite_migrate(tmp_path: Path) -> None:
|
||||
if (ddl := getattr(Migrate, "ddl", None)) and not isinstance(ddl, SqliteDDL):
|
||||
return
|
||||
test_dir = Path(__file__).parent
|
||||
asset_dir = test_dir / "assets" / "sqlite_migrate"
|
||||
with chdir(tmp_path):
|
||||
models_py = Path("models.py")
|
||||
settings_py = Path("settings.py")
|
||||
test_py = Path("_test.py")
|
||||
models_py.write_text(MODELS)
|
||||
settings_py.write_text(SETTINGS)
|
||||
test_py.write_text(TESTS)
|
||||
Path("conftest.py").write_text(CONFTEST)
|
||||
files = ("models.py", "settings.py", "_tests.py")
|
||||
copy_files(*(asset_dir / f for f in files), target_dir=Path())
|
||||
models_py, settings_py, test_py = (Path(f) for f in files)
|
||||
copy_files(asset_dir / "conftest_.py", target_dir=Path("conftest.py"))
|
||||
if (db_file := Path("db.sqlite3")).exists():
|
||||
db_file.unlink()
|
||||
MODELS = models_py.read_text("utf-8")
|
||||
run_aerich("aerich init -t settings.TORTOISE_ORM")
|
||||
config_file = Path("pyproject.toml")
|
||||
modify_time = config_file.stat().st_mtime
|
||||
run_aerich("aerich init-db")
|
||||
run_aerich("aerich init -t settings.TORTOISE_ORM")
|
||||
assert modify_time == config_file.stat().st_mtime
|
||||
r = run_shell("pytest _test.py::test_allow_duplicate")
|
||||
r = run_shell("pytest _tests.py::test_allow_duplicate")
|
||||
assert r.returncode == 0
|
||||
# Add index
|
||||
models_py.write_text(MODELS.replace("index=False", "index=True"))
|
||||
run_aerich("aerich migrate") # migrations/models/1_
|
||||
run_aerich("aerich upgrade")
|
||||
r = run_shell("pytest -s _test.py::test_allow_duplicate")
|
||||
r = run_shell("pytest -s _tests.py::test_allow_duplicate")
|
||||
assert r.returncode == 0
|
||||
# Drop index
|
||||
models_py.write_text(MODELS)
|
||||
run_aerich("aerich migrate") # migrations/models/2_
|
||||
run_aerich("aerich upgrade")
|
||||
r = run_shell("pytest -s _test.py::test_allow_duplicate")
|
||||
r = run_shell("pytest -s _tests.py::test_allow_duplicate")
|
||||
assert r.returncode == 0
|
||||
# Add unique index
|
||||
models_py.write_text(MODELS.replace("index=False", "index=True, unique=True"))
|
||||
run_aerich("aerich migrate") # migrations/models/3_
|
||||
run_aerich("aerich upgrade")
|
||||
r = run_shell("pytest _test.py::test_unique_is_true")
|
||||
r = run_shell("pytest _tests.py::test_unique_is_true")
|
||||
assert r.returncode == 0
|
||||
# Drop unique index
|
||||
models_py.write_text(MODELS)
|
||||
run_aerich("aerich migrate") # migrations/models/4_
|
||||
run_aerich("aerich upgrade")
|
||||
r = run_shell("pytest _test.py::test_allow_duplicate")
|
||||
r = run_shell("pytest _tests.py::test_allow_duplicate")
|
||||
assert r.returncode == 0
|
||||
# Add field with unique=True
|
||||
with models_py.open("a") as f:
|
||||
f.write(" age = fields.IntField(unique=True, default=0)")
|
||||
run_aerich("aerich migrate") # migrations/models/5_
|
||||
run_aerich("aerich upgrade")
|
||||
r = run_shell("pytest _test.py::test_add_unique_field")
|
||||
r = run_shell("pytest _tests.py::test_add_unique_field")
|
||||
assert r.returncode == 0
|
||||
# Drop unique field
|
||||
models_py.write_text(MODELS)
|
||||
run_aerich("aerich migrate") # migrations/models/6_
|
||||
run_aerich("aerich upgrade")
|
||||
r = run_shell("pytest -s _test.py::test_drop_unique_field")
|
||||
r = run_shell("pytest -s _tests.py::test_drop_unique_field")
|
||||
assert r.returncode == 0
|
||||
|
||||
# Initial with indexed field and then drop it
|
||||
@@ -235,14 +90,14 @@ def test_sqlite_migrate(tmp_path: Path) -> None:
|
||||
run_aerich("aerich init-db")
|
||||
migration_file = list(migrations_dir.glob("0_*.py"))[0]
|
||||
assert "CREATE INDEX" in migration_file.read_text()
|
||||
r = run_shell("pytest _test.py::test_with_age_field")
|
||||
r = run_shell("pytest _tests.py::test_with_age_field")
|
||||
assert r.returncode == 0
|
||||
models_py.write_text(MODELS)
|
||||
run_aerich("aerich migrate")
|
||||
run_aerich("aerich upgrade")
|
||||
migration_file_1 = list(migrations_dir.glob("1_*.py"))[0]
|
||||
assert "DROP INDEX" in migration_file_1.read_text()
|
||||
r = run_shell("pytest _test.py::test_without_age_field")
|
||||
r = run_shell("pytest _tests.py::test_without_age_field")
|
||||
assert r.returncode == 0
|
||||
|
||||
# Generate migration file in emptry directory
|
||||
@@ -283,7 +138,7 @@ class FooGroup(Model):
|
||||
run_aerich("aerich upgrade")
|
||||
migration_file_1 = list(migrations_dir.glob("1_*.py"))[0]
|
||||
assert "foo_group" in migration_file_1.read_text()
|
||||
r = run_shell("pytest _test.py::test_m2m_with_custom_through")
|
||||
r = run_shell("pytest _tests.py::test_m2m_with_custom_through")
|
||||
assert r.returncode == 0
|
||||
|
||||
# add m2m field after init-db
|
||||
@@ -304,5 +159,5 @@ class Group(Model):
|
||||
run_aerich("aerich upgrade")
|
||||
migration_file_1 = list(migrations_dir.glob("1_*.py"))[0]
|
||||
assert "foo_group" in migration_file_1.read_text()
|
||||
r = run_shell("pytest _test.py::test_add_m2m_field_after_init_db")
|
||||
r = run_shell("pytest _tests.py::test_add_m2m_field_after_init_db")
|
||||
assert r.returncode == 0
|
||||
|
||||
Reference in New Issue
Block a user