Compare commits

23 Commits

SHA1:
- d6a51bd20e
- c1dea4e846
- 5e8a7c7e91
- 7d22518c74
- f93faa8afb
- 1acb9ed1e7
- 69ce0cafa1
- 4fc7f324d4
- d8addadb37
- 0780919ef3
- 5af8c9cd56
- 56da0e7e3c
- 6270c4781e
- 12d0a5dad1
- 56eff1b22f
- e4a3863f80
- 5572876714
- 3d840395f1
- accceef24f
- 9c81bc6036
- c2ebe9b5e4
- 8cefe68c9b
- 44025823ee
.github/workflows/ci.yml (vendored): 23 lines changed

@@ -20,7 +20,12 @@ jobs:
options: --health-cmd=pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
strategy:
matrix:
python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
tortoise-orm:
- tortoise021
- tortoise022
- tortoise023
- tortoisedev
steps:
- name: Start MySQL
run: sudo systemctl start mysql.service
@@ -38,6 +43,20 @@ jobs:
run: |
pip install -U pip poetry
poetry config virtualenvs.create false
- name: Install dependencies and check style
run: make check
- name: Install TortoiseORM v0.21
if: matrix.tortoise-orm == 'tortoise021'
run: poetry run pip install --upgrade "tortoise-orm>=0.21,<0.22"
- name: Install TortoiseORM v0.22
if: matrix.tortoise-orm == 'tortoise022'
run: poetry run pip install --upgrade "tortoise-orm>=0.22,<0.23"
- name: Install TortoiseORM v0.23
if: matrix.tortoise-orm == 'tortoise023'
run: poetry run pip install --upgrade "tortoise-orm>=0.23,<0.24"
- name: Install TortoiseORM develop branch
if: matrix.tortoise-orm == 'tortoisedev'
run: poetry run pip install --upgrade "git+https://github.com/tortoise/tortoise-orm"
- name: CI
env:
MYSQL_PASS: root
@@ -46,4 +65,4 @@ jobs:
POSTGRES_PASS: 123456
POSTGRES_HOST: 127.0.0.1
POSTGRES_PORT: 5432
run: make ci
run: make _testall

CHANGELOG.md: 42 lines changed

@@ -2,6 +2,47 @@

## 0.8

### [0.8.1](../../releases/tag/v0.8.1) - 2024-12-27

#### Fixed
- fix: add o2o field does not create constraint when migrating. ([#396])
- Migration with duplicate renaming of columns in some cases. ([#395])
- fix: intermediate table for m2m relation not created. ([#394])
- Migrate add m2m field with custom through generate duplicated table. ([#393])
- Migrate drop the wrong m2m field when model have multi m2m fields. ([#376])
- KeyError raised when removing or renaming an existing model. ([#386])
- fix: error when there is `__init__.py` in the migration folder. ([#272])
- Setting null=false on m2m field causes migration to fail. ([#334])
- Fix NonExistentKey when running `aerich init` without `[tool]` section in config file. ([#284])
- Fix configuration file reading error when containing Chinese characters. ([#286])
- sqlite: failed to create/drop index. ([#302])
- PostgreSQL: Cannot drop constraint after deleting or rename FK on a model. ([#378])
- Fix create/drop indexes in every migration. ([#377])
- Sort m2m fields before comparing them with diff. ([#271])

#### Changed
- Allow run `aerich init-db` with empty migration directories instead of abort with warnings. ([#286])
- Add version constraint(>=0.21) for tortoise-orm. ([#388])
- Move `tomlkit` to optional and support `pip install aerich[toml]`. ([#392])

[#396]: https://github.com/tortoise/aerich/pull/396
[#395]: https://github.com/tortoise/aerich/pull/395
[#394]: https://github.com/tortoise/aerich/pull/394
[#393]: https://github.com/tortoise/aerich/pull/393
[#376]: https://github.com/tortoise/aerich/pull/376
[#386]: https://github.com/tortoise/aerich/pull/386
[#272]: https://github.com/tortoise/aerich/pull/272
[#334]: https://github.com/tortoise/aerich/pull/334
[#284]: https://github.com/tortoise/aerich/pull/284
[#286]: https://github.com/tortoise/aerich/pull/286
[#302]: https://github.com/tortoise/aerich/pull/302
[#378]: https://github.com/tortoise/aerich/pull/378
[#377]: https://github.com/tortoise/aerich/pull/377
[#271]: https://github.com/tortoise/aerich/pull/271
[#286]: https://github.com/tortoise/aerich/pull/286
[#388]: https://github.com/tortoise/aerich/pull/388
[#392]: https://github.com/tortoise/aerich/pull/392

### [0.8.0](../../releases/tag/v0.8.0) - 2024-12-04

- Fix the issue of parameter concatenation when generating ORM with inspectdb (#331)
@@ -9,6 +50,7 @@
- Correct the click import. (#360)
- Improve CLI help text and output. (#355)
- Fix mysql drop unique index raises OperationalError. (#346)

**Upgrade note:**
1. Use column name as unique key name for mysql
2. Drop support for Python3.7

Makefile: 13 lines changed

@@ -6,13 +6,13 @@ MYSQL_PORT ?= 3306
MYSQL_PASS ?= "123456"
POSTGRES_HOST ?= "127.0.0.1"
POSTGRES_PORT ?= 5432
POSTGRES_PASS ?= "123456"
POSTGRES_PASS ?= 123456

up:
@poetry update

deps:
@poetry install -E asyncpg -E asyncmy
@poetry install -E asyncpg -E asyncmy -E toml

_style:
@isort -src $(checkfiles)
@@ -23,17 +23,14 @@ _check:
@black --check $(black_opts) $(checkfiles) || (echo "Please run 'make style' to auto-fix style issues" && false)
@ruff check $(checkfiles)
@mypy $(checkfiles)
ifneq ($(shell python -c 'import sys;is_py38=sys.version_info<(3,9);rc=int(is_py38);sys.exit(rc)'),)
# Run bandit with Python3.9+, as the `usedforsecurity=...` parameter of `hashlib.new` is only added from Python 3.9 onwards.
@bandit -r aerich
endif
check: deps _check

test: deps
$(py_warn) TEST_DB=sqlite://:memory: py.test
$(py_warn) TEST_DB=sqlite://:memory: pytest

test_sqlite:
$(py_warn) TEST_DB=sqlite://:memory: py.test
$(py_warn) TEST_DB=sqlite://:memory: pytest

test_mysql:
$(py_warn) TEST_DB="mysql://root:$(MYSQL_PASS)@$(MYSQL_HOST):$(MYSQL_PORT)/test_\{\}" pytest -vv -s
@@ -47,4 +44,4 @@ testall: deps _testall
build: deps
@poetry build

ci: check _testall
ci: build _check _testall

@@ -17,7 +17,7 @@ it's own migration solution.
Just install from pypi:

```shell
pip install aerich
pip install "aerich[toml]"
```

## Quick Start

@@ -42,7 +42,7 @@ class Command:
async def _upgrade(self, conn, version_file) -> None:
file_path = Path(Migrate.migrate_location, version_file)
m = import_py_file(file_path)
upgrade = getattr(m, "upgrade")
upgrade = m.upgrade
await conn.execute_script(await upgrade(conn))
await Aerich.create(
version=version_file,
@@ -89,7 +89,7 @@ class Command:
) as conn:
file_path = Path(Migrate.migrate_location, file)
m = import_py_file(file_path)
downgrade = getattr(m, "downgrade")
downgrade = m.downgrade
downgrade_sql = await downgrade(conn)
if not downgrade_sql.strip():
raise DowngradeError("No downgrade items found")
@@ -133,7 +133,12 @@ class Command:
location = self.location
app = self.app
dirname = Path(location, app)
dirname.mkdir(parents=True)
if not dirname.exists():
dirname.mkdir(parents=True)
else:
# If directory is empty, go ahead, otherwise raise FileExistsError
for unexpected_file in dirname.glob("*"):
raise FileExistsError(str(unexpected_file))

await Tortoise.init(config=self.tortoise_config)
connection = get_app_connection(self.tortoise_config, app)

@@ -1,11 +1,10 @@
import os
import sys
from pathlib import Path
from typing import Dict, List, cast

import asyncclick as click
import tomlkit
from asyncclick import Context, UsageError
from tomlkit.exceptions import NonExistentKey

from aerich import Command
from aerich.enums import Color
@@ -13,6 +12,14 @@ from aerich.exceptions import DowngradeError
from aerich.utils import add_src_path, get_tortoise_config
from aerich.version import __version__

if sys.version_info >= (3, 11):
import tomllib
else:
try:
import tomli as tomllib
except ImportError:
import tomlkit as tomllib  # type: ignore

CONFIG_DEFAULT_VALUES = {
"src_folder": ".",
}
@@ -40,15 +47,17 @@ async def cli(ctx: Context, config, app) -> None:
raise UsageError(
"You need to run `aerich init` first to create the config file.", ctx=ctx
)
content = config_path.read_text()
doc: dict = tomlkit.parse(content)
content = config_path.read_text("utf-8")
doc: dict = tomllib.loads(content)
try:
tool = cast(Dict[str, str], doc["tool"]["aerich"])
location = tool["location"]
tortoise_orm = tool["tortoise_orm"]
src_folder = tool.get("src_folder", CONFIG_DEFAULT_VALUES["src_folder"])
except NonExistentKey:
raise UsageError("You need run `aerich init` again when upgrading to aerich 0.6.0+.")
except KeyError as e:
raise UsageError(
"You need run `aerich init` again when upgrading to aerich 0.6.0+."
) from e
add_src_path(src_folder)
tortoise_config = get_tortoise_config(ctx, tortoise_orm)
if not app:
@@ -170,6 +179,10 @@ async def history(ctx: Context) -> None:
)
@click.pass_context
async def init(ctx: Context, tortoise_orm, location, src_folder) -> None:
try:
import tomli_w as tomlkit
except ImportError:
import tomlkit  # type: ignore
config_file = ctx.obj["config_file"]

if os.path.isabs(src_folder):
@@ -182,17 +195,16 @@ async def init(ctx: Context, tortoise_orm, location, src_folder) -> None:
add_src_path(src_folder)
get_tortoise_config(ctx, tortoise_orm)
config_path = Path(config_file)
if config_path.exists():
content = config_path.read_text()
else:
content = "[tool.aerich]"
doc: dict = tomlkit.parse(content)
table = tomlkit.table()
content = config_path.read_text("utf-8") if config_path.exists() else "[tool.aerich]"
doc: dict = tomllib.loads(content)
table: dict = getattr(tomlkit, "table", dict)()
table["tortoise_orm"] = tortoise_orm
table["location"] = location
table["src_folder"] = src_folder
doc["tool"]["aerich"] = table

try:
doc["tool"]["aerich"] = table
except KeyError:
doc["tool"] = {"aerich": table}
config_path.write_text(tomlkit.dumps(doc))

Path(location).mkdir(parents=True, exist_ok=True)

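The cli.py hunks above move aerich from tomlkit-everywhere to a tiered TOML setup: the stdlib `tomllib` (Python 3.11+) or `tomli` for reading, `tomli_w` for writing, and `tomlkit` only as a last-resort fallback behind the new optional `aerich[toml]` extra. The following is a minimal standalone sketch of that fallback pattern, not aerich's actual code; the config snippet and helper names are invented.

```python
import sys

if sys.version_info >= (3, 11):
    import tomllib  # stdlib TOML reader on Python 3.11+
else:
    try:
        import tomli as tomllib  # lightweight backport of tomllib
    except ImportError:
        import tomlkit as tomllib  # type: ignore  # heavier fallback, same loads() shape

try:
    import tomli_w as toml_writer  # plain TOML writer
except ImportError:
    import tomlkit as toml_writer  # type: ignore  # tomlkit.dumps also accepts a dict


def read_config(text: str) -> dict:
    # tomllib.loads, tomli.loads and tomlkit.loads all take a str and return a mapping
    return dict(tomllib.loads(text))


def write_config(doc: dict) -> str:
    # tomli_w.dumps and tomlkit.dumps both serialize a plain dict to TOML text
    return toml_writer.dumps(doc)


if __name__ == "__main__":
    doc = read_config('[tool.aerich]\ntortoise_orm = "settings.TORTOISE_ORM"\n')
    aerich_table = dict(doc["tool"]["aerich"])
    aerich_table["location"] = "./migrations"
    print(write_config({"tool": {"aerich": aerich_table}}))
```
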
@@ -3,6 +3,7 @@ from typing import Any, List, Type, cast

from tortoise import BaseDBAsyncClient, Model
from tortoise.backends.base.schema_generator import BaseSchemaGenerator
from tortoise.backends.sqlite.schema_generator import SqliteSchemaGenerator

from aerich.utils import is_default_function

@@ -122,7 +123,12 @@ class BaseDDL:
unique = ""
template = self._MODIFY_COLUMN_TEMPLATE
else:
unique = "UNIQUE" if field_describe.get("unique") else ""
# sqlite does not support alter table to add unique column
unique = (
"UNIQUE"
if field_describe.get("unique") and self.DIALECT != SqliteSchemaGenerator.DIALECT
else ""
)
template = self._ADD_COLUMN_TEMPLATE
return template.format(
table_name=db_table,

@@ -10,13 +10,13 @@ class PostgresDDL(BaseDDL):
schema_generator_cls = AsyncpgSchemaGenerator
DIALECT = AsyncpgSchemaGenerator.DIALECT
_ADD_INDEX_TEMPLATE = 'CREATE {unique}INDEX "{index_name}" ON "{table_name}" ({column_names})'
_DROP_INDEX_TEMPLATE = 'DROP INDEX "{index_name}"'
_DROP_INDEX_TEMPLATE = 'DROP INDEX IF EXISTS "{index_name}"'
_ALTER_NULL_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" {set_drop} NOT NULL'
_MODIFY_COLUMN_TEMPLATE = (
'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" TYPE {datatype}{using}'
)
_SET_COMMENT_TEMPLATE = 'COMMENT ON COLUMN "{table_name}"."{column}" IS {comment}'
_DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP CONSTRAINT "{fk_name}"'
_DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP CONSTRAINT IF EXISTS "{fk_name}"'

def alter_column_null(self, model: "Type[Model]", field_describe: dict) -> str:
db_table = model._meta.db_table

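A side note on the two PostgresDDL template changes above: these class attributes are plain `str.format` templates, so switching to `IF EXISTS` only changes the SQL text that gets emitted, making the drops idempotent. A tiny illustration with invented table, index and constraint names (not aerich code):

```python
# The two templates from the diff, rendered with made-up identifiers.
DROP_INDEX_TEMPLATE = 'DROP INDEX IF EXISTS "{index_name}"'
DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP CONSTRAINT IF EXISTS "{fk_name}"'

print(DROP_INDEX_TEMPLATE.format(index_name="idx_user_email"))
# DROP INDEX IF EXISTS "idx_user_email"
print(DROP_FK_TEMPLATE.format(table_name="user", fk_name="fk_user_team_id"))
# ALTER TABLE "user" DROP CONSTRAINT IF EXISTS "fk_user_team_id"
```
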
@@ -10,6 +10,8 @@ from aerich.exceptions import NotSupportError
class SqliteDDL(BaseDDL):
schema_generator_cls = SqliteSchemaGenerator
DIALECT = SqliteSchemaGenerator.DIALECT
_ADD_INDEX_TEMPLATE = 'CREATE {unique}INDEX "{index_name}" ON "{table_name}" ({column_names})'
_DROP_INDEX_TEMPLATE = 'DROP INDEX IF EXISTS "{index_name}"'

def modify_column(self, model: "Type[Model]", field_object: dict, is_pk: bool = True):
raise NotSupportError("Modify column is unsupported in SQLite.")

@@ -1,5 +1,6 @@
from __future__ import annotations

import contextlib
from typing import Any, Callable, Dict, Optional, TypedDict

from pydantic import BaseModel
@@ -61,8 +62,8 @@ class Column(BaseModel):
elif self.data_type == "bool":
default = f"default={'True' if self.default == 'true' else 'False'}, "
elif self.data_type in ("datetime", "timestamptz", "TIMESTAMP"):
if "CURRENT_TIMESTAMP" == self.default:
if "DEFAULT_GENERATED on update CURRENT_TIMESTAMP" == self.extra:
if self.default == "CURRENT_TIMESTAMP":
if self.extra == "DEFAULT_GENERATED on update CURRENT_TIMESTAMP":
default = "auto_now=True, "
else:
default = "auto_now_add=True, "
@@ -94,10 +95,8 @@ class Inspect:

def __init__(self, conn: BaseDBAsyncClient, tables: list[str] | None = None) -> None:
self.conn = conn
try:
with contextlib.suppress(AttributeError):
self.database = conn.database  # type:ignore[attr-defined]
except AttributeError:
pass
self.tables = tables

@property

@@ -40,16 +40,11 @@ where c.TABLE_SCHEMA = %s
and c.TABLE_NAME = %s"""
ret = await self.conn.execute_query_dict(sql, [self.database, table])
for row in ret:
non_unique = row["NON_UNIQUE"]
if non_unique is None:
unique = False
else:
unique = index = False
if (non_unique := row["NON_UNIQUE"]) is not None:
unique = not non_unique
index_name = row["INDEX_NAME"]
if index_name is None:
index = False
else:
index = row["INDEX_NAME"] != "PRIMARY"
if (index_name := row["INDEX_NAME"]) is not None:
index = index_name != "PRIMARY"
columns.append(
Column(
name=row["COLUMN_NAME"],

@@ -1,4 +1,5 @@
import hashlib
from __future__ import annotations

import importlib
import os
from datetime import datetime
@@ -6,6 +7,7 @@ from pathlib import Path
from typing import Dict, Iterable, List, Optional, Set, Tuple, Type, Union, cast

import asyncclick as click
import tortoise
from dictdiffer import diff
from tortoise import BaseDBAsyncClient, Model, Tortoise
from tortoise.exceptions import OperationalError
@@ -13,7 +15,12 @@ from tortoise.indexes import Index

from aerich.ddl import BaseDDL
from aerich.models import MAX_VERSION_LENGTH, Aerich
from aerich.utils import get_app_connection, get_models_describe, is_default_function
from aerich.utils import (
get_app_connection,
get_dict_diff_by_key,
get_models_describe,
is_default_function,
)

MIGRATE_TEMPLATE = """from tortoise import BaseDBAsyncClient

@@ -37,8 +44,7 @@ class Migrate:
_upgrade_m2m: List[str] = []
_downgrade_m2m: List[str] = []
_aerich = Aerich.__name__
_rename_old: List[str] = []
_rename_new: List[str] = []
_rename_fields: Dict[str, Dict[str, str]] = {}  # {'model': {'old_field': 'new_field'}}

ddl: BaseDDL
ddl_class: Type[BaseDDL]
@@ -54,14 +60,22 @@ class Migrate:

@classmethod
def get_all_version_files(cls) -> List[str]:
return sorted(
filter(lambda x: x.endswith("py"), os.listdir(cls.migrate_location)),
key=lambda x: int(x.split("_")[0]),
)
def get_file_version(file_name: str) -> str:
return file_name.split("_")[0]

def is_version_file(file_name: str) -> bool:
if not file_name.endswith("py"):
return False
if "_" not in file_name:
return False
return get_file_version(file_name).isdigit()

files = filter(is_version_file, os.listdir(cls.migrate_location))
return sorted(files, key=lambda x: int(get_file_version(x)))

@classmethod
def _get_model(cls, model: str) -> Type[Model]:
return Tortoise.apps[cls.app][model]
return Tortoise.apps[cls.app].get(model)  # type: ignore

@classmethod
async def get_last_version(cls) -> Optional[Aerich]:
@@ -189,21 +203,25 @@ class Migrate:

@classmethod
def _handle_indexes(cls, model: Type[Model], indexes: List[Union[Tuple[str], Index]]) -> list:
ret: list = []
if tortoise.__version__ > "0.22.2":
# The min version of tortoise is '0.11.0', so we can compare it by a `>`,
# tortoise>0.22.2 have __eq__/__hash__ with Index class since 313ee76.
return indexes
if index_classes := set(index.__class__ for index in indexes if isinstance(index, Index)):
# Leave magic patch here to compare with older version of tortoise-orm
# TODO: limit tortoise>0.22.2 in pyproject.toml and remove this function when v0.9.0 released
for index_cls in index_classes:
if index_cls(fields=("id",)) != index_cls(fields=("id",)):

def index_hash(self) -> str:
h = hashlib.new("MD5", usedforsecurity=False)  # type:ignore[call-arg]
h.update(
self.index_name(cls.ddl.schema_generator, model).encode()
+ self.__class__.__name__.encode()
)
return h.hexdigest()
def _hash(self) -> int:
return hash((tuple(sorted(self.fields)), self.name, self.expressions))

for index in indexes:
if isinstance(index, Index):
index.__hash__ = index_hash  # type:ignore[method-assign,assignment]
ret.append(index)
return ret
def _eq(self, other) -> bool:
return type(self) is type(other) and self.__dict__ == other.__dict__

setattr(index_cls, "__hash__", _hash)
setattr(index_cls, "__eq__", _eq)
return indexes

@classmethod
def _get_indexes(cls, model, model_describe: dict) -> Set[Union[Index, Tuple[str, ...]]]:
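
For readers unfamiliar with the "magic patch" in the hunk above: according to the comments in the diff, tortoise-orm only gained value-based `__eq__`/`__hash__` on `Index` after 0.22.2, so with older versions two identical index definitions compare unequal and set-based diffing reports spurious index changes. A self-contained toy illustration of why patching `__eq__`/`__hash__` fixes that (this is not tortoise-orm's real class):

```python
class ToyIndex:
    # Stands in for an older tortoise-orm Index: without __eq__/__hash__,
    # comparison falls back to object identity.
    def __init__(self, fields):
        self.fields = tuple(fields)
        self.name = None
        self.expressions = ()


def _eq(self, other) -> bool:
    return type(self) is type(other) and self.__dict__ == other.__dict__


def _hash(self) -> int:
    return hash((tuple(sorted(self.fields)), self.name, self.expressions))


print(ToyIndex(fields=("id",)) == ToyIndex(fields=("id",)))  # False: identity comparison
ToyIndex.__eq__ = _eq
ToyIndex.__hash__ = _hash
print(ToyIndex(fields=("id",)) == ToyIndex(fields=("id",)))  # True after the patch
print(len({ToyIndex(fields=("id",)), ToyIndex(fields=("id",))}))  # 1: sets now dedupe
```
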
@@ -215,6 +233,121 @@ class Migrate:
indexes.add(cast(Tuple[str, ...], tuple(x)))
return indexes

@staticmethod
def _validate_custom_m2m_through(field: dict) -> None:
# TODO: Check whether field includes required fk columns
pass

@classmethod
def _handle_m2m_fields(
cls, old_model_describe: Dict, new_model_describe: Dict, model, new_models, upgrade=True
) -> None:
old_m2m_fields = cast(List[dict], old_model_describe.get("m2m_fields", []))
new_m2m_fields = cast(List[dict], new_model_describe.get("m2m_fields", []))
new_tables: Dict[str, dict] = {field["table"]: field for field in new_models.values()}
for action, option, change in get_dict_diff_by_key(old_m2m_fields, new_m2m_fields):
if (option and option[-1] == "nullable") or change[0][0] == "db_constraint":
continue
new_value = change[0][1]
if isinstance(new_value, str):
for new_m2m_field in new_m2m_fields:
if new_m2m_field["name"] == new_value:
table = cast(str, new_m2m_field.get("through"))
break
else:
table = new_value.get("through")
if action == "add":
add = False
if upgrade:
if field := new_tables.get(table):
cls._validate_custom_m2m_through(field)
elif table not in cls._upgrade_m2m:
cls._upgrade_m2m.append(table)
add = True
else:
if table not in cls._downgrade_m2m:
cls._downgrade_m2m.append(table)
add = True
if add:
ref_desc = cast(dict, new_models.get(new_value.get("model_name")))
cls._add_operator(
cls.create_m2m(model, new_value, ref_desc),
upgrade,
fk_m2m_index=True,
)
elif action == "remove":
add = False
if upgrade and table not in cls._upgrade_m2m:
cls._upgrade_m2m.append(table)
add = True
elif not upgrade and table not in cls._downgrade_m2m:
cls._downgrade_m2m.append(table)
add = True
if add:
cls._add_operator(cls.drop_m2m(table), upgrade, True)

@classmethod
def _handle_relational(
cls,
key: str,
old_model_describe: Dict,
new_model_describe: Dict,
model: Type[Model],
old_models: Dict,
new_models: Dict,
upgrade=True,
) -> None:
old_fk_fields = cast(List[dict], old_model_describe.get(key))
new_fk_fields = cast(List[dict], new_model_describe.get(key))

old_fk_fields_name: List[str] = [i.get("name", "") for i in old_fk_fields]
new_fk_fields_name: List[str] = [i.get("name", "") for i in new_fk_fields]

# add
for new_fk_field_name in set(new_fk_fields_name).difference(set(old_fk_fields_name)):
fk_field = cls.get_field_by_name(new_fk_field_name, new_fk_fields)
if fk_field.get("db_constraint"):
ref_describe = cast(dict, new_models[fk_field["python_type"]])
sql = cls._add_fk(model, fk_field, ref_describe)
cls._add_operator(sql, upgrade, fk_m2m_index=True)
# drop
for old_fk_field_name in set(old_fk_fields_name).difference(set(new_fk_fields_name)):
old_fk_field = cls.get_field_by_name(old_fk_field_name, cast(List[dict], old_fk_fields))
if old_fk_field.get("db_constraint"):
ref_describe = cast(dict, old_models[old_fk_field["python_type"]])
sql = cls._drop_fk(model, old_fk_field, ref_describe)
cls._add_operator(sql, upgrade, fk_m2m_index=True)

@classmethod
def _handle_fk_fields(
cls,
old_model_describe: Dict,
new_model_describe: Dict,
model: Type[Model],
old_models: Dict,
new_models: Dict,
upgrade=True,
) -> None:
key = "fk_fields"
cls._handle_relational(
key, old_model_describe, new_model_describe, model, old_models, new_models, upgrade
)

@classmethod
def _handle_o2o_fields(
cls,
old_model_describe: Dict,
new_model_describe: Dict,
model: Type[Model],
old_models: Dict,
new_models: Dict,
upgrade=True,
) -> None:
key = "o2o_fields"
cls._handle_relational(
key, old_model_describe, new_model_describe, model, old_models, new_models, upgrade
)

@classmethod
def diff_models(
cls, old_models: Dict[str, dict], new_models: Dict[str, dict], upgrade=True
@@ -229,13 +362,14 @@ class Migrate:
_aerich = f"{cls.app}.{cls._aerich}"
old_models.pop(_aerich, None)
new_models.pop(_aerich, None)
models_with_rename_field: Set[str] = set()  # models that trigger the click.prompt

for new_model_str, new_model_describe in new_models.items():
model = cls._get_model(new_model_describe["name"].split(".")[1])

if new_model_str not in old_models:
if upgrade:
cls._add_operator(cls.add_model(model), upgrade)
cls._handle_m2m_fields({}, new_model_describe, model, new_models, upgrade)
else:
# we can't find origin model when downgrade, so skip
pass
@@ -268,45 +402,17 @@ class Migrate:
# current only support rename pk
if action == "change" and option == "name":
cls._add_operator(cls._rename_field(model, *change), upgrade)
# fk fields
args = (old_model_describe, new_model_describe, model, old_models, new_models)
cls._handle_fk_fields(*args, upgrade=upgrade)
# o2o fields
cls._handle_o2o_fields(*args, upgrade=upgrade)
old_o2o_columns = [i["raw_field"] for i in old_model_describe.get("o2o_fields", [])]
new_o2o_columns = [i["raw_field"] for i in new_model_describe.get("o2o_fields", [])]
# m2m fields
old_m2m_fields = cast(List[dict], old_model_describe.get("m2m_fields"))
new_m2m_fields = cast(List[dict], new_model_describe.get("m2m_fields"))
for action, option, change in diff(old_m2m_fields, new_m2m_fields):
if change[0][0] == "db_constraint":
continue
new_value = change[0][1]
if isinstance(new_value, str):
for new_m2m_field in new_m2m_fields:
if new_m2m_field["name"] == new_value:
table = cast(str, new_m2m_field.get("through"))
break
else:
table = new_value.get("through")
if action == "add":
add = False
if upgrade and table not in cls._upgrade_m2m:
cls._upgrade_m2m.append(table)
add = True
elif not upgrade and table not in cls._downgrade_m2m:
cls._downgrade_m2m.append(table)
add = True
if add:
ref_desc = cast(dict, new_models.get(new_value.get("model_name")))
cls._add_operator(
cls.create_m2m(model, new_value, ref_desc),
upgrade,
fk_m2m_index=True,
)
elif action == "remove":
add = False
if upgrade and table not in cls._upgrade_m2m:
cls._upgrade_m2m.append(table)
add = True
elif not upgrade and table not in cls._downgrade_m2m:
cls._downgrade_m2m.append(table)
add = True
if add:
cls._add_operator(cls.drop_m2m(table), upgrade, True)
cls._handle_m2m_fields(
old_model_describe, new_model_describe, model, new_models, upgrade
)
# add unique_together
for index in new_unique_together.difference(old_unique_together):
cls._add_operator(cls._add_index(model, index, True), upgrade, True)
@@ -341,41 +447,63 @@ class Migrate:
):
new_data_field = cls.get_field_by_name(new_data_field_name, new_data_fields)
is_rename = False
for old_data_field in old_data_fields:
field_type = new_data_field.get("field_type")
db_column = new_data_field.get("db_column")
new_name = set(new_data_field_name)
for old_data_field in sorted(
old_data_fields,
key=lambda f: (
f.get("field_type") != field_type,
# old field whose name have more same characters with new field's
# should be put in front of the other
len(new_name.symmetric_difference(set(f.get("name", "")))),
),
):
changes = list(diff(old_data_field, new_data_field))
old_data_field_name = cast(str, old_data_field.get("name"))
if len(changes) == 2:
# rename field
name_diff = (old_data_field_name, new_data_field_name)
column_diff = (old_data_field.get("db_column"), db_column)
if (
changes[0]
== (
"change",
"name",
(old_data_field_name, new_data_field_name),
)
and changes[1]
== (
"change",
"db_column",
(
old_data_field.get("db_column"),
new_data_field.get("db_column"),
),
)
changes[0] == ("change", "name", name_diff)
and changes[1] == ("change", "db_column", column_diff)
and old_data_field_name not in new_data_fields_name
):
if upgrade:
if (
rename_fields := cls._rename_fields.get(new_model_str)
) and (
old_data_field_name in rename_fields
or new_data_field_name in rename_fields.values()
):
continue
prefix = f"({new_model_str}) "
if new_model_str not in models_with_rename_field:
if models_with_rename_field:
# When there are multi rename fields with different models,
# print a empty line to warn that is another model
prefix = "\n" + prefix
models_with_rename_field.add(new_model_str)
is_rename = click.prompt(
f"Rename {old_data_field_name} to {new_data_field_name}?",
f"{prefix}Rename {old_data_field_name} to {new_data_field_name}?",
default=True,
type=bool,
show_choices=True,
)
if is_rename:
if rename_fields is None:
rename_fields = cls._rename_fields[new_model_str] = {}
rename_fields[old_data_field_name] = new_data_field_name
else:
is_rename = old_data_field_name in cls._rename_new
is_rename = False
if rename_to := cls._rename_fields.get(new_model_str, {}).get(
new_data_field_name
):
is_rename = True
if rename_to != old_data_field_name:
continue
if is_rename:
cls._rename_new.append(new_data_field_name)
cls._rename_old.append(old_data_field_name)
# only MySQL8+ has rename syntax
if (
cls.dialect == "mysql"
@@ -394,14 +522,11 @@ class Migrate:
upgrade,
)
if not is_rename:
cls._add_operator(
cls._add_field(
model,
new_data_field,
),
upgrade,
)
if new_data_field["indexed"]:
cls._add_operator(cls._add_field(model, new_data_field), upgrade)
if (
new_data_field["indexed"]
and new_data_field["db_column"] not in new_o2o_columns
):
cls._add_operator(
cls._add_index(
model, (new_data_field["db_column"],), new_data_field["unique"]
@@ -410,12 +535,14 @@ class Migrate:
True,
)
# remove fields
rename_fields = cls._rename_fields.get(new_model_str)
for old_data_field_name in set(old_data_fields_name).difference(
set(new_data_fields_name)
):
# don't remove field if is renamed
if (upgrade and old_data_field_name in cls._rename_old) or (
not upgrade and old_data_field_name in cls._rename_new
if rename_fields and (
(upgrade and old_data_field_name in rename_fields)
or (not upgrade and old_data_field_name in rename_fields.values())
):
continue
old_data_field = cls.get_field_by_name(old_data_field_name, old_data_fields)
@@ -424,7 +551,10 @@ class Migrate:
cls._remove_field(model, db_column),
upgrade,
)
if old_data_field["indexed"]:
if (
old_data_field["indexed"]
and old_data_field["db_column"] not in old_o2o_columns
):
is_unique_field = old_data_field.get("unique")
cls._add_operator(
cls._drop_index(model, {db_column}, is_unique_field),
@@ -432,38 +562,6 @@ class Migrate:
True,
)

old_fk_fields = cast(List[dict], old_model_describe.get("fk_fields"))
new_fk_fields = cast(List[dict], new_model_describe.get("fk_fields"))

old_fk_fields_name: List[str] = [i.get("name", "") for i in old_fk_fields]
new_fk_fields_name: List[str] = [i.get("name", "") for i in new_fk_fields]

# add fk
for new_fk_field_name in set(new_fk_fields_name).difference(
set(old_fk_fields_name)
):
fk_field = cls.get_field_by_name(new_fk_field_name, new_fk_fields)
if fk_field.get("db_constraint"):
ref_describe = cast(dict, new_models[fk_field["python_type"]])
cls._add_operator(
cls._add_fk(model, fk_field, ref_describe),
upgrade,
fk_m2m_index=True,
)
# drop fk
for old_fk_field_name in set(old_fk_fields_name).difference(
set(new_fk_fields_name)
):
old_fk_field = cls.get_field_by_name(
old_fk_field_name, cast(List[dict], old_fk_fields)
)
if old_fk_field.get("db_constraint"):
ref_describe = cast(dict, old_models[old_fk_field["python_type"]])
cls._add_operator(
cls._drop_fk(model, old_fk_field, ref_describe),
upgrade,
fk_m2m_index=True,
)
# change fields
for field_name in set(new_data_fields_name).intersection(set(old_data_fields_name)):
old_data_field = cls.get_field_by_name(field_name, old_data_fields)

@@ -1,12 +1,15 @@
from __future__ import annotations

import importlib.util
import os
import re
import sys
from pathlib import Path
from types import ModuleType
from typing import Dict, Optional, Union
from typing import Dict, Generator, Optional, Union

from asyncclick import BadOptionUsage, ClickException, Context
from dictdiffer import diff
from tortoise import BaseDBAsyncClient, Tortoise


@@ -101,3 +104,43 @@ def import_py_file(file: Union[str, Path]) -> ModuleType:
module = importlib.util.module_from_spec(spec)  # type:ignore[arg-type]
spec.loader.exec_module(module)  # type:ignore[union-attr]
return module


def get_dict_diff_by_key(
old_fields: list[dict], new_fields: list[dict], key="through"
) -> Generator[tuple]:
"""
Compare two list by key instead of by index

:param old_fields: previous field info list
:param new_fields: current field info list
:param key: if two dicts have the same value of this key, action is change; otherwise, is remove/add
:return: similar to dictdiffer.diff

Example::

>>> old = [{'through': 'a'}, {'through': 'b'}, {'through': 'c'}]
>>> new = [{'through': 'a'}, {'through': 'c'}]  # remove the second element
>>> list(diff(old, new))
[('change', [1, 'through'], ('b', 'c')),
('remove', '', [(2, {'through': 'c'})])]
>>> list(get_dict_diff_by_key(old, new))
[('remove', '', [(0, {'through': 'b'})])]

"""
length_old, length_new = len(old_fields), len(new_fields)
if length_old == 0 or length_new == 0 or length_old == length_new == 1:
yield from diff(old_fields, new_fields)
else:
value_index: dict[str, int] = {f[key]: i for i, f in enumerate(new_fields)}
additions = set(range(length_new))
for field in old_fields:
value = field[key]
if (index := value_index.get(value)) is not None:
additions.remove(index)
yield from diff([field], [new_fields[index]])  # change
else:
yield from diff([field], [])  # remove
if additions:
for index in sorted(additions):
yield from diff([], [new_fields[index]])  # add

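The doctest above shows the removal case; the same helper is what lets `_handle_m2m_fields` treat two descriptions sharing a `through` table as a change rather than a positional add/remove pair. A small usage sketch follows; the field dicts are invented and it assumes this version of aerich is importable.

```python
from aerich.utils import get_dict_diff_by_key

# Two made-up m2m field descriptions, keyed by their "through" table.
old = [{"through": "user_group", "name": "groups"}]
new = [
    {"through": "user_group", "name": "members"},  # same through table, field renamed
    {"through": "user_team", "name": "teams"},     # brand new through table
]

for action, path, change in get_dict_diff_by_key(old, new):
    print(action, path, change)
# Expected shape: a ('change', [0, 'name'], ('groups', 'members')) entry for the
# shared "user_group" element, then an ('add', ...) entry for "user_team".
```
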
@@ -1,4 +1,5 @@
import asyncio
import contextlib
import os
from typing import Generator

@@ -7,6 +8,7 @@ from tortoise import Tortoise, expand_db_url, generate_schema_for_client
from tortoise.backends.asyncpg.schema_generator import AsyncpgSchemaGenerator
from tortoise.backends.mysql.schema_generator import MySQLSchemaGenerator
from tortoise.backends.sqlite.schema_generator import SqliteSchemaGenerator
from tortoise.contrib.test import MEMORY_SQLITE
from tortoise.exceptions import DBConnectionError, OperationalError

from aerich.ddl.mysql import MysqlDDL
@@ -14,7 +16,6 @@ from aerich.ddl.postgres import PostgresDDL
from aerich.ddl.sqlite import SqliteDDL
from aerich.migrate import Migrate

MEMORY_SQLITE = "sqlite://:memory:"
db_url = os.getenv("TEST_DB", MEMORY_SQLITE)
db_url_second = os.getenv("TEST_DB_SECOND", MEMORY_SQLITE)
tortoise_orm = {
@@ -57,10 +58,8 @@ async def initialize_tests(event_loop, request) -> None:
# Placing init outside the try block since it doesn't
# establish connections to the DB eagerly.
await Tortoise.init(config=tortoise_orm)
try:
with contextlib.suppress(DBConnectionError, OperationalError):
await Tortoise._drop_databases()
except (DBConnectionError, OperationalError):
pass
await Tortoise.init(config=tortoise_orm, _create_db=True)
await generate_schema_for_client(Tortoise.get_connection("default"), safe=True)

poetry.lock (generated): 416 lines changed

@@ -82,12 +82,67 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""}

[[package]]
name = "asyncmy"
version = "0.2.10rc1"
version = "0.2.10"
description = "A fast asyncio MySQL driver"
optional = true
python-versions = "^3.7"
python-versions = ">=3.8,<4.0"
files = [
{file = "asyncmy-0.2.10rc1.tar.gz", hash = "sha256:ba97b7f9b9719b6cb15169f0bffbf20be63767ff5052a24c3663a1d558bced5a"},
# ... wheel and sdist hashes for asyncmy 0.2.10 ...
]

[[package]]
@@ -307,13 +362,13 @@ pycparser = "*"

[[package]]
name = "click"
version = "8.1.7"
version = "8.1.8"
description = "Composable command line interface toolkit"
optional = false
python-versions = ">=3.7"
files = [
{file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
{file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
{file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"},
{file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"},
]

[package.dependencies]
@@ -511,49 +566,49 @@ files = [

[[package]]
name = "mypy"
version = "1.13.0"
version = "1.14.0"
description = "Optional static typing for Python"
optional = false
python-versions = ">=3.8"
files = [
# ... wheel and sdist hashes for mypy 1.13.0 and 1.14.0 ...
]

[package.dependencies]
mypy-extensions = ">=1.0.0"
mypy_extensions = ">=1.0.0"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
typing-extensions = ">=4.6.0"
typing_extensions = ">=4.6.0"

[package.extras]
dmypy = ["psutil (>=4.0)"]
@@ -650,18 +705,18 @@ files = [

[[package]]
name = "pydantic"
version = "2.10.2"
version = "2.10.4"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.8"
files = [
    {file = "pydantic-2.10.2-py3-none-any.whl", hash = "sha256:cfb96e45951117c3024e6b67b25cdc33a3cb7b2fa62e239f7af1378358a1d99e"},
    {file = "pydantic-2.10.2.tar.gz", hash = "sha256:2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa"},
    {file = "pydantic-2.10.4-py3-none-any.whl", hash = "sha256:597e135ea68be3a37552fb524bc7d0d66dcf93d395acd93a00682f1efcb8ee3d"},
    {file = "pydantic-2.10.4.tar.gz", hash = "sha256:82f12e9723da6de4fe2ba888b5971157b3be7ad914267dea8f05f82b28254f06"},
]

[package.dependencies]
annotated-types = ">=0.6.0"
pydantic-core = "2.27.1"
pydantic-core = "2.27.2"
typing-extensions = ">=4.12.2"

[package.extras]
@@ -670,111 +725,111 @@ timezone = ["tzdata"]

[[package]]
name = "pydantic-core"
version = "2.27.1"
version = "2.27.2"
description = "Core functionality for Pydantic validation and serialization"
optional = false
python-versions = ">=3.8"
files = [
    {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"},
    ... (remaining pydantic_core 2.27.1 wheel hash entries) ...
    {file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"},
    {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"},
    ... (remaining pydantic_core 2.27.2 wheel hash entries) ...
    {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"},
]

[package.dependencies]
@@ -796,13 +851,13 @@ windows-terminal = ["colorama (>=0.4.6)"]

[[package]]
name = "pypika-tortoise"
version = "0.3.1"
version = "0.3.2"
description = "Forked from pypika and streamline just for tortoise-orm"
optional = false
python-versions = ">=3.8,<4.0"
files = [
    {file = "pypika_tortoise-0.3.1-py3-none-any.whl", hash = "sha256:eee0d49c99ed1b932f7c48f8b87d8492aeb3c7e6a48ba69bc462eb9e3b5b20a2"},
    {file = "pypika_tortoise-0.3.1.tar.gz", hash = "sha256:6f9861dd34fd21a009e79b174159e61699da28cb2607617e688b7e79e6c9ef7e"},
    {file = "pypika_tortoise-0.3.2-py3-none-any.whl", hash = "sha256:c5c52bc4473fe6f3db36cf659340750246ec5dd0f980d04ae7811430e299c3a2"},
    {file = "pypika_tortoise-0.3.2.tar.gz", hash = "sha256:f5d508e2ef00255e52ec6ac79ef889e10dbab328f218c55cd134c4d02ff9f6f4"},
]

[[package]]
@@ -976,29 +1031,29 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"]

[[package]]
name = "ruff"
version = "0.8.1"
version = "0.8.4"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
    {file = "ruff-0.8.1-py3-none-linux_armv6l.whl", hash = "sha256:fae0805bd514066f20309f6742f6ee7904a773eb9e6c17c45d6b1600ca65c9b5"},
    ... (remaining ruff 0.8.1 wheel hash entries) ...
    {file = "ruff-0.8.1.tar.gz", hash = "sha256:3583db9a6450364ed5ca3f3b4225958b24f78178908d5c4bc0f46251ccca898f"},
    {file = "ruff-0.8.4-py3-none-linux_armv6l.whl", hash = "sha256:58072f0c06080276804c6a4e21a9045a706584a958e644353603d36ca1eb8a60"},
    ... (remaining ruff 0.8.4 wheel hash entries) ...
    {file = "ruff-0.8.4.tar.gz", hash = "sha256:0d5f89f254836799af1615798caa5f80b7f935d7a670fad66c5007928e57ace8"},
]

[[package]]
@@ -1067,11 +1122,22 @@ files = [
    {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"},
]

[[package]]
name = "tomli-w"
version = "1.1.0"
description = "A lil' TOML writer"
optional = true
python-versions = ">=3.9"
files = [
    {file = "tomli_w-1.1.0-py3-none-any.whl", hash = "sha256:1403179c78193e3184bfaade390ddbd071cba48a32a2e62ba11aae47490c63f7"},
    {file = "tomli_w-1.1.0.tar.gz", hash = "sha256:49e847a3a304d516a169a601184932ef0f6b61623fe680f836a2aa7128ed0d33"},
]

[[package]]
name = "tomlkit"
version = "0.13.2"
description = "Style preserving TOML library"
optional = false
optional = true
python-versions = ">=3.8"
files = [
    {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"},
@@ -1080,20 +1146,19 @@ files = [

[[package]]
name = "tortoise-orm"
version = "0.22.1"
version = "0.23.0"
description = "Easy async ORM for python, built with relations in mind"
optional = false
python-versions = ">=3.8,<4.0"
files = [
    {file = "tortoise_orm-0.22.1-py3-none-any.whl", hash = "sha256:96b8dbc10956cb5cfb6f02841b238035924a6011b5a9737774a22859e1b7bcbf"},
    {file = "tortoise_orm-0.22.1.tar.gz", hash = "sha256:50cce7ab3eee5321553810ee31f411abde7a1806312655a377801c91e1b2cb77"},
    {file = "tortoise_orm-0.23.0-py3-none-any.whl", hash = "sha256:deaabed1619ea8aab6213508dff025571a701b7f34ee534473d7bb7661aa9f4f"},
    {file = "tortoise_orm-0.23.0.tar.gz", hash = "sha256:f25d431ef4fb521a84edad582f4b9c53dccc5abf6cfbc6f228cbece5a13952fa"},
]

[package.dependencies]
aiosqlite = ">=0.16.0,<0.21.0"
iso8601 = ">=2.1.0,<3.0.0"
pydantic = ">=2.0,<2.7.0 || >2.7.0,<3.0"
pypika-tortoise = ">=0.3.0,<0.4.0"
pypika-tortoise = ">=0.3.2,<0.4.0"
pytz = "*"

[package.extras]

@@ -1118,8 +1183,9 @@ files = [

[extras]
asyncmy = ["asyncmy"]
asyncpg = ["asyncpg"]
toml = ["tomli-w", "tomlkit"]

[metadata]
lock-version = "2.0"
python-versions = "^3.8"
content-hash = "ca5aa98cc3db69e15009ac541f161bab5bde8a53072a9dac39a78f58b5f8b183"
content-hash = "5a17cf1dd79829b76fc2c71cbd83032d70ada4f129cf56973c417eac91a975f6"
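The lock now resolves tortoise-orm 0.23.0 (which itself pins pypika-tortoise >=0.3.2), while the project metadata below only requires tortoise-orm >= 0.21. Purely as an illustration of that floor, a rough runtime guard could look like the sketch here; aerich relies on the packaging constraints, not on a check like this, and the error message is made up:

import tortoise

# Illustrative only: mirror the ">=0.21" floor declared in pyproject.toml.
_major_minor = tuple(int(part) for part in tortoise.__version__.split(".")[:2])
if _major_minor < (0, 21):
    raise RuntimeError(f"aerich 0.8.1 expects tortoise-orm >= 0.21, found {tortoise.__version__}")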
@@ -1,6 +1,6 @@
[tool.poetry]
name = "aerich"
version = "0.8.0"
version = "0.8.1"
description = "A database migrations tool for Tortoise ORM."
authors = ["long2ice <long2ice@gmail.com>"]
license = "Apache-2.0"
@@ -16,12 +16,13 @@ include = ["CHANGELOG.md", "LICENSE", "README.md"]

[tool.poetry.dependencies]
python = "^3.8"
tortoise-orm = "*"
tortoise-orm = ">=0.21"
asyncpg = { version = "*", optional = true }
asyncmy = { version = "^0.2.9", optional = true, allow-prereleases = true }
pydantic = "^2.0"
pydantic = "^2.0,!=2.7.0"
dictdiffer = "*"
tomlkit = "*"
tomlkit = { version = "*", optional = true, python="<3.11" }
tomli-w = { version = "^1.1.0", optional = true, python=">=3.11" }
asyncclick = "^8.1.7.2"

[tool.poetry.group.dev.dependencies]
@@ -41,6 +42,7 @@ mypy = "^1.10.0"
[tool.poetry.extras]
asyncmy = ["asyncmy"]
asyncpg = ["asyncpg"]
toml = ["tomlkit", "tomli-w"]


[tool.aerich]
@@ -67,5 +69,7 @@ pretty = true
python_version = "3.8"
ignore_missing_imports = true

[tool.ruff]
line-length = 100
[tool.ruff.lint]
ignore = ['E501']
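The tomlkit/tomli-w markers above split TOML handling at Python 3.11: newer interpreters can read with the standard-library tomllib and write with tomli-w, while 3.8-3.10 fall back to tomlkit for both. A minimal sketch of that version gate follows; the load_config/dump_config helpers and the direct pyproject.toml round-trip are illustrative assumptions, not aerich's actual API:

import sys
from pathlib import Path
from typing import Any, Dict

if sys.version_info >= (3, 11):
    import tomllib  # stdlib reader on 3.11+
    import tomli_w  # writer, installed via the "toml" extra

    def load_config(path: Path) -> Dict[str, Any]:
        return tomllib.loads(path.read_text(encoding="utf-8"))

    def dump_config(doc: Dict[str, Any], path: Path) -> None:
        path.write_text(tomli_w.dumps(doc), encoding="utf-8")

else:
    import tomlkit  # read/write fallback for Python 3.8-3.10

    def load_config(path: Path) -> Dict[str, Any]:
        return tomlkit.parse(path.read_text(encoding="utf-8"))

    def dump_config(doc: Dict[str, Any], path: Path) -> None:
        path.write_text(tomlkit.dumps(doc), encoding="utf-8")


if __name__ == "__main__":
    # Round-trip the config file; the [tool.aerich] table is the part aerich cares about.
    path = Path("pyproject.toml")
    cfg = load_config(path)
    print(cfg.get("tool", {}).get("aerich", {}))
    dump_config(cfg, path)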
7
tests/indexes.py
Normal file
@@ -0,0 +1,7 @@
from tortoise.indexes import Index


class CustomIndex(Index):
    def __init__(self, *args, **kw) -> None:
        super().__init__(*args, **kw)
        self._foo = ""
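For context, the subclass is meant to be usable anywhere a plain tortoise Index is, which is what the reordered Meta.indexes lists in tests/models.py and tests/old_models.py below exercise. The Demo model in this sketch is only an illustration and is not part of the test suite:

from tortoise import Model, fields
from tortoise.indexes import Index

from tests.indexes import CustomIndex


class Demo(Model):
    # Illustrative model: aerich only needs Index subclasses to behave like a
    # plain Index when it diffs Meta.indexes between model snapshots.
    username = fields.CharField(max_length=20)
    is_active = fields.BooleanField(default=True)
    is_superuser = fields.BooleanField(default=False)

    class Meta:
        indexes = [CustomIndex(fields=("is_superuser",)), Index(fields=("username", "is_active"))]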
@@ -3,6 +3,9 @@ import uuid
from enum import IntEnum

from tortoise import Model, fields
from tortoise.indexes import Index

from tests.indexes import CustomIndex


class ProductType(IntEnum):
@@ -31,6 +34,12 @@ class User(Model):
    intro = fields.TextField(default="")
    longitude = fields.DecimalField(max_digits=10, decimal_places=8)

    products: fields.ManyToManyRelation["Product"]

    class Meta:
        # reverse indexes elements
        indexes = [CustomIndex(fields=("is_superuser",)), Index(fields=("username", "is_active"))]


class Email(Model):
    email_id = fields.IntField(primary_key=True)
@@ -38,6 +47,7 @@ class Email(Model):
    is_primary = fields.BooleanField(default=False)
    address = fields.CharField(max_length=200)
    users: fields.ManyToManyRelation[User] = fields.ManyToManyField("models.User")
    config: fields.OneToOneRelation["Config"] = fields.OneToOneField("models.Config")


def default_name():
@@ -47,7 +57,7 @@ def default_name():
class Category(Model):
    slug = fields.CharField(max_length=100)
    name = fields.CharField(max_length=200, null=True, default=default_name)
    user: fields.ForeignKeyRelation[User] = fields.ForeignKeyField(
    owner: fields.ForeignKeyRelation[User] = fields.ForeignKeyField(
        "models.User", description="User"
    )
    title = fields.CharField(max_length=20, unique=False)
@@ -55,7 +65,12 @@ class Category(Model):


class Product(Model):
    categories: fields.ManyToManyRelation[Category] = fields.ManyToManyField("models.Category")
    categories: fields.ManyToManyRelation[Category] = fields.ManyToManyField(
        "models.Category", null=False
    )
    users: fields.ManyToManyRelation[User] = fields.ManyToManyField(
        "models.User", related_name="products"
    )
    name = fields.CharField(max_length=50)
    view_num = fields.IntField(description="View Num", default=0)
    sort = fields.IntField()
@@ -66,6 +81,7 @@ class Product(Model):
    pic = fields.CharField(max_length=200)
    body = fields.TextField()
    created_at = fields.DatetimeField(auto_now_add=True)
    is_deleted = fields.BooleanField(default=False)

    class Meta:
        unique_together = (("name", "type"),)
@@ -73,6 +89,9 @@ class Product(Model):


class Config(Model):
    categories: fields.ManyToManyRelation[Category] = fields.ManyToManyField(
        "models.Category", through="config_category_map", related_name="category_set"
    )
    label = fields.CharField(max_length=200)
    key = fields.CharField(max_length=20)
    value: dict = fields.JSONField()
@@ -81,6 +100,8 @@ class Config(Model):
        "models.User", description="User"
    )

    email: fields.OneToOneRelation["Email"]


class NewModel(Model):
    name = fields.CharField(max_length=50)
@@ -2,6 +2,9 @@ import datetime
from enum import IntEnum

from tortoise import Model, fields
from tortoise.indexes import Index

from tests.indexes import CustomIndex


class ProductType(IntEnum):
@@ -31,6 +34,9 @@ class User(Model):
    intro = fields.TextField(default="")
    longitude = fields.DecimalField(max_digits=12, decimal_places=9)

    class Meta:
        indexes = [Index(fields=("username", "is_active")), CustomIndex(fields=("is_superuser",))]


class Email(Model):
    email = fields.CharField(max_length=200)
@@ -55,16 +61,21 @@ class Product(Model):
    name = fields.CharField(max_length=50)
    view_num = fields.IntField(description="View Num")
    sort = fields.IntField()
    is_reviewed = fields.BooleanField(description="Is Reviewed")
    is_review = fields.BooleanField(description="Is Reviewed")
    type = fields.IntEnumField(
        ProductType, description="Product Type", source_field="type_db_alias"
    )
    image = fields.CharField(max_length=200)
    body = fields.TextField()
    created_at = fields.DatetimeField(auto_now_add=True)
    is_delete = fields.BooleanField(default=False)


class Config(Model):
    category: fields.ManyToManyRelation[Category] = fields.ManyToManyField("models.Category")
    categories: fields.ManyToManyRelation[Category] = fields.ManyToManyField(
        "models.Category", through="config_category_map", related_name="config_set"
    )
    name = fields.CharField(max_length=100, unique=True)
    label = fields.CharField(max_length=200)
    key = fields.CharField(max_length=20)
@@ -16,8 +16,8 @@ def test_create_table():
|
||||
`name` VARCHAR(200),
|
||||
`title` VARCHAR(20) NOT NULL,
|
||||
`created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6),
|
||||
`user_id` INT NOT NULL COMMENT 'User',
|
||||
CONSTRAINT `fk_category_user_e2e3874c` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE
|
||||
`owner_id` INT NOT NULL COMMENT 'User',
|
||||
CONSTRAINT `fk_category_user_110d4c63` FOREIGN KEY (`owner_id`) REFERENCES `user` (`id`) ON DELETE CASCADE
|
||||
) CHARACTER SET utf8mb4"""
|
||||
)
|
||||
|
||||
@@ -30,7 +30,7 @@ def test_create_table():
|
||||
"name" VARCHAR(200),
|
||||
"title" VARCHAR(20) NOT NULL,
|
||||
"created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE /* User */
|
||||
"owner_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE /* User */
|
||||
)"""
|
||||
)
|
||||
|
||||
@@ -43,9 +43,9 @@ def test_create_table():
|
||||
"name" VARCHAR(200),
|
||||
"title" VARCHAR(20) NOT NULL,
|
||||
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE
|
||||
"owner_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE
|
||||
);
|
||||
COMMENT ON COLUMN "category"."user_id" IS 'User'"""
|
||||
COMMENT ON COLUMN "category"."owner_id" IS 'User'"""
|
||||
)
|
||||
|
||||
|
||||
@@ -63,6 +63,14 @@ def test_add_column():
|
||||
assert ret == "ALTER TABLE `category` ADD `name` VARCHAR(200)"
|
||||
else:
|
||||
assert ret == 'ALTER TABLE "category" ADD "name" VARCHAR(200)'
|
||||
# add unique column
|
||||
ret = Migrate.ddl.add_column(User, User._meta.fields_map.get("username").describe(False))
if isinstance(Migrate.ddl, MysqlDDL):
assert ret == "ALTER TABLE `user` ADD `username` VARCHAR(20) NOT NULL UNIQUE"
elif isinstance(Migrate.ddl, PostgresDDL):
assert ret == 'ALTER TABLE "user" ADD "username" VARCHAR(20) NOT NULL UNIQUE'
else:
assert ret == 'ALTER TABLE "user" ADD "username" VARCHAR(20) NOT NULL'

def test_modify_column():
@@ -137,8 +145,8 @@ def test_set_comment():
ret = Migrate.ddl.set_comment(Category, Category._meta.fields_map.get("name").describe(False))
assert ret == 'COMMENT ON COLUMN "category"."name" IS NULL'

ret = Migrate.ddl.set_comment(Category, Category._meta.fields_map.get("user").describe(False))
assert ret == 'COMMENT ON COLUMN "category"."user_id" IS \'User\''
ret = Migrate.ddl.set_comment(Category, Category._meta.fields_map.get("owner").describe(False))
assert ret == 'COMMENT ON COLUMN "category"."owner_id" IS \'User\''

def test_drop_column():
@@ -155,14 +163,9 @@ def test_add_index():
if isinstance(Migrate.ddl, MysqlDDL):
assert index == "ALTER TABLE `category` ADD INDEX `idx_category_name_8b0cb9` (`name`)"
assert index_u == "ALTER TABLE `category` ADD UNIQUE INDEX `name` (`name`)"
elif isinstance(Migrate.ddl, PostgresDDL):
else:
assert index == 'CREATE INDEX "idx_category_name_8b0cb9" ON "category" ("name")'
assert index_u == 'CREATE UNIQUE INDEX "uid_category_name_8b0cb9" ON "category" ("name")'
else:
assert index == 'ALTER TABLE "category" ADD INDEX "idx_category_name_8b0cb9" ("name")'
assert (
index_u == 'ALTER TABLE "category" ADD UNIQUE INDEX "uid_category_name_8b0cb9" ("name")'
)

def test_drop_index():
@@ -171,37 +174,34 @@ def test_drop_index():
if isinstance(Migrate.ddl, MysqlDDL):
assert ret == "ALTER TABLE `category` DROP INDEX `idx_category_name_8b0cb9`"
assert ret_u == "ALTER TABLE `category` DROP INDEX `name`"
elif isinstance(Migrate.ddl, PostgresDDL):
assert ret == 'DROP INDEX "idx_category_name_8b0cb9"'
assert ret_u == 'DROP INDEX "uid_category_name_8b0cb9"'
else:
assert ret == 'ALTER TABLE "category" DROP INDEX "idx_category_name_8b0cb9"'
assert ret_u == 'ALTER TABLE "category" DROP INDEX "uid_category_name_8b0cb9"'
assert ret == 'DROP INDEX IF EXISTS "idx_category_name_8b0cb9"'
assert ret_u == 'DROP INDEX IF EXISTS "uid_category_name_8b0cb9"'

def test_add_fk():
ret = Migrate.ddl.add_fk(
Category, Category._meta.fields_map.get("user").describe(False), User.describe(False)
Category, Category._meta.fields_map.get("owner").describe(False), User.describe(False)
)
if isinstance(Migrate.ddl, MysqlDDL):
assert (
ret
== "ALTER TABLE `category` ADD CONSTRAINT `fk_category_user_e2e3874c` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE"
== "ALTER TABLE `category` ADD CONSTRAINT `fk_category_user_110d4c63` FOREIGN KEY (`owner_id`) REFERENCES `user` (`id`) ON DELETE CASCADE"
)
else:
assert (
ret
== 'ALTER TABLE "category" ADD CONSTRAINT "fk_category_user_e2e3874c" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE'
== 'ALTER TABLE "category" ADD CONSTRAINT "fk_category_user_110d4c63" FOREIGN KEY ("owner_id") REFERENCES "user" ("id") ON DELETE CASCADE'
)

def test_drop_fk():
ret = Migrate.ddl.drop_fk(
Category, Category._meta.fields_map.get("user").describe(False), User.describe(False)
Category, Category._meta.fields_map.get("owner").describe(False), User.describe(False)
)
if isinstance(Migrate.ddl, MysqlDDL):
assert ret == "ALTER TABLE `category` DROP FOREIGN KEY `fk_category_user_e2e3874c`"
assert ret == "ALTER TABLE `category` DROP FOREIGN KEY `fk_category_user_110d4c63`"
elif isinstance(Migrate.ddl, PostgresDDL):
assert ret == 'ALTER TABLE "category" DROP CONSTRAINT "fk_category_user_e2e3874c"'
assert ret == 'ALTER TABLE "category" DROP CONSTRAINT IF EXISTS "fk_category_user_110d4c63"'
else:
assert ret == 'ALTER TABLE "category" DROP FOREIGN KEY "fk_category_user_e2e3874c"'
assert ret == 'ALTER TABLE "category" DROP FOREIGN KEY "fk_category_user_110d4c63"'
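The Postgres drop_fk expectation above now uses DROP CONSTRAINT IF EXISTS. As a minimal, hypothetical sketch of rendering that statement (not aerich's actual DDL class, whose internals are not shown in this diff):

# Hypothetical helper; shown only to illustrate the expected SQL shape from test_drop_fk.
def render_drop_fk_postgres(table: str, constraint: str) -> str:
    # e.g. render_drop_fk_postgres("category", "fk_category_user_110d4c63")
    return f'ALTER TABLE "{table}" DROP CONSTRAINT IF EXISTS "{constraint}"'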
@@ -1,9 +1,9 @@
from pathlib import Path
from typing import List, cast

import pytest
import tortoise
from pytest_mock import MockerFixture
from tortoise.indexes import Index

from aerich.ddl.mysql import MysqlDDL
from aerich.ddl.postgres import PostgresDDL
@@ -11,7 +11,11 @@ from aerich.ddl.sqlite import SqliteDDL
from aerich.exceptions import NotSupportError
from aerich.migrate import MIGRATE_TEMPLATE, Migrate
from aerich.utils import get_models_describe
from tests.indexes import CustomIndex

# tortoise-orm>=0.21 changes IntField constraints
# from {"ge": 1, "le": 2147483647} to {"ge": -2147483648, "le": 2147483647}
MIN_INT = 1 if tortoise.__version__ < "0.21" else -2147483648
old_models_describe = {
"models.Category": {
"name": "models.Category",
@@ -34,7 +38,7 @@ old_models_describe = {
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
"constraints": {"ge": MIN_INT, "le": 2147483647},
"db_field_types": {"": "INT"},
},
"data_fields": [
@@ -101,7 +105,7 @@ old_models_describe = {
"default": None,
"description": "User",
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
"constraints": {"ge": MIN_INT, "le": 2147483647},
"db_field_types": {"": "INT"},
},
{
@@ -184,7 +188,7 @@ old_models_describe = {
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
"constraints": {"ge": MIN_INT, "le": 2147483647},
"db_field_types": {"": "INT"},
},
"data_fields": [
@@ -268,7 +272,48 @@ old_models_describe = {
"backward_fk_fields": [],
"o2o_fields": [],
"backward_o2o_fields": [],
"m2m_fields": [],
"m2m_fields": [
{
"name": "category",
"field_type": "ManyToManyFieldInstance",
"python_type": "models.Category",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {},
"model_name": "models.Category",
"related_name": "configs",
"forward_key": "category_id",
"backward_key": "config_id",
"through": "config_category",
"on_delete": "CASCADE",
"_generated": False,
},
{
"name": "categories",
"field_type": "ManyToManyFieldInstance",
"python_type": "models.Category",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {},
"model_name": "models.Category",
"related_name": "config_set",
"forward_key": "category_id",
"backward_key": "config_id",
"through": "config_category_map",
"on_delete": "CASCADE",
"_generated": False,
},
],
},
"models.Email": {
"name": "models.Email",
@@ -291,7 +336,7 @@ old_models_describe = {
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
"constraints": {"ge": MIN_INT, "le": 2147483647},
"db_field_types": {"": "INT"},
},
"data_fields": [
@@ -323,7 +368,12 @@ old_models_describe = {
"description": None,
"docstring": None,
"constraints": {},
"db_field_types": {"": "BOOL", "sqlite": "INT"},
"db_field_types": {
"": "BOOL",
"mssql": "BIT",
"oracle": "NUMBER(1)",
"sqlite": "INT",
},
},
{
"name": "user_id",
@@ -337,7 +387,7 @@ old_models_describe = {
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
"constraints": {"ge": MIN_INT, "le": 2147483647},
"db_field_types": {"": "INT"},
},
],
@@ -384,7 +434,7 @@ old_models_describe = {
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
"constraints": {"ge": MIN_INT, "le": 2147483647},
"db_field_types": {"": "INT"},
},
"data_fields": [
@@ -449,9 +499,9 @@ old_models_describe = {
"db_field_types": {"": "INT"},
},
{
"name": "is_reviewed",
"name": "is_review",
"field_type": "BooleanField",
"db_column": "is_reviewed",
"db_column": "is_review",
"python_type": "bool",
"generated": False,
"nullable": False,
@@ -461,7 +511,12 @@ old_models_describe = {
"description": "Is Reviewed",
"docstring": None,
"constraints": {},
"db_field_types": {"": "BOOL", "sqlite": "INT"},
"db_field_types": {
"": "BOOL",
"mssql": "BIT",
"oracle": "NUMBER(1)",
"sqlite": "INT",
},
},
{
"name": "type",
@@ -529,6 +584,26 @@ old_models_describe = {
"auto_now_add": True,
"auto_now": False,
},
{
"name": "is_delete",
"field_type": "BooleanField",
"db_column": "is_delete",
"python_type": "bool",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": False,
"description": None,
"docstring": None,
"constraints": {},
"db_field_types": {
"": "BOOL",
"mssql": "BIT",
"oracle": "NUMBER(1)",
"sqlite": "INT",
},
},
],
"fk_fields": [],
"backward_fk_fields": [],
@@ -565,7 +640,7 @@ old_models_describe = {
"description": None,
"docstring": None,
"unique_together": [],
"indexes": [],
"indexes": [Index(fields=("username", "is_active")), CustomIndex(fields=("is_superuser",))],
"pk_field": {
"name": "id",
"field_type": "IntField",
@@ -578,7 +653,7 @@ old_models_describe = {
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
"constraints": {"ge": MIN_INT, "le": 2147483647},
"db_field_types": {"": "INT"},
},
"data_fields": [
@@ -646,7 +721,12 @@ old_models_describe = {
"description": "Is Active",
"docstring": None,
"constraints": {},
"db_field_types": {"": "BOOL", "sqlite": "INT"},
"db_field_types": {
"": "BOOL",
"mssql": "BIT",
"oracle": "NUMBER(1)",
"sqlite": "INT",
},
},
{
"name": "is_superuser",
@@ -661,7 +741,12 @@ old_models_describe = {
"description": "Is SuperUser",
"docstring": None,
"constraints": {},
"db_field_types": {"": "BOOL", "sqlite": "INT"},
"db_field_types": {
"": "BOOL",
"mssql": "BIT",
"oracle": "NUMBER(1)",
"sqlite": "INT",
},
},
{
"name": "avatar",
@@ -763,7 +848,7 @@ old_models_describe = {
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
"constraints": {"ge": MIN_INT, "le": 2147483647},
"db_field_types": {"": "INT"},
},
"data_fields": [
@@ -822,35 +907,30 @@ old_models_describe = {
}

def should_add_user_id_column_type_alter_sql() -> bool:
if tortoise.__version__ < "0.21":
return False
# tortoise-orm>=0.21 changes IntField constraints
# from {"ge": 1, "le": 2147483647} to {"ge": -2147483648,"le": 2147483647}
data_fields = cast(List[dict], old_models_describe["models.Category"]["data_fields"])
user_id_constraints = data_fields[-1]["constraints"]
return tortoise.fields.data.IntField.constraints != user_id_constraints

def test_migrate(mocker: MockerFixture):
"""
models.py diff with old_models.py
- change email pk: id -> email_id
- add field: Email.address
- add fk: Config.user
- drop fk: Email.user
- add fk field: Config.user
- drop fk field: Email.user
- drop field: User.avatar
- add index: Email.email
- add many to many: Email.users
- add one to one: Email.config
- remove unique: Category.title
- add unique: User.username
- change column: length User.password
- add unique_together: (name,type) of Product
- add one more many to many field: Product.users
- drop unique field: Config.name
- alter default: Config.status
- rename column: Product.image -> Product.pic
- rename column: Product.is_review -> Product.is_reviewed
- rename column: Product.is_delete -> Product.is_deleted
- rename fk column: Category.user -> Category.owner
"""
mocker.patch("asyncclick.prompt", side_effect=(True,))
mocker.patch("asyncclick.prompt", side_effect=(True, True, True, True))

models_describe = get_models_describe("models")
Migrate.app = "models"
@@ -870,6 +950,9 @@ def test_migrate(mocker: MockerFixture):
"ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200)",
"ALTER TABLE `category` MODIFY COLUMN `slug` VARCHAR(100) NOT NULL",
"ALTER TABLE `category` DROP INDEX `title`",
"ALTER TABLE `category` RENAME COLUMN `user_id` TO `owner_id`",
"ALTER TABLE `category` ADD CONSTRAINT `fk_category_user_110d4c63` FOREIGN KEY (`owner_id`) REFERENCES `user` (`id`) ON DELETE CASCADE",
"ALTER TABLE `email` DROP COLUMN `user_id`",
"ALTER TABLE `config` DROP COLUMN `name`",
"ALTER TABLE `config` DROP INDEX `name`",
"ALTER TABLE `config` ADD `user_id` INT NOT NULL COMMENT 'User'",
@@ -877,7 +960,8 @@ def test_migrate(mocker: MockerFixture):
"ALTER TABLE `config` ALTER COLUMN `status` DROP DEFAULT",
"ALTER TABLE `config` MODIFY COLUMN `value` JSON NOT NULL",
"ALTER TABLE `email` ADD `address` VARCHAR(200) NOT NULL",
"ALTER TABLE `email` DROP COLUMN `user_id`",
"ALTER TABLE `email` ADD CONSTRAINT `fk_email_config_76a9dc71` FOREIGN KEY (`config_id`) REFERENCES `config` (`id`) ON DELETE CASCADE",
"ALTER TABLE `email` ADD `config_id` INT NOT NULL UNIQUE",
"ALTER TABLE `configs` RENAME TO `config`",
"ALTER TABLE `product` DROP COLUMN `uuid`",
"ALTER TABLE `product` DROP INDEX `uuid`",
@@ -888,32 +972,37 @@ def test_migrate(mocker: MockerFixture):
"ALTER TABLE `product` ADD UNIQUE INDEX `uid_product_name_869427` (`name`, `type_db_alias`)",
"ALTER TABLE `product` ALTER COLUMN `view_num` SET DEFAULT 0",
"ALTER TABLE `product` MODIFY COLUMN `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6)",
"ALTER TABLE `product` MODIFY COLUMN `is_reviewed` BOOL NOT NULL COMMENT 'Is Reviewed'",
"ALTER TABLE `product` RENAME COLUMN `is_delete` TO `is_deleted`",
"ALTER TABLE `product` RENAME COLUMN `is_review` TO `is_reviewed`",
"ALTER TABLE `user` DROP COLUMN `avatar`",
"ALTER TABLE `user` MODIFY COLUMN `password` VARCHAR(100) NOT NULL",
"ALTER TABLE `user` MODIFY COLUMN `intro` LONGTEXT NOT NULL",
"ALTER TABLE `user` MODIFY COLUMN `last_login` DATETIME(6) NOT NULL COMMENT 'Last Login'",
"ALTER TABLE `user` MODIFY COLUMN `is_active` BOOL NOT NULL COMMENT 'Is Active' DEFAULT 1",
"ALTER TABLE `user` MODIFY COLUMN `is_superuser` BOOL NOT NULL COMMENT 'Is SuperUser' DEFAULT 0",
"ALTER TABLE `user` MODIFY COLUMN `longitude` DECIMAL(10,8) NOT NULL",
"ALTER TABLE `user` ADD UNIQUE INDEX `username` (`username`)",
"CREATE TABLE `email_user` (\n `email_id` INT NOT NULL REFERENCES `email` (`email_id`) ON DELETE CASCADE,\n `user_id` INT NOT NULL REFERENCES `user` (`id`) ON DELETE CASCADE\n) CHARACTER SET utf8mb4",
"CREATE TABLE IF NOT EXISTS `newmodel` (\n `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT,\n `name` VARCHAR(50) NOT NULL\n) CHARACTER SET utf8mb4",
"ALTER TABLE `category` MODIFY COLUMN `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6)",
"ALTER TABLE `product` MODIFY COLUMN `body` LONGTEXT NOT NULL",
"ALTER TABLE `email` MODIFY COLUMN `is_primary` BOOL NOT NULL DEFAULT 0",
"CREATE TABLE `product_user` (\n `product_id` INT NOT NULL REFERENCES `product` (`id`) ON DELETE CASCADE,\n `user_id` INT NOT NULL REFERENCES `user` (`id`) ON DELETE CASCADE\n) CHARACTER SET utf8mb4",
"CREATE TABLE `config_category_map` (\n `category_id` INT NOT NULL REFERENCES `category` (`id`) ON DELETE CASCADE,\n `config_id` INT NOT NULL REFERENCES `config` (`id`) ON DELETE CASCADE\n) CHARACTER SET utf8mb4",
"DROP TABLE IF EXISTS `config_category`",
}
expected_downgrade_operators = {
"ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200) NOT NULL",
"ALTER TABLE `category` MODIFY COLUMN `slug` VARCHAR(200) NOT NULL",
"ALTER TABLE `category` ADD UNIQUE INDEX `title` (`title`)",
"ALTER TABLE `category` RENAME COLUMN `owner_id` TO `user_id`",
"ALTER TABLE `category` DROP FOREIGN KEY `fk_category_user_110d4c63`",
"ALTER TABLE `config` ADD `name` VARCHAR(100) NOT NULL UNIQUE",
"ALTER TABLE `config` ADD UNIQUE INDEX `name` (`name`)",
"ALTER TABLE `config` DROP COLUMN `user_id`",
"ALTER TABLE `config` DROP FOREIGN KEY `fk_config_user_17daa970`",
"ALTER TABLE `config` ALTER COLUMN `status` SET DEFAULT 1",
"ALTER TABLE `email` ADD `user_id` INT NOT NULL",
"ALTER TABLE `config` DROP COLUMN `user_id`",
"ALTER TABLE `email` DROP COLUMN `address`",
"ALTER TABLE `email` DROP COLUMN `config_id`",
"ALTER TABLE `email` DROP FOREIGN KEY `fk_email_config_76a9dc71`",
"ALTER TABLE `config` RENAME TO `configs`",
"ALTER TABLE `product` RENAME COLUMN `pic` TO `image`",
"ALTER TABLE `email` RENAME COLUMN `email_id` TO `id`",
@@ -923,27 +1012,24 @@ def test_migrate(mocker: MockerFixture):
"ALTER TABLE `email` DROP INDEX `idx_email_email_4a1a33`",
"ALTER TABLE `product` DROP INDEX `uid_product_name_869427`",
"ALTER TABLE `product` ALTER COLUMN `view_num` DROP DEFAULT",
"ALTER TABLE `product` RENAME COLUMN `is_deleted` TO `is_delete`",
"ALTER TABLE `product` RENAME COLUMN `is_reviewed` TO `is_review`",
"ALTER TABLE `user` ADD `avatar` VARCHAR(200) NOT NULL DEFAULT ''",
"ALTER TABLE `user` DROP INDEX `username`",
"ALTER TABLE `user` MODIFY COLUMN `password` VARCHAR(200) NOT NULL",
"DROP TABLE IF EXISTS `email_user`",
"DROP TABLE IF EXISTS `newmodel`",
"DROP TABLE IF EXISTS `product_user`",
"ALTER TABLE `user` MODIFY COLUMN `intro` LONGTEXT NOT NULL",
"ALTER TABLE `config` MODIFY COLUMN `value` TEXT NOT NULL",
"ALTER TABLE `category` MODIFY COLUMN `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6)",
"ALTER TABLE `product` MODIFY COLUMN `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6)",
"ALTER TABLE `product` MODIFY COLUMN `is_reviewed` BOOL NOT NULL COMMENT 'Is Reviewed'",
"ALTER TABLE `user` MODIFY COLUMN `last_login` DATETIME(6) NOT NULL COMMENT 'Last Login'",
"ALTER TABLE `user` MODIFY COLUMN `is_active` BOOL NOT NULL COMMENT 'Is Active' DEFAULT 1",
"ALTER TABLE `user` MODIFY COLUMN `is_superuser` BOOL NOT NULL COMMENT 'Is SuperUser' DEFAULT 0",
"ALTER TABLE `user` MODIFY COLUMN `longitude` DECIMAL(12,9) NOT NULL",
"ALTER TABLE `product` MODIFY COLUMN `body` LONGTEXT NOT NULL",
"ALTER TABLE `email` MODIFY COLUMN `is_primary` BOOL NOT NULL DEFAULT 0",
"CREATE TABLE `config_category` (\n `config_id` INT NOT NULL REFERENCES `config` (`id`) ON DELETE CASCADE,\n `category_id` INT NOT NULL REFERENCES `category` (`id`) ON DELETE CASCADE\n) CHARACTER SET utf8mb4",
"DROP TABLE IF EXISTS `config_category_map`",
}
if should_add_user_id_column_type_alter_sql():
sql = "ALTER TABLE `category` MODIFY COLUMN `user_id` INT NOT NULL COMMENT 'User'"
expected_upgrade_operators.add(sql)
expected_downgrade_operators.add(sql)
assert not set(Migrate.upgrade_operators).symmetric_difference(expected_upgrade_operators)

assert not set(Migrate.downgrade_operators).symmetric_difference(
@@ -952,34 +1038,36 @@ def test_migrate(mocker: MockerFixture):

elif isinstance(Migrate.ddl, PostgresDDL):
expected_upgrade_operators = {
'DROP INDEX "uid_category_title_f7fc03"',
'DROP INDEX IF EXISTS "uid_category_title_f7fc03"',
'ALTER TABLE "category" ALTER COLUMN "name" DROP NOT NULL',
'ALTER TABLE "category" ALTER COLUMN "slug" TYPE VARCHAR(100) USING "slug"::VARCHAR(100)',
'ALTER TABLE "category" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ',
'ALTER TABLE "category" RENAME COLUMN "user_id" TO "owner_id"',
'ALTER TABLE "category" ADD CONSTRAINT "fk_category_user_110d4c63" FOREIGN KEY ("owner_id") REFERENCES "user" ("id") ON DELETE CASCADE',
'ALTER TABLE "config" DROP COLUMN "name"',
'DROP INDEX "uid_config_name_2c83c8"',
'DROP INDEX IF EXISTS "uid_config_name_2c83c8"',
'ALTER TABLE "config" ADD "user_id" INT NOT NULL',
'ALTER TABLE "config" ADD CONSTRAINT "fk_config_user_17daa970" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE',
'ALTER TABLE "config" ALTER COLUMN "status" DROP DEFAULT',
'ALTER TABLE "config" ALTER COLUMN "value" TYPE JSONB USING "value"::JSONB',
'ALTER TABLE "configs" RENAME TO "config"',
'ALTER TABLE "email" ADD "address" VARCHAR(200) NOT NULL',
'ALTER TABLE "email" DROP COLUMN "user_id"',
'ALTER TABLE "email" RENAME COLUMN "id" TO "email_id"',
'ALTER TABLE "email" ALTER COLUMN "is_primary" TYPE BOOL USING "is_primary"::BOOL',
'DROP INDEX "uid_product_uuid_d33c18"',
'ALTER TABLE "email" DROP COLUMN "user_id"',
'ALTER TABLE "email" ADD CONSTRAINT "fk_email_config_76a9dc71" FOREIGN KEY ("config_id") REFERENCES "config" ("id") ON DELETE CASCADE',
'ALTER TABLE "email" ADD "config_id" INT NOT NULL UNIQUE',
'DROP INDEX IF EXISTS "uid_product_uuid_d33c18"',
'ALTER TABLE "product" DROP COLUMN "uuid"',
'ALTER TABLE "product" ALTER COLUMN "view_num" SET DEFAULT 0',
'ALTER TABLE "product" RENAME COLUMN "image" TO "pic"',
'ALTER TABLE "product" ALTER COLUMN "is_reviewed" TYPE BOOL USING "is_reviewed"::BOOL',
'ALTER TABLE "product" ALTER COLUMN "body" TYPE TEXT USING "body"::TEXT',
'ALTER TABLE "product" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ',
'ALTER TABLE "product" RENAME COLUMN "is_review" TO "is_reviewed"',
'ALTER TABLE "product" RENAME COLUMN "is_delete" TO "is_deleted"',
'ALTER TABLE "user" ALTER COLUMN "password" TYPE VARCHAR(100) USING "password"::VARCHAR(100)',
'ALTER TABLE "user" DROP COLUMN "avatar"',
'ALTER TABLE "user" ALTER COLUMN "is_superuser" TYPE BOOL USING "is_superuser"::BOOL',
'ALTER TABLE "user" ALTER COLUMN "last_login" TYPE TIMESTAMPTZ USING "last_login"::TIMESTAMPTZ',
'ALTER TABLE "user" ALTER COLUMN "intro" TYPE TEXT USING "intro"::TEXT',
'ALTER TABLE "user" ALTER COLUMN "is_active" TYPE BOOL USING "is_active"::BOOL',
'ALTER TABLE "user" ALTER COLUMN "longitude" TYPE DECIMAL(10,8) USING "longitude"::DECIMAL(10,8)',
'CREATE INDEX "idx_product_name_869427" ON "product" ("name", "type_db_alias")',
'CREATE INDEX "idx_email_email_4a1a33" ON "email" ("email")',
@@ -987,48 +1075,52 @@ def test_migrate(mocker: MockerFixture):
'CREATE TABLE IF NOT EXISTS "newmodel" (\n "id" SERIAL NOT NULL PRIMARY KEY,\n "name" VARCHAR(50) NOT NULL\n);\nCOMMENT ON COLUMN "config"."user_id" IS \'User\'',
'CREATE UNIQUE INDEX "uid_product_name_869427" ON "product" ("name", "type_db_alias")',
'CREATE UNIQUE INDEX "uid_user_usernam_9987ab" ON "user" ("username")',
'CREATE TABLE "product_user" (\n "product_id" INT NOT NULL REFERENCES "product" ("id") ON DELETE CASCADE,\n "user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE\n)',
'CREATE TABLE "config_category_map" (\n "category_id" INT NOT NULL REFERENCES "category" ("id") ON DELETE CASCADE,\n "config_id" INT NOT NULL REFERENCES "config" ("id") ON DELETE CASCADE\n)',
'DROP TABLE IF EXISTS "config_category"',
}
expected_downgrade_operators = {
'CREATE UNIQUE INDEX "uid_category_title_f7fc03" ON "category" ("title")',
'ALTER TABLE "category" ALTER COLUMN "name" SET NOT NULL',
'ALTER TABLE "category" ALTER COLUMN "slug" TYPE VARCHAR(200) USING "slug"::VARCHAR(200)',
'ALTER TABLE "category" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ',
'ALTER TABLE "category" RENAME COLUMN "owner_id" TO "user_id"',
'ALTER TABLE "category" DROP CONSTRAINT IF EXISTS "fk_category_user_110d4c63"',
'ALTER TABLE "config" ADD "name" VARCHAR(100) NOT NULL UNIQUE',
'CREATE UNIQUE INDEX "uid_config_name_2c83c8" ON "config" ("name")',
'ALTER TABLE "config" ALTER COLUMN "status" SET DEFAULT 1',
'ALTER TABLE "config" DROP COLUMN "user_id"',
'ALTER TABLE "config" DROP CONSTRAINT "fk_config_user_17daa970"',
'ALTER TABLE "config" DROP CONSTRAINT IF EXISTS "fk_config_user_17daa970"',
'ALTER TABLE "config" RENAME TO "configs"',
'ALTER TABLE "config" ALTER COLUMN "value" TYPE JSONB USING "value"::JSONB',
'ALTER TABLE "config" DROP COLUMN "user_id"',
'ALTER TABLE "email" ADD "user_id" INT NOT NULL',
'ALTER TABLE "email" DROP COLUMN "address"',
'ALTER TABLE "email" RENAME COLUMN "email_id" TO "id"',
'ALTER TABLE "email" ALTER COLUMN "is_primary" TYPE BOOL USING "is_primary"::BOOL',
'ALTER TABLE "email" DROP COLUMN "config_id"',
'ALTER TABLE "email" DROP CONSTRAINT IF EXISTS "fk_email_config_76a9dc71"',
'ALTER TABLE "product" ADD "uuid" INT NOT NULL UNIQUE',
'CREATE UNIQUE INDEX "uid_product_uuid_d33c18" ON "product" ("uuid")',
'ALTER TABLE "product" ALTER COLUMN "view_num" DROP DEFAULT',
'ALTER TABLE "product" RENAME COLUMN "pic" TO "image"',
'ALTER TABLE "product" RENAME COLUMN "is_deleted" TO "is_delete"',
'ALTER TABLE "product" RENAME COLUMN "is_reviewed" TO "is_review"',
'ALTER TABLE "user" ADD "avatar" VARCHAR(200) NOT NULL DEFAULT \'\'',
'ALTER TABLE "user" ALTER COLUMN "password" TYPE VARCHAR(200) USING "password"::VARCHAR(200)',
'ALTER TABLE "user" ALTER COLUMN "last_login" TYPE TIMESTAMPTZ USING "last_login"::TIMESTAMPTZ',
'ALTER TABLE "user" ALTER COLUMN "is_superuser" TYPE BOOL USING "is_superuser"::BOOL',
'ALTER TABLE "user" ALTER COLUMN "is_active" TYPE BOOL USING "is_active"::BOOL',
'ALTER TABLE "user" ALTER COLUMN "intro" TYPE TEXT USING "intro"::TEXT',
'ALTER TABLE "user" ALTER COLUMN "longitude" TYPE DECIMAL(12,9) USING "longitude"::DECIMAL(12,9)',
'ALTER TABLE "product" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ',
'ALTER TABLE "product" ALTER COLUMN "is_reviewed" TYPE BOOL USING "is_reviewed"::BOOL',
'ALTER TABLE "product" ALTER COLUMN "body" TYPE TEXT USING "body"::TEXT',
'DROP INDEX "idx_product_name_869427"',
'DROP INDEX "idx_email_email_4a1a33"',
'DROP INDEX "uid_user_usernam_9987ab"',
'DROP INDEX "uid_product_name_869427"',
'DROP TABLE IF EXISTS "product_user"',
'DROP INDEX IF EXISTS "idx_product_name_869427"',
'DROP INDEX IF EXISTS "idx_email_email_4a1a33"',
'DROP INDEX IF EXISTS "uid_user_usernam_9987ab"',
'DROP INDEX IF EXISTS "uid_product_name_869427"',
'DROP TABLE IF EXISTS "email_user"',
'DROP TABLE IF EXISTS "newmodel"',
'CREATE TABLE "config_category" (\n "config_id" INT NOT NULL REFERENCES "config" ("id") ON DELETE CASCADE,\n "category_id" INT NOT NULL REFERENCES "category" ("id") ON DELETE CASCADE\n)',
'DROP TABLE IF EXISTS "config_category_map"',
}
if should_add_user_id_column_type_alter_sql():
sql = 'ALTER TABLE "category" ALTER COLUMN "user_id" TYPE INT USING "user_id"::INT'
expected_upgrade_operators.add(sql)
expected_downgrade_operators.add(sql)
assert not set(Migrate.upgrade_operators).symmetric_difference(expected_upgrade_operators)
assert not set(Migrate.downgrade_operators).symmetric_difference(
expected_downgrade_operators
@@ -1060,6 +1152,30 @@ def test_sort_all_version_files(mocker):
]

def test_sort_files_containing_non_migrations(mocker):
mocker.patch(
"os.listdir",
return_value=[
"1_datetime_update.py",
"11_datetime_update.py",
"10_datetime_update.py",
"2_datetime_update.py",
"not_a_migration.py",
"999.py",
"123foo_not_a_migration.py",
],
)

Migrate.migrate_location = "."

assert Migrate.get_all_version_files() == [
"1_datetime_update.py",
"2_datetime_update.py",
"10_datetime_update.py",
"11_datetime_update.py",
]

async def test_empty_migration(mocker, tmp_path: Path) -> None:
mocker.patch("os.listdir", return_value=[])
Migrate.app = "foo"
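The upgrade/downgrade assertions in test_migrate compare the generated SQL as unordered sets via symmetric_difference. A minimal sketch of that pattern, using a hypothetical helper name not present in the diff:

def assert_same_operators(actual: list, expected: set) -> None:
    # Generation order is not guaranteed, so compare as sets and report any mismatch.
    diff = set(actual).symmetric_difference(expected)
    assert not diff, f"unexpected operator difference: {diff}"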
304
tests/test_sqlite_migrate.py
Normal file
@@ -0,0 +1,304 @@
import contextlib
import os
import shlex
import shutil
import subprocess
import sys
from pathlib import Path

from aerich.ddl.sqlite import SqliteDDL
from aerich.migrate import Migrate

if sys.version_info >= (3, 11):
from contextlib import chdir
else:

class chdir(contextlib.AbstractContextManager): # Copied from source code of Python3.13
"""Non thread-safe context manager to change the current working directory."""

def __init__(self, path):
self.path = path
self._old_cwd = []

def __enter__(self):
self._old_cwd.append(os.getcwd())
os.chdir(self.path)

def __exit__(self, *excinfo):
os.chdir(self._old_cwd.pop())

MODELS = """from __future__ import annotations

from tortoise import Model, fields


class Foo(Model):
name = fields.CharField(max_length=60, db_index=False)
"""

SETTINGS = """from __future__ import annotations

TORTOISE_ORM = {
"connections": {"default": "sqlite://db.sqlite3"},
"apps": {"models": {"models": ["models", "aerich.models"]}},
}
"""

CONFTEST = """from __future__ import annotations

import asyncio
from typing import Generator

import pytest
import pytest_asyncio
from tortoise import Tortoise, connections

import settings


@pytest.fixture(scope="session")
def event_loop() -> Generator:
policy = asyncio.get_event_loop_policy()
res = policy.new_event_loop()
asyncio.set_event_loop(res)
res._close = res.close # type:ignore[attr-defined]
res.close = lambda: None # type:ignore[method-assign]

yield res

res._close() # type:ignore[attr-defined]


@pytest_asyncio.fixture(scope="session", autouse=True)
async def api(event_loop, request):
await Tortoise.init(config=settings.TORTOISE_ORM)
request.addfinalizer(lambda: event_loop.run_until_complete(connections.close_all(discard=True)))
"""

TESTS = """from __future__ import annotations

import uuid

import pytest
from tortoise.exceptions import IntegrityError

from models import Foo


@pytest.mark.asyncio
async def test_allow_duplicate() -> None:
await Foo.all().delete()
await Foo.create(name="foo")
obj = await Foo.create(name="foo")
assert (await Foo.all().count()) == 2
await obj.delete()


@pytest.mark.asyncio
async def test_unique_is_true() -> None:
with pytest.raises(IntegrityError):
await Foo.create(name="foo")


@pytest.mark.asyncio
async def test_add_unique_field() -> None:
if not await Foo.filter(age=0).exists():
await Foo.create(name="0_"+uuid.uuid4().hex, age=0)
with pytest.raises(IntegrityError):
await Foo.create(name=uuid.uuid4().hex, age=0)


@pytest.mark.asyncio
async def test_drop_unique_field() -> None:
name = "1_" + uuid.uuid4().hex
await Foo.create(name=name, age=0)
assert (await Foo.filter(name=name).exists())


@pytest.mark.asyncio
async def test_with_age_field() -> None:
name = "2_" + uuid.uuid4().hex
await Foo.create(name=name, age=0)
obj = await Foo.get(name=name)
assert obj.age == 0


@pytest.mark.asyncio
async def test_without_age_field() -> None:
name = "3_" + uuid.uuid4().hex
await Foo.create(name=name, age=0)
obj = await Foo.get(name=name)
assert getattr(obj, "age", None) is None


@pytest.mark.asyncio
async def test_m2m_with_custom_through() -> None:
from models import Group, FooGroup
name = "4_" + uuid.uuid4().hex
foo = await Foo.create(name=name)
group = await Group.create(name=name+"1")
await FooGroup.all().delete()
await foo.groups.add(group)
foo_group = await FooGroup.get(foo=foo, group=group)
assert not foo_group.is_active


@pytest.mark.asyncio
async def test_add_m2m_field_after_init_db() -> None:
from models import Group
name = "5_" + uuid.uuid4().hex
foo = await Foo.create(name=name)
group = await Group.create(name=name+"1")
await foo.groups.add(group)
assert (await group.users.all().first()) == foo
"""

def run_aerich(cmd: str) -> None:
with contextlib.suppress(subprocess.TimeoutExpired):
if not cmd.startswith("aerich"):
cmd = "aerich " + cmd
subprocess.run(shlex.split(cmd), timeout=2)

def run_shell(cmd: str) -> subprocess.CompletedProcess:
envs = dict(os.environ, PYTHONPATH=".")
return subprocess.run(shlex.split(cmd), env=envs)

def test_sqlite_migrate(tmp_path: Path) -> None:
if (ddl := getattr(Migrate, "ddl", None)) and not isinstance(ddl, SqliteDDL):
return
with chdir(tmp_path):
models_py = Path("models.py")
settings_py = Path("settings.py")
test_py = Path("_test.py")
models_py.write_text(MODELS)
settings_py.write_text(SETTINGS)
test_py.write_text(TESTS)
Path("conftest.py").write_text(CONFTEST)
if (db_file := Path("db.sqlite3")).exists():
db_file.unlink()
run_aerich("aerich init -t settings.TORTOISE_ORM")
run_aerich("aerich init-db")
r = run_shell("pytest _test.py::test_allow_duplicate")
assert r.returncode == 0
# Add index
models_py.write_text(MODELS.replace("index=False", "index=True"))
run_aerich("aerich migrate") # migrations/models/1_
run_aerich("aerich upgrade")
r = run_shell("pytest -s _test.py::test_allow_duplicate")
assert r.returncode == 0
# Drop index
models_py.write_text(MODELS)
run_aerich("aerich migrate") # migrations/models/2_
run_aerich("aerich upgrade")
r = run_shell("pytest -s _test.py::test_allow_duplicate")
assert r.returncode == 0
# Add unique index
models_py.write_text(MODELS.replace("index=False", "index=True, unique=True"))
run_aerich("aerich migrate") # migrations/models/3_
run_aerich("aerich upgrade")
r = run_shell("pytest _test.py::test_unique_is_true")
assert r.returncode == 0
# Drop unique index
models_py.write_text(MODELS)
run_aerich("aerich migrate") # migrations/models/4_
run_aerich("aerich upgrade")
r = run_shell("pytest _test.py::test_allow_duplicate")
assert r.returncode == 0
# Add field with unique=True
with models_py.open("a") as f:
f.write(" age = fields.IntField(unique=True, default=0)")
run_aerich("aerich migrate") # migrations/models/5_
run_aerich("aerich upgrade")
r = run_shell("pytest _test.py::test_add_unique_field")
assert r.returncode == 0
# Drop unique field
models_py.write_text(MODELS)
run_aerich("aerich migrate") # migrations/models/6_
run_aerich("aerich upgrade")
r = run_shell("pytest -s _test.py::test_drop_unique_field")
assert r.returncode == 0

# Initial with indexed field and then drop it
migrations_dir = Path("migrations/models")
shutil.rmtree(migrations_dir)
db_file.unlink()
models_py.write_text(MODELS + " age = fields.IntField(db_index=True)")
run_aerich("aerich init -t settings.TORTOISE_ORM")
run_aerich("aerich init-db")
migration_file = list(migrations_dir.glob("0_*.py"))[0]
assert "CREATE INDEX" in migration_file.read_text()
r = run_shell("pytest _test.py::test_with_age_field")
assert r.returncode == 0
models_py.write_text(MODELS)
run_aerich("aerich migrate")
run_aerich("aerich upgrade")
migration_file_1 = list(migrations_dir.glob("1_*.py"))[0]
assert "DROP INDEX" in migration_file_1.read_text()
r = run_shell("pytest _test.py::test_without_age_field")
assert r.returncode == 0

# Generate migration file in empty directory
db_file.unlink()
run_aerich("aerich init-db")
assert not db_file.exists()
for p in migrations_dir.glob("*"):
if p.is_dir():
shutil.rmtree(p)
else:
p.unlink()
run_aerich("aerich init-db")
assert db_file.exists()

# init without '[tool]' section in pyproject.toml
config_file = Path("pyproject.toml")
config_file.write_text('[project]\nname = "project"')
run_aerich("init -t settings.TORTOISE_ORM")
assert "[tool.aerich]" in config_file.read_text()

# add m2m with custom model for through
new = """
groups = fields.ManyToManyField("models.Group", through="foo_group")

class Group(Model):
name = fields.CharField(max_length=60)

class FooGroup(Model):
foo = fields.ForeignKeyField("models.Foo")
group = fields.ForeignKeyField("models.Group")
is_active = fields.BooleanField(default=False)

class Meta:
table = "foo_group"
"""
models_py.write_text(MODELS + new)
run_aerich("aerich migrate")
run_aerich("aerich upgrade")
migration_file_1 = list(migrations_dir.glob("1_*.py"))[0]
assert "foo_group" in migration_file_1.read_text()
r = run_shell("pytest _test.py::test_m2m_with_custom_through")
assert r.returncode == 0

# add m2m field after init-db
new = """
groups = fields.ManyToManyField("models.Group", through="foo_group", related_name="users")

class Group(Model):
name = fields.CharField(max_length=60)
"""
if db_file.exists():
db_file.unlink()
if migrations_dir.exists():
shutil.rmtree(migrations_dir)
models_py.write_text(MODELS)
run_aerich("aerich init-db")
models_py.write_text(MODELS + new)
run_aerich("aerich migrate")
run_aerich("aerich upgrade")
migration_file_1 = list(migrations_dir.glob("1_*.py"))[0]
assert "foo_group" in migration_file_1.read_text()
r = run_shell("pytest _test.py::test_add_m2m_field_after_init_db")
assert r.returncode == 0
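test_sqlite_migrate drives the real aerich CLI against a throwaway project in a temporary directory. A minimal sketch of that bootstrap pattern, reusing the chdir and run_aerich helpers defined above (the function name here is illustrative, not part of the diff):

from pathlib import Path

def bootstrap_project(tmp_path: Path, models_source: str, settings_source: str) -> None:
    # Write a disposable project, then let aerich create its config and initial migration.
    (tmp_path / "models.py").write_text(models_source)
    (tmp_path / "settings.py").write_text(settings_source)
    with chdir(tmp_path):
        run_aerich("aerich init -t settings.TORTOISE_ORM")
        run_aerich("aerich init-db")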
@@ -1,6 +1,164 @@
from aerich.utils import import_py_file
from aerich.utils import get_dict_diff_by_key, import_py_file

def test_import_py_file() -> None:
m = import_py_file("aerich/utils.py")
assert getattr(m, "import_py_file")
assert getattr(m, "import_py_file", None)

class TestDiffFields:
def test_the_same_through_order(self) -> None:
old = [
{"name": "users", "through": "users_group"},
{"name": "admins", "through": "admins_group"},
]
new = [
{"name": "members", "through": "users_group"},
{"name": "admins", "through": "admins_group"},
]
diffs = list(get_dict_diff_by_key(old, new))
assert type(get_dict_diff_by_key(old, new)).__name__ == "generator"
assert len(diffs) == 1
assert diffs == [("change", [0, "name"], ("users", "members"))]

def test_same_through_with_different_orders(self) -> None:
old = [
{"name": "users", "through": "users_group"},
{"name": "admins", "through": "admins_group"},
]
new = [
{"name": "admins", "through": "admins_group"},
{"name": "members", "through": "users_group"},
]
diffs = list(get_dict_diff_by_key(old, new))
assert len(diffs) == 1
assert diffs == [("change", [0, "name"], ("users", "members"))]

def test_the_same_field_name_order(self) -> None:
old = [
{"name": "users", "through": "users_group"},
{"name": "admins", "through": "admins_group"},
]
new = [
{"name": "users", "through": "user_groups"},
{"name": "admins", "through": "admin_groups"},
]
diffs = list(get_dict_diff_by_key(old, new))
assert len(diffs) == 4
assert diffs == [
("remove", "", [(0, {"name": "users", "through": "users_group"})]),
("remove", "", [(0, {"name": "admins", "through": "admins_group"})]),
("add", "", [(0, {"name": "users", "through": "user_groups"})]),
("add", "", [(0, {"name": "admins", "through": "admin_groups"})]),
]

def test_same_field_name_with_different_orders(self) -> None:
old = [
{"name": "admins", "through": "admins_group"},
{"name": "users", "through": "users_group"},
]
new = [
{"name": "users", "through": "user_groups"},
{"name": "admins", "through": "admin_groups"},
]
diffs = list(get_dict_diff_by_key(old, new))
assert len(diffs) == 4
assert diffs == [
("remove", "", [(0, {"name": "admins", "through": "admins_group"})]),
("remove", "", [(0, {"name": "users", "through": "users_group"})]),
("add", "", [(0, {"name": "users", "through": "user_groups"})]),
("add", "", [(0, {"name": "admins", "through": "admin_groups"})]),
]

def test_drop_one(self) -> None:
old = [
{"name": "users", "through": "users_group"},
{"name": "admins", "through": "admins_group"},
]
new = [
{"name": "admins", "through": "admins_group"},
]
diffs = list(get_dict_diff_by_key(old, new))
assert len(diffs) == 1
assert diffs == [("remove", "", [(0, {"name": "users", "through": "users_group"})])]

def test_add_one(self) -> None:
old = [
{"name": "admins", "through": "admins_group"},
]
new = [
{"name": "users", "through": "users_group"},
{"name": "admins", "through": "admins_group"},
]
diffs = list(get_dict_diff_by_key(old, new))
assert len(diffs) == 1
assert diffs == [("add", "", [(0, {"name": "users", "through": "users_group"})])]

def test_drop_some(self) -> None:
old = [
{"name": "users", "through": "users_group"},
{"name": "admins", "through": "admins_group"},
{"name": "staffs", "through": "staffs_group"},
]
new = [
{"name": "admins", "through": "admins_group"},
]
diffs = list(get_dict_diff_by_key(old, new))
assert len(diffs) == 2
assert diffs == [
("remove", "", [(0, {"name": "users", "through": "users_group"})]),
("remove", "", [(0, {"name": "staffs", "through": "staffs_group"})]),
]

def test_add_some(self) -> None:
old = [
{"name": "staffs", "through": "staffs_group"},
]
new = [
{"name": "users", "through": "users_group"},
{"name": "admins", "through": "admins_group"},
{"name": "staffs", "through": "staffs_group"},
]
diffs = list(get_dict_diff_by_key(old, new))
assert len(diffs) == 2
assert diffs == [
("add", "", [(0, {"name": "users", "through": "users_group"})]),
("add", "", [(0, {"name": "admins", "through": "admins_group"})]),
]

def test_some_through_unchanged(self) -> None:
old = [
{"name": "staffs", "through": "staffs_group"},
{"name": "admins", "through": "admins_group"},
]
new = [
{"name": "users", "through": "users_group"},
{"name": "admins_new", "through": "admins_group"},
{"name": "staffs_new", "through": "staffs_group"},
]
diffs = list(get_dict_diff_by_key(old, new))
assert len(diffs) == 3
assert diffs == [
("change", [0, "name"], ("staffs", "staffs_new")),
("change", [0, "name"], ("admins", "admins_new")),
("add", "", [(0, {"name": "users", "through": "users_group"})]),
]

def test_some_unchanged_without_drop_or_add(self) -> None:
old = [
{"name": "staffs", "through": "staffs_group"},
{"name": "admins", "through": "admins_group"},
{"name": "users", "through": "users_group"},
]
new = [
{"name": "users_new", "through": "users_group"},
{"name": "admins_new", "through": "admins_group"},
{"name": "staffs_new", "through": "staffs_group"},
]
diffs = list(get_dict_diff_by_key(old, new))
assert len(diffs) == 3
assert diffs == [
("change", [0, "name"], ("staffs", "staffs_new")),
("change", [0, "name"], ("admins", "admins_new")),
("change", [0, "name"], ("users", "users_new")),
]
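Taken together, these cases document get_dict_diff_by_key: old and new m2m field dicts are matched by their "through" value, so a rename shows up as a single dictdiffer-style "change" action instead of a drop-and-add pair. A minimal usage sketch reusing data from the first case above:

from aerich.utils import get_dict_diff_by_key

old = [
    {"name": "users", "through": "users_group"},
    {"name": "admins", "through": "admins_group"},
]
new = [
    {"name": "members", "through": "users_group"},
    {"name": "admins", "through": "admins_group"},
]
# Only the renamed field is reported; the untouched one produces no action.
assert list(get_dict_diff_by_key(old, new)) == [("change", [0, "name"], ("users", "members"))]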