chore: upgrade deps and fix ruff lint issues (#374)

* chore: upgrade deps and apply ruff lint for tests/
* style: fix ruff lint issues
parent 252cb97767
commit 44025823ee
@@ -42,7 +42,7 @@ class Command:
     async def _upgrade(self, conn, version_file) -> None:
         file_path = Path(Migrate.migrate_location, version_file)
         m = import_py_file(file_path)
-        upgrade = getattr(m, "upgrade")
+        upgrade = m.upgrade
         await conn.execute_script(await upgrade(conn))
         await Aerich.create(
             version=version_file,
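Note: the only functional change in this hunk swaps getattr(m, "upgrade") for plain attribute access, which is what ruff's B009 rule ("do not call getattr with a constant attribute value") asks for. A minimal sketch of the equivalence, using a stand-in module object rather than an actual migration file:

import types

# Stand-in for the module object returned by import_py_file().
m = types.SimpleNamespace(upgrade=lambda conn: "ALTER TABLE ...")

# With a constant attribute name, getattr is just attribute access in disguise,
# so ruff suggests the direct form.
assert getattr(m, "upgrade") is m.upgrade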
@@ -89,7 +89,7 @@ class Command:
         ) as conn:
             file_path = Path(Migrate.migrate_location, file)
             m = import_py_file(file_path)
-            downgrade = getattr(m, "downgrade")
+            downgrade = m.downgrade
             downgrade_sql = await downgrade(conn)
             if not downgrade_sql.strip():
                 raise DowngradeError("No downgrade items found")
@@ -47,8 +47,10 @@ async def cli(ctx: Context, config, app) -> None:
         location = tool["location"]
         tortoise_orm = tool["tortoise_orm"]
         src_folder = tool.get("src_folder", CONFIG_DEFAULT_VALUES["src_folder"])
-    except NonExistentKey:
-        raise UsageError("You need run `aerich init` again when upgrading to aerich 0.6.0+.")
+    except NonExistentKey as e:
+        raise UsageError(
+            "You need run `aerich init` again when upgrading to aerich 0.6.0+."
+        ) from e
     add_src_path(src_folder)
     tortoise_config = get_tortoise_config(ctx, tortoise_orm)
     if not app:
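Note: the except clause now binds the exception as e and re-raises with "from e", ruff's B904 fix (raise with explicit chaining inside an except block), so the original NonExistentKey is kept as __cause__ instead of being silently dropped. A minimal sketch of the pattern with stand-in exception types:

class ConfigKeyMissing(Exception):
    """Stand-in for tomlkit's NonExistentKey."""

def read_required_key(settings: dict) -> str:
    try:
        return settings["tortoise_orm"]
    except KeyError as e:
        # "from e" chains the KeyError onto the new error, so the traceback
        # shows both the user-facing message and the underlying lookup failure.
        raise ConfigKeyMissing("You need run `aerich init` again.") from e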
@@ -182,10 +184,7 @@ async def init(ctx: Context, tortoise_orm, location, src_folder) -> None:
     add_src_path(src_folder)
     get_tortoise_config(ctx, tortoise_orm)
     config_path = Path(config_file)
-    if config_path.exists():
-        content = config_path.read_text()
-    else:
-        content = "[tool.aerich]"
+    content = config_path.read_bytes() if config_path.exists() else "[tool.aerich]"
     doc: dict = tomlkit.parse(content)
     table = tomlkit.table()
     table["tortoise_orm"] = tortoise_orm
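Note: the four-line if/else collapses into a conditional expression, and read_text() becomes read_bytes(); the hunk relies on tomlkit.parse accepting bytes as well as str. A minimal sketch of the resulting flow, with a hypothetical path:

from pathlib import Path

import tomlkit

config_path = Path("pyproject.toml")  # hypothetical location for illustration
# Start from a bare [tool.aerich] table when the config file does not exist yet.
content = config_path.read_bytes() if config_path.exists() else "[tool.aerich]"
doc = tomlkit.parse(content)  # accepts str or bytes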
@@ -1,5 +1,6 @@
 from __future__ import annotations

+import contextlib
 from typing import Any, Callable, Dict, Optional, TypedDict

 from pydantic import BaseModel
@@ -61,8 +62,8 @@ class Column(BaseModel):
         elif self.data_type == "bool":
             default = f"default={'True' if self.default == 'true' else 'False'}, "
         elif self.data_type in ("datetime", "timestamptz", "TIMESTAMP"):
-            if "CURRENT_TIMESTAMP" == self.default:
-                if "DEFAULT_GENERATED on update CURRENT_TIMESTAMP" == self.extra:
+            if self.default == "CURRENT_TIMESTAMP":
+                if self.extra == "DEFAULT_GENERATED on update CURRENT_TIMESTAMP":
                     default = "auto_now=True, "
                 else:
                     default = "auto_now_add=True, "
@@ -94,10 +95,8 @@ class Inspect:

     def __init__(self, conn: BaseDBAsyncClient, tables: list[str] | None = None) -> None:
         self.conn = conn
-        try:
+        with contextlib.suppress(AttributeError):
             self.database = conn.database  # type:ignore[attr-defined]
-        except AttributeError:
-            pass
         self.tables = tables

     @property
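Note: contextlib.suppress(AttributeError) is the stdlib one-liner for try/except AttributeError/pass (ruff's SIM105 simplification); contextlib is imported at the top of the file in the earlier hunk. A minimal sketch showing that the two spellings behave the same, with a stand-in connection object:

import contextlib

class FakeConn:
    """Stand-in client that does not expose a .database attribute."""

conn = FakeConn()
database = None

try:  # old spelling
    database = conn.database
except AttributeError:
    pass

with contextlib.suppress(AttributeError):  # new spelling, same effect
    database = conn.database

assert database is None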
@@ -40,16 +40,11 @@ where c.TABLE_SCHEMA = %s
 and c.TABLE_NAME = %s"""
         ret = await self.conn.execute_query_dict(sql, [self.database, table])
         for row in ret:
-            non_unique = row["NON_UNIQUE"]
-            if non_unique is None:
-                unique = False
-            else:
-                unique = not non_unique
-            index_name = row["INDEX_NAME"]
-            if index_name is None:
-                index = False
-            else:
-                index = row["INDEX_NAME"] != "PRIMARY"
+            unique = index = False
+            if (non_unique := row["NON_UNIQUE"]) is not None:
+                unique = not non_unique
+            if (index_name := row["INDEX_NAME"]) is not None:
+                index = index_name != "PRIMARY"
             columns.append(
                 Column(
                     name=row["COLUMN_NAME"],
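Note: the rewrite seeds unique and index with False and uses assignment expressions (the := walrus operator, Python 3.8+) so each metadata column is fetched and tested in a single if. A minimal sketch with a made-up result row:

row = {"NON_UNIQUE": 0, "INDEX_NAME": "idx_email"}  # hypothetical MySQL metadata row

unique = index = False
if (non_unique := row["NON_UNIQUE"]) is not None:
    unique = not non_unique  # NON_UNIQUE == 0 means the index enforces uniqueness
if (index_name := row["INDEX_NAME"]) is not None:
    index = index_name != "PRIMARY"  # the primary key is not counted as a plain index

assert unique and index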
@@ -271,7 +271,7 @@ class Migrate:
             # m2m fields
             old_m2m_fields = cast(List[dict], old_model_describe.get("m2m_fields"))
             new_m2m_fields = cast(List[dict], new_model_describe.get("m2m_fields"))
-            for action, option, change in diff(old_m2m_fields, new_m2m_fields):
+            for action, _, change in diff(old_m2m_fields, new_m2m_fields):
                 if change[0][0] == "db_constraint":
                     continue
                 new_value = change[0][1]
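Note: the loop variable option was never used inside the loop body, so it becomes _, the conventional throwaway name that satisfies ruff's B007 ("loop control variable not used within loop body"). A minimal sketch of the convention:

pairs = [("add", "unused", 1), ("change", "unused", 2)]

# Underscore signals that the middle element of each tuple is intentionally ignored.
total = 0
for action, _, value in pairs:
    total += value

assert total == 3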
@@ -346,22 +346,14 @@ class Migrate:
                     old_data_field_name = cast(str, old_data_field.get("name"))
                     if len(changes) == 2:
                         # rename field
+                        name_diff = (old_data_field_name, new_data_field_name)
+                        column_diff = (
+                            old_data_field.get("db_column"),
+                            new_data_field.get("db_column"),
+                        )
                         if (
-                            changes[0]
-                            == (
-                                "change",
-                                "name",
-                                (old_data_field_name, new_data_field_name),
-                            )
-                            and changes[1]
-                            == (
-                                "change",
-                                "db_column",
-                                (
-                                    old_data_field.get("db_column"),
-                                    new_data_field.get("db_column"),
-                                ),
-                            )
+                            changes[0] == ("change", "name", name_diff)
+                            and changes[1] == ("change", "db_column", column_diff)
                             and old_data_field_name not in new_data_fields_name
                         ):
                             if upgrade:
@@ -1,4 +1,5 @@
 import asyncio
+import contextlib
 import os
 from typing import Generator

@@ -57,10 +58,8 @@ async def initialize_tests(event_loop, request) -> None:
     # Placing init outside the try block since it doesn't
     # establish connections to the DB eagerly.
     await Tortoise.init(config=tortoise_orm)
-    try:
+    with contextlib.suppress(DBConnectionError, OperationalError):
         await Tortoise._drop_databases()
-    except (DBConnectionError, OperationalError):
-        pass
     await Tortoise.init(config=tortoise_orm, _create_db=True)
     await generate_schema_for_client(Tortoise.get_connection("default"), safe=True)

poetry.lock (generated, 18 lines changed)
@@ -34,13 +34,13 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""}

 [[package]]
 name = "anyio"
-version = "4.5.2"
+version = "4.6.2"
 description = "High level compatibility layer for multiple asynchronous event loop implementations"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "anyio-4.5.2-py3-none-any.whl", hash = "sha256:c011ee36bc1e8ba40e5a81cb9df91925c218fe9b778554e0b56a21e1b5d4716f"},
-    {file = "anyio-4.5.2.tar.gz", hash = "sha256:23009af4ed04ce05991845451e11ef02fc7c5ed29179ac9a420e5ad0ac7ddc5b"},
+    {file = "anyio-4.6.2-py3-none-any.whl", hash = "sha256:6caec6b1391f6f6d7b2ef2258d2902d36753149f67478f7df4be8e54d03a8f54"},
+    {file = "anyio-4.6.2.tar.gz", hash = "sha256:f72a7bb3dd0752b3bd8b17a844a019d7fbf6ae218c588f4f9ba1b2f600b12347"},
 ]

 [package.dependencies]
@@ -650,13 +650,13 @@ files = [

 [[package]]
 name = "pydantic"
-version = "2.10.2"
+version = "2.10.3"
 description = "Data validation using Python type hints"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "pydantic-2.10.2-py3-none-any.whl", hash = "sha256:cfb96e45951117c3024e6b67b25cdc33a3cb7b2fa62e239f7af1378358a1d99e"},
-    {file = "pydantic-2.10.2.tar.gz", hash = "sha256:2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa"},
+    {file = "pydantic-2.10.3-py3-none-any.whl", hash = "sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d"},
+    {file = "pydantic-2.10.3.tar.gz", hash = "sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9"},
 ]

 [package.dependencies]
@@ -796,13 +796,13 @@ windows-terminal = ["colorama (>=0.4.6)"]

 [[package]]
 name = "pypika-tortoise"
-version = "0.3.1"
+version = "0.3.2"
 description = "Forked from pypika and streamline just for tortoise-orm"
 optional = false
 python-versions = ">=3.8,<4.0"
 files = [
-    {file = "pypika_tortoise-0.3.1-py3-none-any.whl", hash = "sha256:eee0d49c99ed1b932f7c48f8b87d8492aeb3c7e6a48ba69bc462eb9e3b5b20a2"},
-    {file = "pypika_tortoise-0.3.1.tar.gz", hash = "sha256:6f9861dd34fd21a009e79b174159e61699da28cb2607617e688b7e79e6c9ef7e"},
+    {file = "pypika_tortoise-0.3.2-py3-none-any.whl", hash = "sha256:c5c52bc4473fe6f3db36cf659340750246ec5dd0f980d04ae7811430e299c3a2"},
+    {file = "pypika_tortoise-0.3.2.tar.gz", hash = "sha256:f5d508e2ef00255e52ec6ac79ef889e10dbab328f218c55cd134c4d02ff9f6f4"},
 ]

 [[package]]
@@ -67,5 +67,7 @@ pretty = true
 python_version = "3.8"
 ignore_missing_imports = true

+[tool.ruff]
+line-length = 100
 [tool.ruff.lint]
 ignore = ['E501']
@@ -3,4 +3,4 @@ from aerich.utils import import_py_file

 def test_import_py_file() -> None:
     m = import_py_file("aerich/utils.py")
-    assert getattr(m, "import_py_file")
+    assert getattr(m, "import_py_file", None)
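Note: this test exercises dynamic module loading, so getattr stays on purpose; adding the None default keeps the lookup dynamic while keeping ruff from suggesting plain attribute access, and the assert still fails if the attribute is missing. A minimal sketch, using a stand-in for the loaded module:

import types

m = types.SimpleNamespace(import_py_file=lambda path: None)  # stand-in module object

# Truthy when the attribute exists, None (falsy) when it does not.
assert getattr(m, "import_py_file", None)
assert getattr(m, "missing_name", None) is None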