chore: upgrade deps and fix ruff lint issues (#374)

* chore: upgrade deps and apply ruff lint for tests/

* style: fix ruff lint issues
This commit is contained in:
Waket Zheng 2024-12-05 15:56:00 +08:00 committed by GitHub
parent 252cb97767
commit 44025823ee
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
9 changed files with 37 additions and 51 deletions

View File

@@ -42,7 +42,7 @@ class Command:
async def _upgrade(self, conn, version_file) -> None:
file_path = Path(Migrate.migrate_location, version_file)
m = import_py_file(file_path)
upgrade = getattr(m, "upgrade")
upgrade = m.upgrade
await conn.execute_script(await upgrade(conn))
await Aerich.create(
version=version_file,
@@ -89,7 +89,7 @@ class Command:
) as conn:
file_path = Path(Migrate.migrate_location, file)
m = import_py_file(file_path)
downgrade = getattr(m, "downgrade")
downgrade = m.downgrade
downgrade_sql = await downgrade(conn)
if not downgrade_sql.strip():
raise DowngradeError("No downgrade items found")

View File

@@ -47,8 +47,10 @@ async def cli(ctx: Context, config, app) -> None:
location = tool["location"]
tortoise_orm = tool["tortoise_orm"]
src_folder = tool.get("src_folder", CONFIG_DEFAULT_VALUES["src_folder"])
except NonExistentKey:
raise UsageError("You need run `aerich init` again when upgrading to aerich 0.6.0+.")
except NonExistentKey as e:
raise UsageError(
"You need run `aerich init` again when upgrading to aerich 0.6.0+."
) from e
add_src_path(src_folder)
tortoise_config = get_tortoise_config(ctx, tortoise_orm)
if not app:
@@ -182,10 +184,7 @@ async def init(ctx: Context, tortoise_orm, location, src_folder) -> None:
add_src_path(src_folder)
get_tortoise_config(ctx, tortoise_orm)
config_path = Path(config_file)
if config_path.exists():
content = config_path.read_text()
else:
content = "[tool.aerich]"
content = config_path.read_bytes() if config_path.exists() else "[tool.aerich]"
doc: dict = tomlkit.parse(content)
table = tomlkit.table()
table["tortoise_orm"] = tortoise_orm

View File

@@ -1,5 +1,6 @@
from __future__ import annotations
import contextlib
from typing import Any, Callable, Dict, Optional, TypedDict
from pydantic import BaseModel
@@ -61,8 +62,8 @@ class Column(BaseModel):
elif self.data_type == "bool":
default = f"default={'True' if self.default == 'true' else 'False'}, "
elif self.data_type in ("datetime", "timestamptz", "TIMESTAMP"):
if "CURRENT_TIMESTAMP" == self.default:
if "DEFAULT_GENERATED on update CURRENT_TIMESTAMP" == self.extra:
if self.default == "CURRENT_TIMESTAMP":
if self.extra == "DEFAULT_GENERATED on update CURRENT_TIMESTAMP":
default = "auto_now=True, "
else:
default = "auto_now_add=True, "
@@ -94,10 +95,8 @@ class Inspect:
def __init__(self, conn: BaseDBAsyncClient, tables: list[str] | None = None) -> None:
self.conn = conn
try:
with contextlib.suppress(AttributeError):
self.database = conn.database # type:ignore[attr-defined]
except AttributeError:
pass
self.tables = tables
@property

View File

@@ -40,16 +40,11 @@ where c.TABLE_SCHEMA = %s
and c.TABLE_NAME = %s"""
ret = await self.conn.execute_query_dict(sql, [self.database, table])
for row in ret:
non_unique = row["NON_UNIQUE"]
if non_unique is None:
unique = False
else:
unique = index = False
if (non_unique := row["NON_UNIQUE"]) is not None:
unique = not non_unique
index_name = row["INDEX_NAME"]
if index_name is None:
index = False
else:
index = row["INDEX_NAME"] != "PRIMARY"
if (index_name := row["INDEX_NAME"]) is not None:
index = index_name != "PRIMARY"
columns.append(
Column(
name=row["COLUMN_NAME"],

View File

@@ -271,7 +271,7 @@ class Migrate:
# m2m fields
old_m2m_fields = cast(List[dict], old_model_describe.get("m2m_fields"))
new_m2m_fields = cast(List[dict], new_model_describe.get("m2m_fields"))
for action, option, change in diff(old_m2m_fields, new_m2m_fields):
for action, _, change in diff(old_m2m_fields, new_m2m_fields):
if change[0][0] == "db_constraint":
continue
new_value = change[0][1]
@@ -346,22 +346,14 @@ class Migrate:
old_data_field_name = cast(str, old_data_field.get("name"))
if len(changes) == 2:
# rename field
name_diff = (old_data_field_name, new_data_field_name)
column_diff = (
old_data_field.get("db_column"),
new_data_field.get("db_column"),
)
if (
changes[0]
== (
"change",
"name",
(old_data_field_name, new_data_field_name),
)
and changes[1]
== (
"change",
"db_column",
(
old_data_field.get("db_column"),
new_data_field.get("db_column"),
),
)
changes[0] == ("change", "name", name_diff)
and changes[1] == ("change", "db_column", column_diff)
and old_data_field_name not in new_data_fields_name
):
if upgrade:

View File

@@ -1,4 +1,5 @@
import asyncio
import contextlib
import os
from typing import Generator
@@ -57,10 +58,8 @@ async def initialize_tests(event_loop, request) -> None:
# Placing init outside the try block since it doesn't
# establish connections to the DB eagerly.
await Tortoise.init(config=tortoise_orm)
try:
with contextlib.suppress(DBConnectionError, OperationalError):
await Tortoise._drop_databases()
except (DBConnectionError, OperationalError):
pass
await Tortoise.init(config=tortoise_orm, _create_db=True)
await generate_schema_for_client(Tortoise.get_connection("default"), safe=True)

18
poetry.lock generated
View File

@@ -34,13 +34,13 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""}
[[package]]
name = "anyio"
version = "4.5.2"
version = "4.6.2"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
optional = false
python-versions = ">=3.8"
files = [
{file = "anyio-4.5.2-py3-none-any.whl", hash = "sha256:c011ee36bc1e8ba40e5a81cb9df91925c218fe9b778554e0b56a21e1b5d4716f"},
{file = "anyio-4.5.2.tar.gz", hash = "sha256:23009af4ed04ce05991845451e11ef02fc7c5ed29179ac9a420e5ad0ac7ddc5b"},
{file = "anyio-4.6.2-py3-none-any.whl", hash = "sha256:6caec6b1391f6f6d7b2ef2258d2902d36753149f67478f7df4be8e54d03a8f54"},
{file = "anyio-4.6.2.tar.gz", hash = "sha256:f72a7bb3dd0752b3bd8b17a844a019d7fbf6ae218c588f4f9ba1b2f600b12347"},
]
[package.dependencies]
@@ -650,13 +650,13 @@ files = [
[[package]]
name = "pydantic"
version = "2.10.2"
version = "2.10.3"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.8"
files = [
{file = "pydantic-2.10.2-py3-none-any.whl", hash = "sha256:cfb96e45951117c3024e6b67b25cdc33a3cb7b2fa62e239f7af1378358a1d99e"},
{file = "pydantic-2.10.2.tar.gz", hash = "sha256:2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa"},
{file = "pydantic-2.10.3-py3-none-any.whl", hash = "sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d"},
{file = "pydantic-2.10.3.tar.gz", hash = "sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9"},
]
[package.dependencies]
@@ -796,13 +796,13 @@ windows-terminal = ["colorama (>=0.4.6)"]
[[package]]
name = "pypika-tortoise"
version = "0.3.1"
version = "0.3.2"
description = "Forked from pypika and streamline just for tortoise-orm"
optional = false
python-versions = ">=3.8,<4.0"
files = [
{file = "pypika_tortoise-0.3.1-py3-none-any.whl", hash = "sha256:eee0d49c99ed1b932f7c48f8b87d8492aeb3c7e6a48ba69bc462eb9e3b5b20a2"},
{file = "pypika_tortoise-0.3.1.tar.gz", hash = "sha256:6f9861dd34fd21a009e79b174159e61699da28cb2607617e688b7e79e6c9ef7e"},
{file = "pypika_tortoise-0.3.2-py3-none-any.whl", hash = "sha256:c5c52bc4473fe6f3db36cf659340750246ec5dd0f980d04ae7811430e299c3a2"},
{file = "pypika_tortoise-0.3.2.tar.gz", hash = "sha256:f5d508e2ef00255e52ec6ac79ef889e10dbab328f218c55cd134c4d02ff9f6f4"},
]
[[package]]

View File

@@ -67,5 +67,7 @@ pretty = true
python_version = "3.8"
ignore_missing_imports = true
[tool.ruff]
line-length = 100
[tool.ruff.lint]
ignore = ['E501']

View File

@@ -3,4 +3,4 @@ from aerich.utils import import_py_file
def test_import_py_file() -> None:
m = import_py_file("aerich/utils.py")
assert getattr(m, "import_py_file")
assert getattr(m, "import_py_file", None)