finish base cli

commit 2da90ecca6
parent d385647fba
Author: long2ice
Date: 2020-05-14 18:46:17 +08:00

21 changed files with 465 additions and 623 deletions

CHANGELOG.rst (new file, empty)

LICENSE (new file)

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2020 long2ice
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

Makefile

@@ -1,4 +1,4 @@
checkfiles = alice/ examples/ tests/ conftest.py
checkfiles = alice/ tests/
black_opts = -l 100 -t py38
py_warn = PYTHONDEVMODE=1
@@ -7,12 +7,12 @@ help:
@echo
@echo "usage: make <target>"
@echo "Targets:"
@echo " test Runs all tests"
@echo " test Runs all tests"
@echo " style Auto-formats the code"
deps:
@which pip-sync > /dev/null || pip install -q pip-tools
@pip-sync tests/requirements.txt
@pip install -r requirements-dev.txt
style: deps
isort -rc $(checkfiles)

alice/__init__.py

@@ -1 +1 @@
__version__ = '0.1.0'
__version__ = "0.1.0"

alice/cli.py

@@ -1,13 +1,13 @@
import importlib
import json
import os
import sys
from enum import Enum
import asyncclick as click
from asyncclick import BadParameter, ClickException
from tortoise import Tortoise, generate_schema_for_client
from tortoise import generate_schema_for_client, ConfigurationError, Tortoise
from alice.backends.mysql import MysqlDDL
from alice.migrate import Migrate
from alice.utils import get_app_connection
@@ -15,103 +15,165 @@ sys.path.append(os.getcwd())
class Color(str, Enum):
green = 'green'
red = 'red'
green = "green"
red = "red"
@click.group(context_settings={'help_option_names': ['-h', '--help']})
@click.option('-c', '--config', default='settings', show_default=True,
help='Tortoise-ORM config module, will read config variable from it, default is `settings`.')
@click.option('-t', '--tortoise-orm', default='TORTOISE_ORM', show_default=True,
help='Tortoise-ORM config dict variable, default is `TORTOISE_ORM`.')
@click.option('-l', '--location', default='./migrations', show_default=True,
help='Migrate store location, default is `./migrations`.')
@click.option('-a', '--app', default='models', show_default=True, help='Tortoise-ORM app name, default is `models`.')
@click.group(context_settings={"help_option_names": ["-h", "--help"]})
@click.option(
"--config",
default="settings",
show_default=True,
help="Tortoise-ORM config module, will read config variable from it.",
)
@click.option(
"--tortoise-orm",
default="TORTOISE_ORM",
show_default=True,
help="Tortoise-ORM config dict variable.",
)
@click.option(
"--location", default="./migrations", show_default=True, help="Migrate store location."
)
@click.option("--app", default="models", show_default=True, help="Tortoise-ORM app name.")
@click.pass_context
async def cli(ctx, config, tortoise_orm, location, app):
ctx.ensure_object(dict)
try:
config_module = importlib.import_module(config)
config = getattr(config_module, tortoise_orm, None)
if not config:
raise BadParameter(param_hint=['--config'],
message=f'Can\'t get "{tortoise_orm}" from module "{config_module}"')
await Tortoise.init(config=config)
ctx.obj['config'] = config
ctx.obj['location'] = location
ctx.obj['app'] = app
if app not in config.get('apps').keys():
raise BadParameter(param_hint=['--app'], message=f'No app found in "{config}"')
except ModuleNotFoundError:
raise BadParameter(param_hint=['--tortoise-orm'], message=f'No module named "{config}"')
raise BadParameter(param_hint=["--tortoise-orm"], message=f'No module named "{config}"')
config = getattr(config_module, tortoise_orm, None)
if not config:
raise BadParameter(
param_hint=["--config"],
message=f'Can\'t get "{tortoise_orm}" from module "{config_module}"',
)
if app not in config.get("apps").keys():
raise BadParameter(param_hint=["--app"], message=f'No app found in "{config}"')
ctx.obj["config"] = config
ctx.obj["location"] = location
ctx.obj["app"] = app
try:
await Migrate.init_with_old_models(config, app, location)
except ConfigurationError:
pass
@cli.command()
@cli.command(help="Generate migrate changes file.")
@click.option("--name", default="update", show_default=True, help="Migrate name.")
@click.pass_context
def migrate(ctx):
config = ctx.obj['config']
location = ctx.obj['location']
app = ctx.obj['app']
async def migrate(ctx, name):
config = ctx.obj["config"]
location = ctx.obj["location"]
app = ctx.obj["app"]
old_models = Migrate.read_old_models(app, location)
print(old_models)
new_models = Tortoise.apps.get(app)
print(new_models)
ret = Migrate(MysqlDDL(get_app_connection(config, app))).diff_models(old_models, new_models)
print(ret)
ret = Migrate.migrate(name)
if not ret:
click.secho("No changes detected", fg=Color.green)
else:
Migrate.write_old_models(config, app, location)
click.secho(f"Success migrate {ret}", fg=Color.green)
@cli.command()
@cli.command(help="Upgrade to latest version.")
@click.pass_context
def upgrade():
pass
async def upgrade(ctx):
app = ctx.obj["app"]
config = ctx.obj["config"]
connection = get_app_connection(config, app)
available_versions = Migrate.get_all_version_files(is_all=False)
if not available_versions:
return click.secho("No migrate items", fg=Color.green)
async with connection._in_transaction() as conn:
for file in available_versions:
file_path = os.path.join(Migrate.migrate_location, file)
with open(file_path, "r") as f:
content = json.load(f)
upgrade_query_list = content.get("upgrade")
for upgrade_query in upgrade_query_list:
await conn.execute_query(upgrade_query)
with open(file_path, "w") as f:
content["migrate"] = True
json.dump(content, f, indent=4)
click.secho(f"Success upgrade {file}", fg=Color.green)
@cli.command()
@cli.command(help="Downgrade to previous version.")
@click.pass_context
def downgrade():
pass
async def downgrade(ctx):
app = ctx.obj["app"]
config = ctx.obj["config"]
connection = get_app_connection(config, app)
available_versions = Migrate.get_all_version_files()
if not available_versions:
return click.secho("No migrate items", fg=Color.green)
async with connection._in_transaction() as conn:
for file in available_versions:
file_path = os.path.join(Migrate.migrate_location, file)
with open(file_path, "r") as f:
content = json.load(f)
if content.get("migrate"):
downgrade_query_list = content.get("downgrade")
for downgrade_query in downgrade_query_list:
await conn.execute_query(downgrade_query)
with open(file_path, "w") as f:
content["migrate"] = False
json.dump(content, f, indent=4)
return click.secho(f"Success downgrade {file}", fg=Color.green)
@cli.command()
@click.option('--safe', is_flag=True, default=True,
help='When set to true, creates the table only when it does not already exist..', show_default=True)
@cli.command(help="Show current available heads in migrate location.")
@click.pass_context
async def initdb(ctx, safe):
location = ctx.obj['location']
config = ctx.obj['config']
app = ctx.obj['app']
await generate_schema_for_client(get_app_connection(config, app), safe)
Migrate.write_old_models(app, location)
click.secho(f'Success initdb for app `{app}`', fg=Color.green)
def heads(ctx):
for version in Migrate.get_all_version_files(is_all=False):
click.secho(version, fg=Color.green)
@cli.command()
@click.option('--overwrite', is_flag=True, default=False, help=f'Overwrite {Migrate.old_models}.', show_default=True)
@cli.command(help="List all migrate items.")
@click.pass_context
def init(ctx, overwrite):
location = ctx.obj['location']
app = ctx.obj['app']
def history(ctx):
for version in Migrate.get_all_version_files():
click.secho(version, fg=Color.green)
@cli.command(
help="Init migrate location and generate schema, you must call first before other actions."
)
@click.option(
"--safe",
is_flag=True,
default=True,
help="When set to true, creates the table only when it does not already exist.",
show_default=True,
)
@click.pass_context
async def init(ctx, safe):
location = ctx.obj["location"]
app = ctx.obj["app"]
config = ctx.obj["config"]
if not os.path.isdir(location):
os.mkdir(location)
dirname = os.path.join(location, app)
if not os.path.isdir(dirname):
os.mkdir(dirname)
click.secho(f'Success create migrate location {dirname}', fg=Color.green)
if overwrite:
Migrate.write_old_models(app, location)
dirname = os.path.join(location, app)
if not os.path.isdir(dirname):
os.mkdir(dirname)
click.secho(f"Success create migrate location {dirname}", fg=Color.green)
else:
raise ClickException('Already inited')
raise ClickException(f"Already inited app `{app}`")
Migrate.write_old_models(config, app, location)
await Migrate.init_with_old_models(config, app, location)
await generate_schema_for_client(get_app_connection(config, app), safe)
click.secho(f"Success init for app `{app}`", fg=Color.green)
if __name__ == '__main__':
cli(_anyio_backend='asyncio')
def main():
cli(_anyio_backend="asyncio")
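For reference, a minimal sketch of the config module the new CLI expects: --config names an importable module (default `settings`) and --tortoise-orm names the dict inside it (default `TORTOISE_ORM`). The URL and app layout below are illustrative, borrowed from the test config further down:

# settings.py -- hypothetical example; only the TORTOISE_ORM dict is required
TORTOISE_ORM = {
    "connections": {"default": "mysql://root:123456@127.0.0.1:3306/test"},
    "apps": {
        "models": {
            "models": ["tests.models"],  # modules that define your Model classes
            "default_connection": "default",
        },
    },
}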

alice/ddl/__init__.py

@@ -1,32 +1,32 @@
from typing import Type, List
from typing import List, Type
from tortoise import Model, BaseDBAsyncClient, ForeignKeyFieldInstance
from tortoise import BaseDBAsyncClient, ForeignKeyFieldInstance, Model
from tortoise.backends.base.schema_generator import BaseSchemaGenerator
from tortoise.fields import Field, UUIDField, TextField, JSONField
from tortoise.fields import Field, JSONField, TextField, UUIDField
class DDL:
schema_generator_cls: Type[BaseSchemaGenerator] = BaseSchemaGenerator
DIALECT = "sql"
_DROP_TABLE_TEMPLATE = 'DROP TABLE {table_name} IF EXISTS'
_ADD_COLUMN_TEMPLATE = 'ALTER TABLE {table_name} ADD {column}'
_DROP_COLUMN_TEMPLATE = 'ALTER TABLE {table_name} DROP COLUMN {column_name}'
_ADD_INDEX_TEMPLATE = 'ALTER TABLE {table_name} ADD {unique} INDEX {index_name} ({column_names})'
_DROP_INDEX_TEMPLATE = 'ALTER TABLE {table_name} DROP INDEX {index_name}'
_ADD_FK_TEMPLATE = 'ALTER TABLE {table_name} ADD CONSTRAINT `{fk_name}` FOREIGN KEY (`{db_column}`) REFERENCES `{table}` (`{field}`) ON DELETE {on_delete}'
_DROP_FK_TEMPLATE = 'ALTER TABLE {table_name} DROP FOREIGN KEY {fk_name}'
_DROP_TABLE_TEMPLATE = "DROP TABLE {table_name} IF EXISTS"
_ADD_COLUMN_TEMPLATE = "ALTER TABLE {table_name} ADD {column}"
_DROP_COLUMN_TEMPLATE = "ALTER TABLE {table_name} DROP COLUMN {column_name}"
_ADD_INDEX_TEMPLATE = (
"ALTER TABLE {table_name} ADD {unique} INDEX {index_name} ({column_names})"
)
_DROP_INDEX_TEMPLATE = "ALTER TABLE {table_name} DROP INDEX {index_name}"
_ADD_FK_TEMPLATE = "ALTER TABLE {table_name} ADD CONSTRAINT `{fk_name}` FOREIGN KEY (`{db_column}`) REFERENCES `{table}` (`{field}`) ON DELETE {on_delete}"
_DROP_FK_TEMPLATE = "ALTER TABLE {table_name} DROP FOREIGN KEY {fk_name}"
def __init__(self, client: "BaseDBAsyncClient"):
self.client = client
self.schema_generator = self.schema_generator_cls(client)
def create_table(self, model: "Type[Model]"):
return self.schema_generator._get_table_sql(model, True)['table_creation_string']
return self.schema_generator._get_table_sql(model, True)["table_creation_string"]
def drop_table(self, model: "Type[Model]"):
return self._DROP_TABLE_TEMPLATE.format(
table_name=model._meta.db_table
)
return self._DROP_TABLE_TEMPLATE.format(table_name=model._meta.db_table)
def add_column(self, model: "Type[Model]", field_object: Field):
db_table = model._meta.db_table
@@ -59,33 +59,37 @@ class DDL:
nullable="NOT NULL" if not field_object.null else "",
unique="UNIQUE" if field_object.unique else "",
comment=self.schema_generator._column_comment_generator(
table=db_table, column=field_object.model_field_name, comment=field_object.description
table=db_table,
column=field_object.model_field_name,
comment=field_object.description,
)
if field_object.description else "",
if field_object.description
else "",
is_primary_key=field_object.pk,
default=default
)
default=default,
),
)
def drop_column(self, model: "Type[Model]", column_name: str):
return self._DROP_COLUMN_TEMPLATE.format(
table_name=model._meta.db_table,
column_name=column_name
table_name=model._meta.db_table, column_name=column_name
)
def add_index(self, model: "Type[Model]", field_names: List[str], unique=False):
return self._ADD_INDEX_TEMPLATE.format(
unique='UNIQUE' if unique else '',
index_name=self.schema_generator._generate_index_name("idx" if not unique else "uid", model,
field_names),
unique="UNIQUE" if unique else "",
index_name=self.schema_generator._generate_index_name(
"idx" if not unique else "uid", model, field_names
),
table_name=model._meta.db_table,
column_names=", ".join([self.schema_generator.quote(f) for f in field_names]),
)
def drop_index(self, model: "Type[Model]", field_names: List[str], unique=False):
return self._DROP_INDEX_TEMPLATE.format(
index_name=self.schema_generator._generate_index_name("idx" if not unique else "uid", model,
field_names),
index_name=self.schema_generator._generate_index_name(
"idx" if not unique else "uid", model, field_names
),
table_name=model._meta.db_table,
)
@@ -99,7 +103,7 @@ class DDL:
from_table=db_table,
from_field=field.model_field_name,
to_table=field.related_model._meta.db_table,
to_field=to_field_name
to_field=to_field_name,
)
return self._ADD_FK_TEMPLATE.format(
table_name=db_table,
@@ -120,6 +124,6 @@ class DDL:
from_table=model._meta.db_table,
from_field=field.model_field_name,
to_table=field.related_model._meta.db_table,
to_field=to_field_name
)
to_field=to_field_name,
),
)
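As a hedged usage sketch (assuming an initialized Tortoise connection named `client`, which is not shown in this diff), the DDL methods simply render these templates into SQL strings; the expected output below is taken from the test suite:

from alice.ddl.mysql import MysqlDDL
from tests.models import Category

ddl = MysqlDDL(client)  # `client` assumed: a live MySQL BaseDBAsyncClient
ddl.add_column(Category, Category._meta.fields_map.get("name"))
# -> ALTER TABLE category ADD `name` VARCHAR(200) NOT NULL
ddl.drop_index(Category, ["name"])
# -> ALTER TABLE category DROP INDEX idx_category_name_8b0cb9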

alice/ddl/mysql.py

@@ -1,8 +1,8 @@
from tortoise.backends.mysql.schema_generator import MySQLSchemaGenerator
from alice.backends import DDL
from alice.ddl import DDL
class MysqlDDL(DDL):
schema_generator_cls = MySQLSchemaGenerator
DIALECT = "mysql"
DIALECT = MySQLSchemaGenerator.DIALECT

alice/exceptions.py (new file)

@@ -0,0 +1,6 @@
class ConfigurationError(Exception):
"""
config error
"""
pass

alice/migrate.py

@@ -1,79 +1,183 @@
import importlib
import inspect
import json
import os
import re
from copy import deepcopy
from datetime import datetime
from typing import Dict, List, Type
import dill
from typing import List, Type, Dict
from tortoise import Model, ForeignKeyFieldInstance, Tortoise
from tortoise import ForeignKeyFieldInstance, Model, Tortoise
from tortoise.backends.mysql.schema_generator import MySQLSchemaGenerator
from tortoise.fields import Field
from alice.backends import DDL
from alice.ddl import DDL
from alice.ddl.mysql import MysqlDDL
from alice.exceptions import ConfigurationError
from alice.utils import get_app_connection
class Migrate:
operators: List
upgrade_operators: List[str] = []
downgrade_operators: List[str] = []
ddl: DDL
old_models = 'old_models.pickle'
migrate_config: dict
old_models = "old_models"
diff_app = "diff_models"
app: str
migrate_location: str
def __init__(self, ddl: DDL):
self.operators = []
self.ddl = ddl
@classmethod
def get_old_model_file(cls):
return cls.old_models + ".py"
@staticmethod
def write_old_models(app, location):
ret = Tortoise.apps.get(app)
old_models = {}
for k, v in ret.items():
old_models[k] = deepcopy(v)
@classmethod
def _get_all_migrate_files(cls):
return sorted(filter(lambda x: x.endswith("json"), os.listdir(cls.migrate_location)))
dirname = os.path.join(location, app)
@classmethod
def _get_latest_version(cls) -> int:
ret = cls._get_all_migrate_files()
if ret:
return int(ret[-1].split("_")[0])
return 0
with open(os.path.join(dirname, Migrate.old_models), 'wb') as f:
dill.dump(old_models, f, )
@classmethod
def get_all_version_files(cls, is_all=True):
files = cls._get_all_migrate_files()
ret = []
for file in files:
with open(os.path.join(cls.migrate_location, file), "r") as f:
content = json.load(f)
if is_all or not content.get("migrate"):
ret.append(file)
return ret
@staticmethod
def read_old_models(app, location):
dirname = os.path.join(location, app)
with open(os.path.join(dirname, Migrate.old_models), 'rb') as f:
return dill.load(f, )
@classmethod
async def init_with_old_models(cls, config: dict, app: str, location: str):
migrate_config = cls._get_migrate_config(config, app, location)
def diff_models_module(self, old_models_module, new_models_module):
old_module = importlib.import_module(old_models_module)
old_models = {}
new_models = {}
for name, obj in inspect.getmembers(old_module):
if inspect.isclass(obj) and issubclass(obj, Model):
old_models[obj.__name__] = obj
cls.app = app
cls.migrate_config = migrate_config
cls.migrate_location = os.path.join(location, app)
new_module = importlib.import_module(new_models_module)
for name, obj in inspect.getmembers(new_module):
if inspect.isclass(obj) and issubclass(obj, Model):
new_models[obj.__name__] = obj
self.diff_models(old_models, new_models)
await Tortoise.init(config=migrate_config)
def diff_models(self, old_models: Dict[str, Type[Model]], new_models: Dict[str, Type[Model]]):
connection = get_app_connection(config, app)
if connection.schema_generator is MySQLSchemaGenerator:
cls.ddl = MysqlDDL(connection)
else:
raise NotImplementedError("Current only support MySQL")
@classmethod
def _generate_diff_sql(cls, name):
now = datetime.now().strftime("%Y%m%d%H%M%S")
filename = f"{cls._get_latest_version() + 1}_{now}_{name}.json"
content = {
"upgrade": cls.upgrade_operators,
"download": cls.downgrade_operators,
"migrate": False,
}
with open(os.path.join(cls.migrate_location, filename), "w") as f:
json.dump(content, f, indent=4)
return filename
@classmethod
def migrate(cls, name):
if not cls.migrate_config:
raise ConfigurationError("You must call init_with_old_models() first!")
apps = Tortoise.apps
diff_models = apps.get(cls.diff_app)
app_models = apps.get(cls.app)
cls._diff_models(diff_models, app_models)
cls._diff_models(app_models, diff_models, False)
if not cls.upgrade_operators:
return False
return cls._generate_diff_sql(name)
@classmethod
def _add_operator(cls, operator: str, upgrade=True):
if upgrade:
cls.upgrade_operators.append(operator)
else:
cls.downgrade_operators.append(operator)
@classmethod
def cp_models(
cls, model_files: List[str], old_model_file,
):
"""
copy current model files to old_model_file
:param model_files:
:param old_model_file:
:return:
"""
pattern = (
r"(ManyToManyField|ForeignKeyField|OneToOneField)\((model_name)?(\"|\')(\w+)(.+)\)"
)
for i, model_file in enumerate(model_files):
with open(model_file, "r") as f:
content = f.read()
ret = re.sub(pattern, rf"\1\2(\3{cls.diff_app}\5)", content)
with open(old_model_file, "w" if i == 0 else "a") as f:
f.write(ret)
@classmethod
def _get_migrate_config(cls, config: dict, app: str, location: str):
temp_config = deepcopy(config)
path = os.path.join(location, app, cls.old_models)
path = path.replace("/", ".").lstrip(".")
temp_config["apps"][cls.diff_app] = {"models": [path]}
return temp_config
@classmethod
def write_old_models(cls, config: dict, app: str, location: str):
old_model_files = []
models = config.get("apps").get(app).get("models")
for model in models:
old_model_files.append(model.replace(".", "/") + ".py")
cls.cp_models(old_model_files, os.path.join(location, app, cls.get_old_model_file()))
@classmethod
def _diff_models(
cls, old_models: Dict[str, Type[Model]], new_models: Dict[str, Type[Model]], upgrade=True
):
"""
diff models and add operators
:param old_models:
:param new_models:
:param upgrade:
:return:
"""
for new_model_str, new_model in new_models.items():
if new_model_str not in old_models.keys():
self.add_model(new_model)
cls._add_operator(cls.add_model(new_model), upgrade)
else:
self.diff_model(old_models.get(new_model_str), new_model)
cls.diff_model(old_models.get(new_model_str), new_model, upgrade)
for old_model in old_models:
if old_model not in new_models.keys():
self.remove_model(old_models.get(old_model))
cls._add_operator(cls.remove_model(old_models.get(old_model)), upgrade)
def _add_operator(self, operator):
self.operators.append(operator)
@classmethod
def add_model(cls, model: Type[Model]):
return cls.ddl.create_table(model)
def add_model(self, model: Type[Model]):
self._add_operator(self.ddl.create_table(model))
@classmethod
def remove_model(cls, model: Type[Model]):
return cls.ddl.drop_table(model)
def remove_model(self, model: Type[Model]):
self._add_operator(self.ddl.drop_table(model))
def diff_model(self, old_model: Type[Model], new_model: Type[Model]):
@classmethod
def diff_model(cls, old_model: Type[Model], new_model: Type[Model], upgrade=True):
"""
diff single model
:param old_model:
:param new_model:
:param upgrade:
:return:
"""
old_fields_map = old_model._meta.fields_map
new_fields_map = new_model._meta.fields_map
old_keys = old_fields_map.keys()
@@ -81,31 +185,35 @@ class Migrate:
for new_key in new_keys:
new_field = new_fields_map.get(new_key)
if new_key not in old_keys:
self._add_field(new_model, new_field)
cls._add_operator(cls._add_field(new_model, new_field), upgrade)
else:
old_field = old_fields_map.get(new_key)
if old_field.index and not new_field.index:
self._remove_index(old_model, old_field)
cls._add_operator(cls._remove_index(old_model, old_field), upgrade)
elif new_field.index and not old_field.index:
self._add_index(new_model, new_field)
cls._add_operator(cls._add_index(new_model, new_field), upgrade)
for old_key in old_keys:
if old_key not in new_keys:
field = old_fields_map.get(old_key)
self._remove_field(old_model, field)
cls._add_operator(cls._remove_field(old_model, field), upgrade)
def _remove_index(self, model: Type[Model], field: Field):
self._add_operator(self.ddl.drop_index(model, [field.model_field_name], field.unique))
@classmethod
def _remove_index(cls, model: Type[Model], field: Field):
return cls.ddl.drop_index(model, [field.model_field_name], field.unique)
def _add_index(self, model: Type[Model], field: Field):
self._add_operator(self.ddl.add_index(model, [field.model_field_name], field.unique))
@classmethod
def _add_index(cls, model: Type[Model], field: Field):
return cls.ddl.add_index(model, [field.model_field_name], field.unique)
def _add_field(self, model: Type[Model], field: Field):
@classmethod
def _add_field(cls, model: Type[Model], field: Field):
if isinstance(field, ForeignKeyFieldInstance):
self._add_operator(self.ddl.add_fk(model, field))
return cls.ddl.add_fk(model, field)
else:
self._add_operator(self.ddl.add_column(model, field))
return cls.ddl.add_column(model, field)
def _remove_field(self, model: Type[Model], field: Field):
@classmethod
def _remove_field(cls, model: Type[Model], field: Field):
if isinstance(field, ForeignKeyFieldInstance):
self._add_operator(self.ddl.drop_fk(model, field))
self._add_operator(self.ddl.drop_column(model, field.model_field_name))
return cls.ddl.drop_fk(model, field)
return cls.ddl.drop_column(model, field.model_field_name)
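For reference, a sketch of what one generated version file looks like; the filename and SQL below are illustrative, but the keys match what _generate_diff_sql writes and what the upgrade/downgrade commands read back:

# e.g. 1_20200514184617_update.json ({version}_{timestamp}_{name}.json)
content = {
    "upgrade": ["ALTER TABLE category ADD `name` VARCHAR(200) NOT NULL"],
    "downgrade": ["ALTER TABLE category DROP COLUMN name"],
    "migrate": False,  # flipped to True by `upgrade`, back to False by `downgrade`
}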

alice/utils.py

@@ -1,11 +1,11 @@
from tortoise import Tortoise
def get_app_connection(config: dict, app: str):
def get_app_connection(config, app):
"""
get tortoise connection by app
get the tortoise connection for an app
:param config:
:param app:
:return:
"""
return Tortoise.get_connection(config.get('apps').get(app).get('default_connection')),
return Tortoise.get_connection(config.get("apps").get(app).get("default_connection"))
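A minimal usage sketch, assuming Tortoise.init(config=TORTOISE_ORM) has already run:

conn = get_app_connection(TORTOISE_ORM, "models")
await conn.execute_query("SELECT 1")  # raw query on the app's default connection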

poetry.lock (generated, deleted)

@@ -1,396 +0,0 @@
[[package]]
category = "main"
description = "MySQL driver for asyncio."
name = "aiomysql"
optional = false
python-versions = "*"
version = "0.0.20"
[package.dependencies]
PyMySQL = ">=0.9,<=0.9.2"
[package.extras]
sa = ["sqlalchemy (>=1.0)"]
[[package]]
category = "main"
description = "asyncio bridge to the standard sqlite3 module"
name = "aiosqlite"
optional = false
python-versions = ">=3.6"
version = "0.13.0"
[[package]]
category = "main"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
name = "anyio"
optional = false
python-versions = ">=3.5.3"
version = "1.3.0"
[package.dependencies]
async-generator = "*"
sniffio = ">=1.1"
[package.extras]
curio = ["curio (>=0.9)"]
doc = ["sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"]
test = ["coverage (>=4.5)", "hypothesis (>=4.0)", "pytest (>=3.7.2)", "uvloop"]
trio = ["trio (>=0.12)"]
[[package]]
category = "main"
description = "Async generators and context managers for Python 3.5+"
name = "async-generator"
optional = false
python-versions = ">=3.5"
version = "1.10"
[[package]]
category = "main"
description = "A simple anyio-compatible fork of Click, for powerful command line utilities."
name = "asyncclick"
optional = false
python-versions = ">=3.6"
version = "7.0.9"
[package.dependencies]
anyio = "*"
[package.extras]
dev = ["coverage", "pytest-runner", "pytest-trio", "pytest (>=3)", "sphinx", "tox"]
docs = ["sphinx"]
[[package]]
category = "dev"
description = "Enhance the standard unittest package with features for testing asyncio libraries"
name = "asynctest"
optional = false
python-versions = ">=3.5"
version = "0.13.0"
[[package]]
category = "main"
description = "Foreign Function Interface for Python calling C code."
name = "cffi"
optional = false
python-versions = "*"
version = "1.14.0"
[package.dependencies]
pycparser = "*"
[[package]]
category = "main"
description = "Fast ISO8601 date time parser for Python written in C"
marker = "sys_platform != \"win32\" and implementation_name == \"cpython\""
name = "ciso8601"
optional = false
python-versions = "*"
version = "2.1.3"
[[package]]
category = "main"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
name = "cryptography"
optional = false
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"
version = "2.9.2"
[package.dependencies]
cffi = ">=1.8,<1.11.3 || >1.11.3"
six = ">=1.4.1"
[package.extras]
docs = ["sphinx (>=1.6.5,<1.8.0 || >1.8.0)", "sphinx-rtd-theme"]
docstest = ["doc8", "pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"]
idna = ["idna (>=2.1)"]
pep8test = ["flake8", "flake8-import-order", "pep8-naming"]
test = ["pytest (>=3.6.0,<3.9.0 || >3.9.0,<3.9.1 || >3.9.1,<3.9.2 || >3.9.2)", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,<3.79.2 || >3.79.2)"]
[[package]]
category = "main"
description = "serialize all of python"
name = "dill"
optional = false
python-versions = ">=2.6, !=3.0.*"
version = "0.3.1.1"
[package.extras]
graph = ["objgraph (>=1.7.2)"]
[[package]]
category = "dev"
description = "the modular source code checker: pep8 pyflakes and co"
name = "flake8"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"
version = "3.8.1"
[package.dependencies]
mccabe = ">=0.6.0,<0.7.0"
pycodestyle = ">=2.6.0a1,<2.7.0"
pyflakes = ">=2.2.0,<2.3.0"
[[package]]
category = "main"
description = "Simple module to parse ISO 8601 dates"
marker = "sys_platform == \"win32\" or implementation_name != \"cpython\""
name = "iso8601"
optional = false
python-versions = "*"
version = "0.1.12"
[[package]]
category = "dev"
description = "McCabe checker, plugin for flake8"
name = "mccabe"
optional = false
python-versions = "*"
version = "0.6.1"
[[package]]
category = "dev"
description = "Python style guide checker"
name = "pycodestyle"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
version = "2.6.0"
[[package]]
category = "main"
description = "C parser in Python"
name = "pycparser"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
version = "2.20"
[[package]]
category = "dev"
description = "passive checker of Python programs"
name = "pyflakes"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
version = "2.2.0"
[[package]]
category = "main"
description = "Pure Python MySQL Driver"
name = "pymysql"
optional = false
python-versions = "*"
version = "0.9.2"
[package.dependencies]
cryptography = "*"
[[package]]
category = "main"
description = "A SQL query builder API for Python"
name = "pypika"
optional = false
python-versions = "*"
version = "0.37.6"
[[package]]
category = "main"
description = "Python 2 and 3 compatibility utilities"
name = "six"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
version = "1.14.0"
[[package]]
category = "main"
description = "Sniff out which async library your code is running under"
name = "sniffio"
optional = false
python-versions = ">=3.5"
version = "1.1.0"
[[package]]
category = "dev"
description = "tasks runner for python projects"
name = "taskipy"
optional = false
python-versions = ">=3.6,<4.0"
version = "1.2.1"
[package.dependencies]
toml = ">=0.10.0,<0.11.0"
[[package]]
category = "dev"
description = "Python Library for Tom's Obvious, Minimal Language"
name = "toml"
optional = false
python-versions = "*"
version = "0.10.0"
[[package]]
category = "main"
description = "Easy async ORM for python, built with relations in mind"
name = "tortoise-orm"
optional = false
python-versions = "*"
version = "0.16.11"
[package.dependencies]
aiosqlite = ">=0.11.0"
ciso8601 = ">=2.1.2"
iso8601 = ">=0.1.12"
pypika = ">=0.36.5"
typing-extensions = ">=3.7"
[package.source]
reference = "72f84f0848dc68041157f03e60cd1c92b0ee5137"
type = "git"
url = "https://github.com/tortoise/tortoise-orm.git"
[[package]]
category = "main"
description = "Backported and Experimental Type Hints for Python 3.5+"
name = "typing-extensions"
optional = false
python-versions = "*"
version = "3.7.4.2"
[metadata]
content-hash = "4809b238c12841eb28a6517843828716f207e9ed41b273bb681ae7a831e34af4"
python-versions = "^3.8"
[metadata.files]
aiomysql = [
{file = "aiomysql-0.0.20-py3-none-any.whl", hash = "sha256:5fd798481f16625b424eec765c56d712ac78a51f3bd0175a3de94107aae43307"},
{file = "aiomysql-0.0.20.tar.gz", hash = "sha256:d89ce25d44dadb43cf2d9e4603bd67b7a0ad12d5e67208de013629ba648df2ba"},
]
aiosqlite = [
{file = "aiosqlite-0.13.0-py3-none-any.whl", hash = "sha256:50688c40632ae249f986ab3ae2c66a45c0535b84a5d4aae0e0be572b5fed6909"},
{file = "aiosqlite-0.13.0.tar.gz", hash = "sha256:6e92961ae9e606b43b05e29b129e346b29e400fcbd63e3c0c564d89230257645"},
]
anyio = [
{file = "anyio-1.3.0-py3-none-any.whl", hash = "sha256:db2c3d21576870b95d4fd0b8f4a0f9c64057f777c578f3a8127179a17c8c067e"},
{file = "anyio-1.3.0.tar.gz", hash = "sha256:7deae0315dd10aa41c21528b83352e4b52f44e6153a21081a3d1cd8c03728e46"},
]
async-generator = [
{file = "async_generator-1.10-py3-none-any.whl", hash = "sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b"},
{file = "async_generator-1.10.tar.gz", hash = "sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144"},
]
asyncclick = [
{file = "asyncclick-7.0.9.tar.gz", hash = "sha256:62cebf3eca36d973802e2dd521ca1db11c5bf4544e9795e093d1a53cb688a8c2"},
]
asynctest = [
{file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"},
{file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"},
]
cffi = [
{file = "cffi-1.14.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1cae98a7054b5c9391eb3249b86e0e99ab1e02bb0cc0575da191aedadbdf4384"},
{file = "cffi-1.14.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:cf16e3cf6c0a5fdd9bc10c21687e19d29ad1fe863372b5543deaec1039581a30"},
{file = "cffi-1.14.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:f2b0fa0c01d8a0c7483afd9f31d7ecf2d71760ca24499c8697aeb5ca37dc090c"},
{file = "cffi-1.14.0-cp27-cp27m-win32.whl", hash = "sha256:99f748a7e71ff382613b4e1acc0ac83bf7ad167fb3802e35e90d9763daba4d78"},
{file = "cffi-1.14.0-cp27-cp27m-win_amd64.whl", hash = "sha256:c420917b188a5582a56d8b93bdd8e0f6eca08c84ff623a4c16e809152cd35793"},
{file = "cffi-1.14.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:399aed636c7d3749bbed55bc907c3288cb43c65c4389964ad5ff849b6370603e"},
{file = "cffi-1.14.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:cab50b8c2250b46fe738c77dbd25ce017d5e6fb35d3407606e7a4180656a5a6a"},
{file = "cffi-1.14.0-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:001bf3242a1bb04d985d63e138230802c6c8d4db3668fb545fb5005ddf5bb5ff"},
{file = "cffi-1.14.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:e56c744aa6ff427a607763346e4170629caf7e48ead6921745986db3692f987f"},
{file = "cffi-1.14.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:b8c78301cefcf5fd914aad35d3c04c2b21ce8629b5e4f4e45ae6812e461910fa"},
{file = "cffi-1.14.0-cp35-cp35m-win32.whl", hash = "sha256:8c0ffc886aea5df6a1762d0019e9cb05f825d0eec1f520c51be9d198701daee5"},
{file = "cffi-1.14.0-cp35-cp35m-win_amd64.whl", hash = "sha256:8a6c688fefb4e1cd56feb6c511984a6c4f7ec7d2a1ff31a10254f3c817054ae4"},
{file = "cffi-1.14.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:95cd16d3dee553f882540c1ffe331d085c9e629499ceadfbda4d4fde635f4b7d"},
{file = "cffi-1.14.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:66e41db66b47d0d8672d8ed2708ba91b2f2524ece3dee48b5dfb36be8c2f21dc"},
{file = "cffi-1.14.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:028a579fc9aed3af38f4892bdcc7390508adabc30c6af4a6e4f611b0c680e6ac"},
{file = "cffi-1.14.0-cp36-cp36m-win32.whl", hash = "sha256:cef128cb4d5e0b3493f058f10ce32365972c554572ff821e175dbc6f8ff6924f"},
{file = "cffi-1.14.0-cp36-cp36m-win_amd64.whl", hash = "sha256:337d448e5a725bba2d8293c48d9353fc68d0e9e4088d62a9571def317797522b"},
{file = "cffi-1.14.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e577934fc5f8779c554639376beeaa5657d54349096ef24abe8c74c5d9c117c3"},
{file = "cffi-1.14.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:62ae9af2d069ea2698bf536dcfe1e4eed9090211dbaafeeedf5cb6c41b352f66"},
{file = "cffi-1.14.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:14491a910663bf9f13ddf2bc8f60562d6bc5315c1f09c704937ef17293fb85b0"},
{file = "cffi-1.14.0-cp37-cp37m-win32.whl", hash = "sha256:c43866529f2f06fe0edc6246eb4faa34f03fe88b64a0a9a942561c8e22f4b71f"},
{file = "cffi-1.14.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2089ed025da3919d2e75a4d963d008330c96751127dd6f73c8dc0c65041b4c26"},
{file = "cffi-1.14.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3b911c2dbd4f423b4c4fcca138cadde747abdb20d196c4a48708b8a2d32b16dd"},
{file = "cffi-1.14.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:7e63cbcf2429a8dbfe48dcc2322d5f2220b77b2e17b7ba023d6166d84655da55"},
{file = "cffi-1.14.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:3d311bcc4a41408cf5854f06ef2c5cab88f9fded37a3b95936c9879c1640d4c2"},
{file = "cffi-1.14.0-cp38-cp38-win32.whl", hash = "sha256:675686925a9fb403edba0114db74e741d8181683dcf216be697d208857e04ca8"},
{file = "cffi-1.14.0-cp38-cp38-win_amd64.whl", hash = "sha256:00789914be39dffba161cfc5be31b55775de5ba2235fe49aa28c148236c4e06b"},
{file = "cffi-1.14.0.tar.gz", hash = "sha256:2d384f4a127a15ba701207f7639d94106693b6cd64173d6c8988e2c25f3ac2b6"},
]
ciso8601 = [
{file = "ciso8601-2.1.3.tar.gz", hash = "sha256:bdbb5b366058b1c87735603b23060962c439ac9be66f1ae91e8c7dbd7d59e262"},
]
cryptography = [
{file = "cryptography-2.9.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:daf54a4b07d67ad437ff239c8a4080cfd1cc7213df57d33c97de7b4738048d5e"},
{file = "cryptography-2.9.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:3b3eba865ea2754738616f87292b7f29448aec342a7c720956f8083d252bf28b"},
{file = "cryptography-2.9.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:c447cf087cf2dbddc1add6987bbe2f767ed5317adb2d08af940db517dd704365"},
{file = "cryptography-2.9.2-cp27-cp27m-win32.whl", hash = "sha256:f118a95c7480f5be0df8afeb9a11bd199aa20afab7a96bcf20409b411a3a85f0"},
{file = "cryptography-2.9.2-cp27-cp27m-win_amd64.whl", hash = "sha256:c4fd17d92e9d55b84707f4fd09992081ba872d1a0c610c109c18e062e06a2e55"},
{file = "cryptography-2.9.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d0d5aeaedd29be304848f1c5059074a740fa9f6f26b84c5b63e8b29e73dfc270"},
{file = "cryptography-2.9.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:1e4014639d3d73fbc5ceff206049c5a9a849cefd106a49fa7aaaa25cc0ce35cf"},
{file = "cryptography-2.9.2-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:96c080ae7118c10fcbe6229ab43eb8b090fccd31a09ef55f83f690d1ef619a1d"},
{file = "cryptography-2.9.2-cp35-abi3-manylinux1_x86_64.whl", hash = "sha256:e993468c859d084d5579e2ebee101de8f5a27ce8e2159959b6673b418fd8c785"},
{file = "cryptography-2.9.2-cp35-abi3-manylinux2010_x86_64.whl", hash = "sha256:88c881dd5a147e08d1bdcf2315c04972381d026cdb803325c03fe2b4a8ed858b"},
{file = "cryptography-2.9.2-cp35-cp35m-win32.whl", hash = "sha256:651448cd2e3a6bc2bb76c3663785133c40d5e1a8c1a9c5429e4354201c6024ae"},
{file = "cryptography-2.9.2-cp35-cp35m-win_amd64.whl", hash = "sha256:726086c17f94747cedbee6efa77e99ae170caebeb1116353c6cf0ab67ea6829b"},
{file = "cryptography-2.9.2-cp36-cp36m-win32.whl", hash = "sha256:091d31c42f444c6f519485ed528d8b451d1a0c7bf30e8ca583a0cac44b8a0df6"},
{file = "cryptography-2.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:bb1f0281887d89617b4c68e8db9a2c42b9efebf2702a3c5bf70599421a8623e3"},
{file = "cryptography-2.9.2-cp37-cp37m-win32.whl", hash = "sha256:18452582a3c85b96014b45686af264563e3e5d99d226589f057ace56196ec78b"},
{file = "cryptography-2.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:22e91636a51170df0ae4dcbd250d318fd28c9f491c4e50b625a49964b24fe46e"},
{file = "cryptography-2.9.2-cp38-cp38-win32.whl", hash = "sha256:844a76bc04472e5135b909da6aed84360f522ff5dfa47f93e3dd2a0b84a89fa0"},
{file = "cryptography-2.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:1dfa985f62b137909496e7fc182dac687206d8d089dd03eaeb28ae16eec8e7d5"},
{file = "cryptography-2.9.2.tar.gz", hash = "sha256:a0c30272fb4ddda5f5ffc1089d7405b7a71b0b0f51993cb4e5dbb4590b2fc229"},
]
dill = [
{file = "dill-0.3.1.1.tar.gz", hash = "sha256:42d8ef819367516592a825746a18073ced42ca169ab1f5f4044134703e7a049c"},
]
flake8 = [
{file = "flake8-3.8.1-py2.py3-none-any.whl", hash = "sha256:6c1193b0c3f853ef763969238f6c81e9e63ace9d024518edc020d5f1d6d93195"},
{file = "flake8-3.8.1.tar.gz", hash = "sha256:ea6623797bf9a52f4c9577d780da0bb17d65f870213f7b5bcc9fca82540c31d5"},
]
iso8601 = [
{file = "iso8601-0.1.12-py2.py3-none-any.whl", hash = "sha256:210e0134677cc0d02f6028087fee1df1e1d76d372ee1db0bf30bf66c5c1c89a3"},
{file = "iso8601-0.1.12-py3-none-any.whl", hash = "sha256:bbbae5fb4a7abfe71d4688fd64bff70b91bbd74ef6a99d964bab18f7fdf286dd"},
{file = "iso8601-0.1.12.tar.gz", hash = "sha256:49c4b20e1f38aa5cf109ddcd39647ac419f928512c869dc01d5c7098eddede82"},
]
mccabe = [
{file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
{file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
]
pycodestyle = [
{file = "pycodestyle-2.6.0-py2.py3-none-any.whl", hash = "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367"},
{file = "pycodestyle-2.6.0.tar.gz", hash = "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e"},
]
pycparser = [
{file = "pycparser-2.20-py2.py3-none-any.whl", hash = "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"},
{file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"},
]
pyflakes = [
{file = "pyflakes-2.2.0-py2.py3-none-any.whl", hash = "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92"},
{file = "pyflakes-2.2.0.tar.gz", hash = "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8"},
]
pymysql = [
{file = "PyMySQL-0.9.2-py2.py3-none-any.whl", hash = "sha256:95f057328357e0e13a30e67857a8c694878b0175797a9a203ee7adbfb9b1ec5f"},
{file = "PyMySQL-0.9.2.tar.gz", hash = "sha256:9ec760cbb251c158c19d6c88c17ca00a8632bac713890e465b2be01fdc30713f"},
]
pypika = [
{file = "PyPika-0.37.6.tar.gz", hash = "sha256:64510fa36667e8bb654bdc1be5a3a77bac1dbc2f03d4848efac08e39d9cac6f5"},
]
six = [
{file = "six-1.14.0-py2.py3-none-any.whl", hash = "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c"},
{file = "six-1.14.0.tar.gz", hash = "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a"},
]
sniffio = [
{file = "sniffio-1.1.0-py3-none-any.whl", hash = "sha256:20ed6d5b46f8ae136d00b9dcb807615d83ed82ceea6b2058cecb696765246da5"},
{file = "sniffio-1.1.0.tar.gz", hash = "sha256:8e3810100f69fe0edd463d02ad407112542a11ffdc29f67db2bf3771afb87a21"},
]
taskipy = [
{file = "taskipy-1.2.1-py3-none-any.whl", hash = "sha256:99bdaf5b19791c2345806847147e0fc2d28e1ac9446058def5a8b6b3fc9f23e2"},
{file = "taskipy-1.2.1.tar.gz", hash = "sha256:5eb2c3b1606c896c7fa799848e71e8883b880759224958d07ba760e5db263175"},
]
toml = [
{file = "toml-0.10.0-py2.7.egg", hash = "sha256:f1db651f9657708513243e61e6cc67d101a39bad662eaa9b5546f789338e07a3"},
{file = "toml-0.10.0-py2.py3-none-any.whl", hash = "sha256:235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e"},
{file = "toml-0.10.0.tar.gz", hash = "sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c"},
]
tortoise-orm = []
typing-extensions = [
{file = "typing_extensions-3.7.4.2-py2-none-any.whl", hash = "sha256:f8d2bd89d25bc39dabe7d23df520442fa1d8969b82544370e03d88b5a591c392"},
{file = "typing_extensions-3.7.4.2-py3-none-any.whl", hash = "sha256:6e95524d8a547a91e08f404ae485bbb71962de46967e1b71a0cb89af24e761c5"},
{file = "typing_extensions-3.7.4.2.tar.gz", hash = "sha256:79ee589a3caca649a9bfd2a8de4709837400dfa00b6cc81962a1e6a1815969ae"},
]

pyproject.toml

@@ -9,16 +9,18 @@ python = "^3.8"
tortoise-orm = {git = "https://github.com/tortoise/tortoise-orm.git", branch = "develop"}
aiomysql = "*"
asyncclick = "*"
dill = "*"
[tool.poetry.dev-dependencies]
taskipy = "*"
asynctest = "*"
flake8 = "*"
isort = "*"
black = "*"
[build-system]
requires = ["poetry>=0.12"]
build-backend = "poetry.masonry.api"
[tool.taskipy.tasks]
export = "poetry export -f requirements.txt --without-hashes > requirements.txt"
export = "poetry export -f requirements.txt --without-hashes > requirements.txt"
export-dev = "poetry export -f requirements.txt --dev --without-hashes > requirements-dev.txt"

requirements-dev.txt (new file)

@@ -0,0 +1,24 @@
aiomysql==0.0.20
aiosqlite==0.13.0
anyio==1.3.0
async-generator==1.10
asyncclick==7.0.9
asynctest==0.13.0
cffi==1.14.0
ciso8601==2.1.3; sys_platform != "win32" and implementation_name == "cpython"
cryptography==2.9.2
flake8==3.8.1
iso8601==0.1.12; sys_platform == "win32" or implementation_name != "cpython"
isort==4.3.21
mccabe==0.6.1
pycodestyle==2.6.0
pycparser==2.20
pyflakes==2.2.0
pymysql==0.9.2
pypika==0.37.6
six==1.14.0
sniffio==1.1.0
taskipy==1.2.1
toml==0.10.1
-e git+https://github.com/tortoise/tortoise-orm.git@95c384a4742ee5980f8e4ae934bfdb0d8137bb40#egg=tortoise-orm
typing-extensions==3.7.4.2

requirements.txt

@@ -6,12 +6,10 @@ asyncclick==7.0.9
cffi==1.14.0
ciso8601==2.1.3; sys_platform != "win32" and implementation_name == "cpython"
cryptography==2.9.2
dill==0.3.1.1
iso8601==0.1.12; sys_platform == "win32" or implementation_name != "cpython"
pycparser==2.20
pymysql==0.9.2
pypika==0.37.6
six==1.14.0
sniffio==1.1.0
-e git+https://github.com/tortoise/tortoise-orm.git@72f84f0848dc68041157f03e60cd1c92b0ee5137#egg=tortoise-orm
typing-extensions==3.7.4.2

setup.py

@@ -33,7 +33,7 @@ setup(
include_package_data=True,
zip_safe=True,
entry_points={
'console_scripts': ['alice = alice.cli:cli'],
'console_scripts': ['alice = alice.cli:main'],
},
platforms='any',
keywords=(

tests/backends/mysql/__init__.py

@@ -1,28 +1,19 @@
from asynctest import TestCase
from tortoise import Tortoise
from alice.backends.mysql import MysqlDDL
from alice.migrate import Migrate
from alice.ddl.mysql import MysqlDDL
TORTOISE_ORM = {
'connections': {
'default': 'mysql://root:123456@127.0.0.1:3306/test',
},
'apps': {
'models': {
'models': ['tests.models'],
'default_connection': 'default',
},
}
"connections": {"default": "mysql://root:123456@127.0.0.1:3306/test",},
"apps": {"models": {"models": ["tests.models"], "default_connection": "default",},},
}
class DBTestCase(TestCase):
async def setUp(self) -> None:
await Tortoise.init(config=TORTOISE_ORM)
self.client = Tortoise.get_connection('default')
self.client = Tortoise.get_connection("default")
self.ddl = MysqlDDL(self.client)
self.migrate = Migrate(ddl=self.ddl)
async def tearDown(self) -> None:
await Tortoise.close_connections()

@@ -6,44 +6,50 @@ class TestDDL(DBTestCase):
def test_create_table(self):
ret = self.ddl.create_table(Category)
self.assertEqual(
ret, """CREATE TABLE IF NOT EXISTS `category` (
ret,
"""CREATE TABLE IF NOT EXISTS `category` (
`id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
`slug` VARCHAR(200) NOT NULL,
`name` VARCHAR(200) NOT NULL,
`created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6),
`user_id` INT NOT NULL COMMENT 'User',
CONSTRAINT `fk_category_user_e2e3874c` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE
) CHARACTER SET utf8mb4;""")
) CHARACTER SET utf8mb4;""",
)
def test_drop_table(self):
ret = self.ddl.drop_table(Category)
self.assertEqual(ret, "DROP TABLE category IF EXISTS")
def test_add_column(self):
ret = self.ddl.add_column(Category, Category._meta.fields_map.get('name'))
ret = self.ddl.add_column(Category, Category._meta.fields_map.get("name"))
self.assertEqual(ret, "ALTER TABLE category ADD `name` VARCHAR(200) NOT NULL")
def test_drop_column(self):
ret = self.ddl.drop_column(Category, 'name')
ret = self.ddl.drop_column(Category, "name")
self.assertEqual(ret, "ALTER TABLE category DROP COLUMN name")
def test_add_index(self):
ret = self.ddl.add_index(Category, ['name'])
ret = self.ddl.add_index(Category, ["name"])
self.assertEqual(ret, "ALTER TABLE category ADD INDEX idx_category_name_8b0cb9 (`name`)")
ret = self.ddl.add_index(Category, ['name'], True)
self.assertEqual(ret, "ALTER TABLE category ADD UNIQUE INDEX uid_category_name_8b0cb9 (`name`)")
ret = self.ddl.add_index(Category, ["name"], True)
self.assertEqual(
ret, "ALTER TABLE category ADD UNIQUE INDEX uid_category_name_8b0cb9 (`name`)"
)
def test_drop_index(self):
ret = self.ddl.drop_index(Category, ['name'])
ret = self.ddl.drop_index(Category, ["name"])
self.assertEqual(ret, "ALTER TABLE category DROP INDEX idx_category_name_8b0cb9")
ret = self.ddl.drop_index(Category, ['name'], True)
ret = self.ddl.drop_index(Category, ["name"], True)
self.assertEqual(ret, "ALTER TABLE category DROP INDEX uid_category_name_8b0cb9")
def test_add_fk(self):
ret = self.ddl.add_fk(Category, Category._meta.fields_map.get('user'))
self.assertEqual(ret,
"ALTER TABLE category ADD CONSTRAINT `fk_category_user_366ffa6f` FOREIGN KEY (`user`) REFERENCES `user` (`id`) ON DELETE CASCADE")
ret = self.ddl.add_fk(Category, Category._meta.fields_map.get("user"))
self.assertEqual(
ret,
"ALTER TABLE category ADD CONSTRAINT `fk_category_user_366ffa6f` FOREIGN KEY (`user`) REFERENCES `user` (`id`) ON DELETE CASCADE",
)
def test_drop_fk(self):
ret = self.ddl.drop_fk(Category, Category._meta.fields_map.get('user'))
ret = self.ddl.drop_fk(Category, Category._meta.fields_map.get("user"))
self.assertEqual(ret, "ALTER TABLE category DROP FOREIGN KEY fk_category_user_366ffa6f")

@@ -3,5 +3,4 @@ from tests.backends.mysql import DBTestCase
class TestMigrate(DBTestCase):
async def test_migrate(self):
self.migrate.diff_models_module('tests.models', 'tests.new_models')
print(self.migrate.operators)
pass

tests/models.py

@@ -1,6 +1,7 @@
import datetime
from enum import IntEnum
from tortoise import fields, Model
from tortoise import Model, fields
class ProductType(IntEnum):
@@ -23,41 +24,41 @@ class Status(IntEnum):
class User(Model):
username = fields.CharField(max_length=20, unique=True)
password = fields.CharField(max_length=200)
last_login = fields.DatetimeField(description='Last Login', default=datetime.datetime.now)
is_active = fields.BooleanField(default=True, description='Is Active')
is_superuser = fields.BooleanField(default=False, description='Is SuperUser')
avatar = fields.CharField(max_length=200, default='')
intro = fields.TextField(default='')
last_login = fields.DatetimeField(description="Last Login", default=datetime.datetime.now)
is_active = fields.BooleanField(default=True, description="Is Active")
is_superuser = fields.BooleanField(default=False, description="Is SuperUser")
avatar = fields.CharField(max_length=200, default="")
intro = fields.TextField(default="")
created_at = fields.DatetimeField(auto_now_add=True)
updated_at = fields.DatetimeField(auto_now=True)
def __str__(self):
return f'{self.pk}#{self.username}'
return f"{self.pk}#{self.username}"
class Category(Model):
slug = fields.CharField(max_length=200)
name = fields.CharField(max_length=200)
user = fields.ForeignKeyField('models.User', description='User')
user = fields.ForeignKeyField("models.User", description="User")
created_at = fields.DatetimeField(auto_now_add=True)
def __str__(self):
return f'{self.pk}#{self.name}'
return f"{self.pk}#{self.name}"
class Product(Model):
categories = fields.ManyToManyField('models.Category')
categories = fields.ManyToManyField("models.Category")
name = fields.CharField(max_length=50)
view_num = fields.IntField(description='View Num')
view_num = fields.IntField(description="View Num")
sort = fields.IntField()
is_reviewed = fields.BooleanField(description='Is Reviewed')
type = fields.IntEnumField(ProductType, description='Product Type')
is_reviewed = fields.BooleanField(description="Is Reviewed")
type = fields.IntEnumField(ProductType, description="Product Type")
image = fields.CharField(max_length=200)
body = fields.TextField()
created_at = fields.DatetimeField(auto_now_add=True)
def __str__(self):
return f'{self.pk}#{self.name}'
return f"{self.pk}#{self.name}"
class Config(Model):
@@ -67,4 +68,4 @@ class Config(Model):
status: Status = fields.IntEnumField(Status, default=Status.on)
def __str__(self):
return f'{self.pk}#{self.label}'
return f"{self.pk}#{self.label}"

tests/test_utils.py

@@ -1,9 +1,6 @@
from unittest import TestCase
from alice.utils import cp_models
class TestUtils(TestCase):
def test_cp_models(self):
ret = cp_models('models.py', 'new_models.py', 'new_models')
print(ret)
def test_get_app_connection(self):
pass

tox.ini (new file)

@@ -0,0 +1,19 @@
[tox]
envlist = py{37,38,39}
skip_missing_interpreters = True
[testenv]
whitelist_externals=
make
commands=
make ci
deps =
-r requirements-dev.txt
[testenv:pypy3]
deps =
-r tests/requirements-pypy.txt
setenv =
PYTEST_ADDOPTS="--no-cov"
commands=
make _testall