Compare commits
12 Commits
Author | SHA1 | Date
---|---|---
 | 63e8d06157 | 
 | 68ef8ac676 | 
 | 8b5cf6faa0 | 
 | fac00d45cc | 
 | 6f7893d376 | 
 | b1521c4cc7 | 
 | 24c1f4cb7d | 
 | 661f241dac | 
 | 01787558d6 | 
 | 699b0321a4 | 
 | 4a83021892 | 
 | af63221875 | 

.github/FUNDING.yml (2 changes, vendored)

@@ -1 +1 @@
-custom: ["https://sponsor.long2ice.cn"]
+custom: ["https://sponsor.long2ice.io"]

.github/workflows/ci.yml (6 changes, vendored)

@@ -26,9 +26,9 @@ jobs:
         with:
           python-version: '3.x'
       - name: Install and configure Poetry
-        uses: snok/install-poetry@v1.1.1
-        with:
-          virtualenvs-create: false
+        run: |
+          pip install -U pip poetry
+          poetry config virtualenvs.create false
       - name: CI
         env:
           MYSQL_PASS: root

.github/workflows/pypi.yml (6 changes, vendored)

@@ -12,9 +12,9 @@ jobs:
         with:
           python-version: '3.x'
       - name: Install and configure Poetry
-        uses: snok/install-poetry@v1.1.1
-        with:
-          virtualenvs-create: false
+        run: |
+          pip install -U pip poetry
+          poetry config virtualenvs.create false
       - name: Build dists
        run: make build
      - name: Pypi Publish

CHANGELOG.md (20 changes)

@@ -1,7 +1,27 @@
 # ChangeLog
 
+## 0.6
+
+### 0.6.0
+
+- Change default config file from `aerich.ini` to `pyproject.toml`. (#197)
+
+  **Upgrade note:**
+  1. Run `aerich init -t config.TORTOISE_ORM`.
+  2. Remove `aerich.ini`.
+- Remove `pydantic` dependency. (#198)
+- `inspectdb` support `DATE`. (#215)
+
 ## 0.5
 
+### 0.5.8
+
+- Support `indexes` change. (#193)
+
+### 0.5.7
+
+- Fix no module found error. (#188) (#189)
+
 ### 0.5.6
 
 - Add `Command` class. (#148) (#141) (#123) (#106)

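For upgraders, the settings that previously lived in `aerich.ini` now sit in a `[tool.aerich]` table of `pyproject.toml`; the keys match what the new `init` command writes (see the `aerich/cli.py` diff below). A minimal sketch with illustrative values — the `tortoise_orm` path is whatever you pass to `-t`:

```toml
[tool.aerich]
tortoise_orm = "config.TORTOISE_ORM"
location = "./migrations"
src_folder = "."
```
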
Makefile (2 changes)

@@ -12,7 +12,7 @@ up:
 	@poetry update
 
 deps:
-	@poetry install -E asyncpg -E asyncmy -E aiomysql
+	@poetry install -E asyncpg -E asyncmy
 
 style: deps
 	isort -src $(checkfiles)

README.md (12 changes)

@@ -15,7 +15,7 @@ it's own migration solution.
 Just install from pypi:
 
 ```shell
-> pip install aerich
+pip install aerich
 ```
 
 ## Quick Start
@@ -27,11 +27,8 @@ Usage: aerich [OPTIONS] COMMAND [ARGS]...
 
 Options:
   -V, --version      Show the version and exit.
-  -c, --config TEXT  Config file.  [default: aerich.ini]
+  -c, --config TEXT  Config file.  [default: pyproject.toml]
   --app TEXT         Tortoise-ORM app name.
-  -n, --name TEXT    Name of section in .ini file to use for aerich config.
-                     [default: aerich]
-
   -h, --help         Show this message and exit.
 
 Commands:
@@ -70,10 +67,9 @@ Usage: aerich init [OPTIONS]
 
   Init config file and generate root migrate location.
 
-OOptions:
+Options:
   -t, --tortoise-orm TEXT  Tortoise-ORM config module dict variable, like
                            settings.TORTOISE_ORM.  [required]
-
   --location TEXT          Migrate store location.  [default: ./migrations]
   -s, --src_folder TEXT    Folder of the source, relative to the project root.
   -h, --help               Show this message and exit.
@@ -85,7 +81,7 @@ Initialize the config file and migrations location:
 > aerich init -t tests.backends.mysql.TORTOISE_ORM
 
 Success create migrate location ./migrations
-Success generate config file aerich.ini
+Success write config to pyproject.toml
 ```
 
 ### Init db

aerich/__init__.py

@@ -1,5 +1,3 @@
-__version__ = "0.5.6"
-
 import os
 from pathlib import Path
 from typing import List
@@ -14,7 +12,6 @@ from aerich.inspectdb import InspectDb
 from aerich.migrate import Migrate
 from aerich.models import Aerich
 from aerich.utils import (
-    add_src_path,
     get_app_connection,
     get_app_connection_name,
     get_models_describe,
@@ -29,14 +26,11 @@ class Command:
         tortoise_config: dict,
         app: str = "models",
         location: str = "./migrations",
-        src_folder: str = ".",
     ):
         self.tortoise_config = tortoise_config
         self.app = app
         self.location = location
-        self.src_folder = src_folder
         Migrate.app = app
-        add_src_path(src_folder)
 
     async def init(self):
         await Migrate.init(self.tortoise_config, self.app, self.location)

aerich/cli.py

@@ -1,21 +1,21 @@
 import asyncio
 import os
-from configparser import ConfigParser
 from functools import wraps
 from pathlib import Path
 from typing import List
 
 import click
+import tomlkit
 from click import Context, UsageError
+from tomlkit.exceptions import NonExistentKey
 from tortoise import Tortoise
 
 from aerich.exceptions import DowngradeError
 from aerich.utils import add_src_path, get_tortoise_config
 
-from . import Command, __version__
+from . import Command
 from .enums import Color
+from .version import __version__
 
-parser = ConfigParser()
-
 CONFIG_DEFAULT_VALUES = {
     "src_folder": ".",
@@ -42,39 +42,35 @@ def coro(f):
 @click.option(
     "-c",
     "--config",
-    default="aerich.ini",
+    default="pyproject.toml",
     show_default=True,
     help="Config file.",
 )
 @click.option("--app", required=False, help="Tortoise-ORM app name.")
-@click.option(
-    "-n",
-    "--name",
-    default="aerich",
-    show_default=True,
-    help="Name of section in .ini file to use for aerich config.",
-)
 @click.pass_context
 @coro
-async def cli(ctx: Context, config, app, name):
+async def cli(ctx: Context, config, app):
     ctx.ensure_object(dict)
     ctx.obj["config_file"] = config
-    ctx.obj["name"] = name
 
     invoked_subcommand = ctx.invoked_subcommand
     if invoked_subcommand != "init":
         if not Path(config).exists():
             raise UsageError("You must exec init first", ctx=ctx)
-        parser.read(config)
-
-        location = parser[name]["location"]
-        tortoise_orm = parser[name]["tortoise_orm"]
-        src_folder = parser[name].get("src_folder", CONFIG_DEFAULT_VALUES["src_folder"])
+        with open(config, "r") as f:
+            content = f.read()
+        doc = tomlkit.parse(content)
+        try:
+            tool = doc["tool"]["aerich"]
+            location = tool["location"]
+            tortoise_orm = tool["tortoise_orm"]
+            src_folder = tool.get("src_folder", CONFIG_DEFAULT_VALUES["src_folder"])
+        except NonExistentKey:
+            raise UsageError("You need run aerich init again when upgrade to 0.6.0+")
+        add_src_path(src_folder)
         tortoise_config = get_tortoise_config(ctx, tortoise_orm)
         app = app or list(tortoise_config.get("apps").keys())[0]
-        command = Command(
-            tortoise_config=tortoise_config, app=app, location=location, src_folder=src_folder
-        )
+        command = Command(tortoise_config=tortoise_config, app=app, location=location)
         ctx.obj["command"] = command
         if invoked_subcommand != "init-db":
             if not Path(location, app).exists():
@@ -187,9 +183,6 @@ async def history(ctx: Context):
 @coro
 async def init(ctx: Context, tortoise_orm, location, src_folder):
     config_file = ctx.obj["config_file"]
-    name = ctx.obj["name"]
-    if Path(config_file).exists():
-        return click.secho("Configuration file already created", fg=Color.yellow)
 
     if os.path.isabs(src_folder):
         src_folder = os.path.relpath(os.getcwd(), src_folder)
@@ -201,18 +194,22 @@ async def init(ctx: Context, tortoise_orm, location, src_folder):
     add_src_path(src_folder)
     get_tortoise_config(ctx, tortoise_orm)
 
-    parser.add_section(name)
-    parser.set(name, "tortoise_orm", tortoise_orm)
-    parser.set(name, "location", location)
-    parser.set(name, "src_folder", src_folder)
+    with open(config_file, "r") as f:
+        content = f.read()
+    doc = tomlkit.parse(content)
+    table = tomlkit.table()
+    table["tortoise_orm"] = tortoise_orm
+    table["location"] = location
+    table["src_folder"] = src_folder
+    doc["tool"]["aerich"] = table
 
-    with open(config_file, "w", encoding="utf-8") as f:
-        parser.write(f)
+    with open(config_file, "w") as f:
+        f.write(tomlkit.dumps(doc))
 
     Path(location).mkdir(parents=True, exist_ok=True)
 
     click.secho(f"Success create migrate location {location}", fg=Color.green)
-    click.secho(f"Success generate config file {config_file}", fg=Color.green)
+    click.secho(f"Success write config to {config_file}", fg=Color.green)
 
 
 @cli.command(help="Generate schema and generate app migrate location.")

aerich/inspectdb.py

@@ -16,6 +16,7 @@ class InspectDb:
         "TEXT": " {field} = fields.TextField({null}{default}{comment})",
         "DATETIME": " {field} = fields.DatetimeField({null}{default}{comment})",
         "FLOAT": " {field} = fields.FloatField({null}{default}{comment})",
+        "DATE": " {field} = fields.DateField({null}{default}{comment})",
     }
 
     def __init__(self, conn: BaseDBAsyncClient, tables: Optional[List[str]] = None):

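With the new mapping, `inspectdb` can emit a `DateField` for `DATE` columns. As a rough illustration (the column name `birthday` and its nullability are hypothetical; the exact spacing and trailing arguments follow the template above), a nullable `DATE` column would be rendered along these lines:

```py
birthday = fields.DateField(null=True)
```
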
aerich/migrate.py

@@ -138,21 +138,21 @@ class Migrate:
         return await cls._generate_diff_sql(name)
 
     @classmethod
-    def _add_operator(cls, operator: str, upgrade=True, fk_m2m=False):
+    def _add_operator(cls, operator: str, upgrade=True, fk_m2m_index=False):
         """
         add operator,differentiate fk because fk is order limit
         :param operator:
         :param upgrade:
-        :param fk_m2m:
+        :param fk_m2m_index:
         :return:
         """
         if upgrade:
-            if fk_m2m:
+            if fk_m2m_index:
                 cls._upgrade_fk_m2m_index_operators.append(operator)
             else:
                 cls.upgrade_operators.append(operator)
         else:
-            if fk_m2m:
+            if fk_m2m_index:
                 cls._downgrade_fk_m2m_index_operators.append(operator)
             else:
                 cls.downgrade_operators.append(operator)
@@ -192,7 +192,8 @@ class Migrate:
             new_unique_together = set(
                 map(lambda x: tuple(x), new_model_describe.get("unique_together"))
             )
+            old_indexes = set(map(lambda x: tuple(x), old_model_describe.get("indexes", [])))
+            new_indexes = set(map(lambda x: tuple(x), new_model_describe.get("indexes", [])))
             old_pk_field = old_model_describe.get("pk_field")
             new_pk_field = new_model_describe.get("pk_field")
             # pk field
@@ -224,7 +225,7 @@ class Migrate:
                             new_models.get(change[0][1].get("model_name")),
                         ),
                         upgrade,
-                        fk_m2m=True,
+                        fk_m2m_index=True,
                     )
                 elif action == "remove":
                     add = False
@@ -235,14 +236,19 @@ class Migrate:
                         cls._downgrade_m2m.append(table)
                         add = True
                     if add:
-                        cls._add_operator(cls.drop_m2m(table), upgrade, fk_m2m=True)
+                        cls._add_operator(cls.drop_m2m(table), upgrade, True)
             # add unique_together
             for index in new_unique_together.difference(old_unique_together):
                 cls._add_operator(cls._add_index(model, index, True), upgrade, True)
             # remove unique_together
             for index in old_unique_together.difference(new_unique_together):
                 cls._add_operator(cls._drop_index(model, index, True), upgrade, True)
+            # add indexes
+            for index in new_indexes.difference(old_indexes):
+                cls._add_operator(cls._add_index(model, index, False), upgrade, True)
+            # remove indexes
+            for index in old_indexes.difference(new_indexes):
+                cls._add_operator(cls._drop_index(model, index, False), upgrade, True)
             old_data_fields = old_model_describe.get("data_fields")
             new_data_fields = new_model_describe.get("data_fields")
 
@@ -356,7 +362,7 @@ class Migrate:
                         model, fk_field, new_models.get(fk_field.get("python_type"))
                     ),
                     upgrade,
-                    fk_m2m=True,
+                    fk_m2m_index=True,
                 )
             # drop fk
             for old_fk_field_name in set(old_fk_fields_name).difference(
@@ -371,7 +377,7 @@ class Migrate:
                         model, old_fk_field, old_models.get(old_fk_field.get("python_type"))
                     ),
                     upgrade,
-                    fk_m2m=True,
+                    fk_m2m_index=True,
                 )
             # change fields
             for field_name in set(new_data_fields_name).intersection(set(old_data_fields_name)):

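The index operators above are driven by an `indexes` declaration on a model's `Meta`, the same declaration the test model gains further down in `tests/models.py`. A minimal sketch of the model side, with illustrative field definitions:

```py
from tortoise import fields
from tortoise.models import Model


class Product(Model):
    # Field definitions are illustrative; the Meta declarations are what
    # aerich's migrate step diffs against the stored model description.
    name = fields.CharField(max_length=50)
    type = fields.IntField()

    class Meta:
        unique_together = (("name", "type"),)  # unique composite index
        indexes = (("name", "type"),)  # plain composite index, now also migrated
```
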
aerich/version.py (1 change, new file)

@@ -0,0 +1 @@
+__version__ = "0.6.0"

poetry.lock (717 changes, generated): file diff suppressed because it is too large.

pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "aerich"
-version = "0.5.6"
+version = "0.6.0"
 description = "A database migrations tool for Tortoise ORM."
 authors = ["long2ice <long2ice@gmail.com>"]
 license = "Apache-2.0"
@@ -16,13 +16,13 @@ include = ["CHANGELOG.md", "LICENSE", "README.md"]
 
 [tool.poetry.dependencies]
 python = "^3.7"
-tortoise-orm = "*"
+tortoise-orm = "^0.17.7"
 click = "*"
-pydantic = "*"
-aiomysql = { version = "*", optional = true }
 asyncpg = { version = "*", optional = true }
+asyncmy = { version = "*", optional = true }
 ddlparse = "*"
 dictdiffer = "*"
+tomlkit = "*"
 
 [tool.poetry.dev-dependencies]
 flake8 = "*"
@@ -38,7 +38,6 @@ cryptography = "*"
 [tool.poetry.extras]
 asyncmy = ["asyncmy"]
 asyncpg = ["asyncpg"]
-aiomysql = ["aiomysql"]
 
 [build-system]
 requires = ["poetry>=0.12"]

tests/models.py

@@ -65,6 +65,7 @@ class Product(Model):
 
     class Meta:
         unique_together = (("name", "type"),)
+        indexes = (("name", "type"),)
 
 
 class Config(Model):

tests/test_migrate.py

@@ -17,6 +17,7 @@ old_models_describe = {
         "description": None,
         "docstring": None,
         "unique_together": [],
+        "indexes": [],
         "pk_field": {
             "name": "id",
             "field_type": "IntField",
@@ -151,6 +152,7 @@ old_models_describe = {
         "description": None,
         "docstring": None,
         "unique_together": [],
+        "indexes": [],
         "pk_field": {
             "name": "id",
             "field_type": "IntField",
@@ -242,6 +244,7 @@ old_models_describe = {
         "description": None,
         "docstring": None,
         "unique_together": [],
+        "indexes": [],
         "pk_field": {
             "name": "id",
             "field_type": "IntField",
@@ -334,6 +337,7 @@ old_models_describe = {
         "description": None,
         "docstring": None,
         "unique_together": [],
+        "indexes": [],
         "pk_field": {
             "name": "id",
             "field_type": "IntField",
@@ -512,6 +516,7 @@ old_models_describe = {
         "description": None,
         "docstring": None,
         "unique_together": [],
+        "indexes": [],
         "pk_field": {
             "name": "id",
             "field_type": "IntField",
@@ -681,6 +686,7 @@ old_models_describe = {
         "description": None,
         "docstring": None,
         "unique_together": [],
+        "indexes": [],
         "pk_field": {
             "name": "id",
             "field_type": "IntField",
@@ -793,6 +799,7 @@ def test_migrate(mocker: MockerFixture):
         "ALTER TABLE `configs` RENAME TO `config`",
         "ALTER TABLE `product` RENAME COLUMN `image` TO `pic`",
         "ALTER TABLE `email` RENAME COLUMN `id` TO `email_id`",
+        "ALTER TABLE `product` ADD INDEX `idx_product_name_869427` (`name`, `type_db_alias`)",
         "ALTER TABLE `email` ADD INDEX `idx_email_email_4a1a33` (`email`)",
         "ALTER TABLE `product` ADD UNIQUE INDEX `uid_product_name_869427` (`name`, `type_db_alias`)",
         "ALTER TABLE `product` ALTER COLUMN `view_num` SET DEFAULT 0",
@@ -816,6 +823,7 @@ def test_migrate(mocker: MockerFixture):
         "ALTER TABLE `config` RENAME TO `configs`",
         "ALTER TABLE `product` RENAME COLUMN `pic` TO `image`",
         "ALTER TABLE `email` RENAME COLUMN `email_id` TO `id`",
+        "ALTER TABLE `product` DROP INDEX `idx_product_name_869427`",
         "ALTER TABLE `email` DROP INDEX `idx_email_email_4a1a33`",
         "ALTER TABLE `product` DROP INDEX `uid_product_name_869427`",
         "ALTER TABLE `product` ALTER COLUMN `view_num` DROP DEFAULT",
@@ -843,6 +851,7 @@ def test_migrate(mocker: MockerFixture):
         'ALTER TABLE "product" RENAME COLUMN "image" TO "pic"',
         'ALTER TABLE "user" ALTER COLUMN "password" TYPE VARCHAR(100) USING "password"::VARCHAR(100)',
         'ALTER TABLE "user" DROP COLUMN "avatar"',
+        'CREATE INDEX "idx_product_name_869427" ON "product" ("name", "type_db_alias")',
         'CREATE INDEX "idx_email_email_4a1a33" ON "email" ("email")',
         'CREATE TABLE "email_user" ("email_id" INT NOT NULL REFERENCES "email" ("email_id") ON DELETE CASCADE,"user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE)',
         'CREATE TABLE IF NOT EXISTS "newmodel" (\n "id" SERIAL NOT NULL PRIMARY KEY,\n "name" VARCHAR(50) NOT NULL\n);\nCOMMENT ON COLUMN "config"."user_id" IS \'User\';',
@@ -865,6 +874,7 @@ def test_migrate(mocker: MockerFixture):
         'ALTER TABLE "product" RENAME COLUMN "pic" TO "image"',
         'ALTER TABLE "user" ADD "avatar" VARCHAR(200) NOT NULL DEFAULT \'\'',
         'ALTER TABLE "user" ALTER COLUMN "password" TYPE VARCHAR(200) USING "password"::VARCHAR(200)',
+        'DROP INDEX "idx_product_name_869427"',
         'DROP INDEX "idx_email_email_4a1a33"',
         'DROP INDEX "idx_user_usernam_9987ab"',
         'DROP INDEX "uid_product_name_869427"',