Compare commits
	
		
			50 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | c39462820c | ||
|  | f15cbaf9e0 | ||
|  | 15131469df | ||
|  | c60c1610f0 | ||
|  | 63e8d06157 | ||
|  | 68ef8ac676 | ||
|  | 8b5cf6faa0 | ||
|  | fac00d45cc | ||
|  | 6f7893d376 | ||
|  | b1521c4cc7 | ||
|  | 24c1f4cb7d | ||
|  | 661f241dac | ||
|  | 01787558d6 | ||
|  | 699b0321a4 | ||
|  | 4a83021892 | ||
|  | af63221875 | ||
|  | 359525716c | ||
|  | 7d3eb2e151 | ||
|  | d8abf79449 | ||
|  | aa9f40ae27 | ||
|  | 79b7ae343a | ||
|  | 6f5a9ab78c | ||
|  | 1e5a83c281 | ||
|  | 180420843d | ||
|  | 58f66b91cf | ||
|  | 064d7ff675 | ||
|  | 2da794d823 | ||
|  | 77005f3793 | ||
|  | 5a873b8b69 | ||
|  | 3989b7c674 | ||
|  | 694b05356f | ||
|  | 919d56c936 | ||
|  | 7bcf9b2fed | ||
|  | 9f663299cf | ||
|  | 28dbdf2663 | ||
|  | e71a4b60a5 | ||
|  | 62840136be | ||
|  | 185514f711 | ||
|  | 8e783e031e | ||
|  | 10b7272ca8 | ||
|  | 0c763c6024 | ||
|  | c6371a5c16 | ||
|  | 1dbf9185b6 | ||
|  | 9bf2de0b9a | ||
|  | bf1cf21324 | ||
|  | 8b08329493 | ||
|  | 5bc7d23d95 | ||
|  | a253aa96cb | ||
|  | 15a6e874dd | ||
|  | 19a5dcbf3f | 
							
								
								
									
										2
									
								
								.github/FUNDING.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.github/FUNDING.yml
									
									
									
									
										vendored
									
									
								
							| @@ -1 +1 @@ | |||||||
| custom: ["https://sponsor.long2ice.cn"] | custom: ["https://sponsor.long2ice.io"] | ||||||
|   | |||||||
							
								
								
									
										14
									
								
								.github/workflows/ci.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										14
									
								
								.github/workflows/ci.yml
									
									
									
									
										vendored
									
									
								
							| @@ -1,5 +1,11 @@ | |||||||
| name: ci | name: ci | ||||||
| on: [ push, pull_request ] | on: | ||||||
|  |   push: | ||||||
|  |     branches-ignore: | ||||||
|  |       - master | ||||||
|  |   pull_request: | ||||||
|  |     branches-ignore: | ||||||
|  |       - master | ||||||
| jobs: | jobs: | ||||||
|   ci: |   ci: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
| @@ -20,9 +26,9 @@ jobs: | |||||||
|         with: |         with: | ||||||
|           python-version: '3.x' |           python-version: '3.x' | ||||||
|       - name: Install and configure Poetry |       - name: Install and configure Poetry | ||||||
|         uses: snok/install-poetry@v1.1.1 |         run: | | ||||||
|         with: |           pip install -U pip poetry | ||||||
|           virtualenvs-create: false |           poetry config virtualenvs.create false | ||||||
|       - name: CI |       - name: CI | ||||||
|         env: |         env: | ||||||
|           MYSQL_PASS: root |           MYSQL_PASS: root | ||||||
|   | |||||||
							
								
								
									
										6
									
								
								.github/workflows/pypi.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										6
									
								
								.github/workflows/pypi.yml
									
									
									
									
										vendored
									
									
								
							| @@ -12,9 +12,9 @@ jobs: | |||||||
|         with: |         with: | ||||||
|           python-version: '3.x' |           python-version: '3.x' | ||||||
|       - name: Install and configure Poetry |       - name: Install and configure Poetry | ||||||
|         uses: snok/install-poetry@v1.1.1 |         run: | | ||||||
|         with: |           pip install -U pip poetry | ||||||
|           virtualenvs-create: false |           poetry config virtualenvs.create false | ||||||
|       - name: Build dists |       - name: Build dists | ||||||
|         run: make build |         run: make build | ||||||
|       - name: Pypi Publish |       - name: Pypi Publish | ||||||
|   | |||||||
							
								
								
									
										45
									
								
								CHANGELOG.md
									
									
									
									
									
								
							
							
						
						
									
										45
									
								
								CHANGELOG.md
									
									
									
									
									
								
							| @@ -1,7 +1,52 @@ | |||||||
| # ChangeLog | # ChangeLog | ||||||
|  |  | ||||||
|  | ## 0.6 | ||||||
|  |  | ||||||
|  | ### 0.6.2 | ||||||
|  |  | ||||||
|  | - Support migration for specified index. (#203) | ||||||
|  |  | ||||||
|  | ### 0.6.1 | ||||||
|  |  | ||||||
|  | - Fix `pyproject.toml` not existing error. (#217) | ||||||
|  |  | ||||||
|  | ### 0.6.0 | ||||||
|  |  | ||||||
|  | - Change default config file from `aerich.ini` to `pyproject.toml`. (#197) | ||||||
|  |  | ||||||
|  |   **Upgrade note:** | ||||||
|  |     1. Run `aerich init -t config.TORTOISE_ORM`. | ||||||
|  |     2. Remove `aerich.ini`. | ||||||
|  | - Remove `pydantic` dependency. (#198) | ||||||
|  | - `inspectdb` support `DATE`. (#215) | ||||||
|  |  | ||||||
| ## 0.5 | ## 0.5 | ||||||
|  |  | ||||||
|  | ### 0.5.8 | ||||||
|  |  | ||||||
|  | - Support `indexes` change. (#193) | ||||||
|  |  | ||||||
|  | ### 0.5.7 | ||||||
|  |  | ||||||
|  | - Fix no module found error. (#188) (#189) | ||||||
|  |  | ||||||
|  | ### 0.5.6 | ||||||
|  |  | ||||||
|  | - Add `Command` class. (#148) (#141) (#123) (#106) | ||||||
|  | - Fix: migrate doesn't use source_field in unique_together. (#181) | ||||||
|  |  | ||||||
|  | ### 0.5.5 | ||||||
|  |  | ||||||
|  | - Fix KeyError: 'src_folder' after upgrading aerich to 0.5.4. (#176) | ||||||
|  | - Fix MySQL 5.X rename column. | ||||||
|  | - Fix `db_constraint` when fk changed. (#179) | ||||||
|  |  | ||||||
|  | ### 0.5.4 | ||||||
|  |  | ||||||
|  | - Fix incorrect index creation order. (#151) | ||||||
|  | - Not catch exception when import config. (#164) | ||||||
|  | - Support `drop column` for sqlite. (#40) | ||||||
|  |  | ||||||
| ### 0.5.3 | ### 0.5.3 | ||||||
|  |  | ||||||
| - Fix postgre alter null. (#142) | - Fix postgre alter null. (#142) | ||||||
|   | |||||||
							
								
								
									
										2
									
								
								Makefile
									
									
									
									
									
								
							
							
						
						
									
										2
									
								
								Makefile
									
									
									
									
									
								
							| @@ -12,7 +12,7 @@ up: | |||||||
| 	@poetry update | 	@poetry update | ||||||
|  |  | ||||||
| deps: | deps: | ||||||
| 	@poetry install -E asyncpg -E asyncmy -E aiomysql | 	@poetry install -E asyncpg -E asyncmy | ||||||
|  |  | ||||||
| style: deps | style: deps | ||||||
| 	isort -src $(checkfiles) | 	isort -src $(checkfiles) | ||||||
|   | |||||||
							
								
								
									
										51
									
								
								README.md
									
									
									
									
									
								
							
							
						
						
									
										51
									
								
								README.md
									
									
									
									
									
								
							| @@ -1,13 +1,13 @@ | |||||||
| # Aerich | # Aerich | ||||||
|  |  | ||||||
| [](https://pypi.python.org/pypi/aerich) | [](https://pypi.python.org/pypi/aerich) | ||||||
| [](https://github.com/long2ice/aerich) | [](https://github.com/tortoise/aerich) | ||||||
| [](https://github.com/long2ice/aerich/actions?query=workflow:pypi) | [](https://github.com/tortoise/aerich/actions?query=workflow:pypi) | ||||||
| [](https://github.com/long2ice/aerich/actions?query=workflow:ci) | [](https://github.com/tortoise/aerich/actions?query=workflow:ci) | ||||||
|  |  | ||||||
| ## Introduction | ## Introduction | ||||||
|  |  | ||||||
| Aerich is a database migrations tool for Tortoise-ORM, which is like alembic for SQLAlchemy, or like Django ORM with | Aerich is a database migrations tool for TortoiseORM, which is like alembic for SQLAlchemy, or like Django ORM with | ||||||
| it\'s own migration solution. | it\'s own migration solution. | ||||||
|  |  | ||||||
| ## Install | ## Install | ||||||
| @@ -15,7 +15,7 @@ it\'s own migration solution. | |||||||
| Just install from pypi: | Just install from pypi: | ||||||
|  |  | ||||||
| ```shell | ```shell | ||||||
| > pip install aerich | pip install aerich | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| ## Quick Start | ## Quick Start | ||||||
| @@ -26,10 +26,9 @@ Just install from pypi: | |||||||
| Usage: aerich [OPTIONS] COMMAND [ARGS]... | Usage: aerich [OPTIONS] COMMAND [ARGS]... | ||||||
|  |  | ||||||
| Options: | Options: | ||||||
|   -c, --config TEXT  Config file.  [default: aerich.ini] |   -V, --version      Show the version and exit. | ||||||
|   --app TEXT         Tortoise-ORM app name.  [default: models] |   -c, --config TEXT  Config file.  [default: pyproject.toml] | ||||||
|   -n, --name TEXT    Name of section in .ini file to use for aerich config. |   --app TEXT         Tortoise-ORM app name. | ||||||
|                      [default: aerich] |  | ||||||
|   -h, --help         Show this message and exit. |   -h, --help         Show this message and exit. | ||||||
|  |  | ||||||
| Commands: | Commands: | ||||||
| @@ -40,7 +39,7 @@ Commands: | |||||||
|   init-db    Generate schema and generate app migrate location. |   init-db    Generate schema and generate app migrate location. | ||||||
|   inspectdb  Introspects the database tables to standard output as... |   inspectdb  Introspects the database tables to standard output as... | ||||||
|   migrate    Generate migrate changes file. |   migrate    Generate migrate changes file. | ||||||
|   upgrade    Upgrade to latest version. |   upgrade    Upgrade to specified version. | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| ## Usage | ## Usage | ||||||
| @@ -69,9 +68,10 @@ Usage: aerich init [OPTIONS] | |||||||
|   Init config file and generate root migrate location. |   Init config file and generate root migrate location. | ||||||
|  |  | ||||||
| Options: | Options: | ||||||
|   -t, --tortoise-orm TEXT  Tortoise-ORM config module dict variable, like settings.TORTOISE_ORM. |   -t, --tortoise-orm TEXT  Tortoise-ORM config module dict variable, like | ||||||
|                            [required] |                            settings.TORTOISE_ORM.  [required] | ||||||
|   --location TEXT          Migrate store location.  [default: ./migrations] |   --location TEXT          Migrate store location.  [default: ./migrations] | ||||||
|  |   -s, --src_folder TEXT    Folder of the source, relative to the project root. | ||||||
|   -h, --help               Show this message and exit. |   -h, --help               Show this message and exit. | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| @@ -81,7 +81,7 @@ Initialize the config file and migrations location: | |||||||
| > aerich init -t tests.backends.mysql.TORTOISE_ORM | > aerich init -t tests.backends.mysql.TORTOISE_ORM | ||||||
|  |  | ||||||
| Success create migrate location ./migrations | Success create migrate location ./migrations | ||||||
| Success generate config file aerich.ini | Success write config to pyproject.toml | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| ### Init db | ### Init db | ||||||
| @@ -124,7 +124,7 @@ Now your db is migrated to latest. | |||||||
| ### Downgrade to specified version | ### Downgrade to specified version | ||||||
|  |  | ||||||
| ```shell | ```shell | ||||||
| > aerich init -h | > aerich downgrade -h | ||||||
|  |  | ||||||
| Usage: aerich downgrade [OPTIONS] | Usage: aerich downgrade [OPTIONS] | ||||||
|  |  | ||||||
| @@ -208,6 +208,29 @@ tortoise_orm = { | |||||||
|  |  | ||||||
| You only need to specify `aerich.models` in one app, and must specify `--app` when running `aerich migrate` and so on. | You only need to specify `aerich.models` in one app, and must specify `--app` when running `aerich migrate` and so on. | ||||||
|  |  | ||||||
|  | ## Restore `aerich` workflow | ||||||
|  |  | ||||||
|  | In some cases, such as broken changes from upgrade of `aerich`, you can't run `aerich migrate` or `aerich upgrade`, you | ||||||
|  | can make the following steps: | ||||||
|  |  | ||||||
|  | 1. drop `aerich` table. | ||||||
|  | 2. delete `migrations/{app}` directory. | ||||||
|  | 3. rerun `aerich init-db`. | ||||||
|  |  | ||||||
|  | Note that these actions is safe, also you can do that to reset your migrations if your migration files is too many. | ||||||
|  |  | ||||||
|  | ## Use `aerich` in application | ||||||
|  |  | ||||||
|  | You can use `aerich` out of cli by use `Command` class. | ||||||
|  |  | ||||||
|  | ```python | ||||||
|  | from aerich import Command | ||||||
|  |  | ||||||
|  | command = Command(tortoise_config=config, app='models') | ||||||
|  | await command.init() | ||||||
|  | await command.migrate('test') | ||||||
|  | ``` | ||||||
|  |  | ||||||
| ## License | ## License | ||||||
|  |  | ||||||
| This project is licensed under the | This project is licensed under the | ||||||
|   | |||||||
| @@ -1 +1,138 @@ | |||||||
| __version__ = "0.5.3" | import os | ||||||
|  | from pathlib import Path | ||||||
|  | from typing import List | ||||||
|  |  | ||||||
|  | from tortoise import Tortoise, generate_schema_for_client | ||||||
|  | from tortoise.exceptions import OperationalError | ||||||
|  | from tortoise.transactions import in_transaction | ||||||
|  | from tortoise.utils import get_schema_sql | ||||||
|  |  | ||||||
|  | from aerich.exceptions import DowngradeError | ||||||
|  | from aerich.inspectdb import InspectDb | ||||||
|  | from aerich.migrate import Migrate | ||||||
|  | from aerich.models import Aerich | ||||||
|  | from aerich.utils import ( | ||||||
|  |     get_app_connection, | ||||||
|  |     get_app_connection_name, | ||||||
|  |     get_models_describe, | ||||||
|  |     get_version_content_from_file, | ||||||
|  |     write_version_file, | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class Command: | ||||||
|  |     def __init__( | ||||||
|  |         self, | ||||||
|  |         tortoise_config: dict, | ||||||
|  |         app: str = "models", | ||||||
|  |         location: str = "./migrations", | ||||||
|  |     ): | ||||||
|  |         self.tortoise_config = tortoise_config | ||||||
|  |         self.app = app | ||||||
|  |         self.location = location | ||||||
|  |         Migrate.app = app | ||||||
|  |  | ||||||
|  |     async def init(self): | ||||||
|  |         await Migrate.init(self.tortoise_config, self.app, self.location) | ||||||
|  |  | ||||||
|  |     async def upgrade(self): | ||||||
|  |         migrated = [] | ||||||
|  |         for version_file in Migrate.get_all_version_files(): | ||||||
|  |             try: | ||||||
|  |                 exists = await Aerich.exists(version=version_file, app=self.app) | ||||||
|  |             except OperationalError: | ||||||
|  |                 exists = False | ||||||
|  |             if not exists: | ||||||
|  |                 async with in_transaction( | ||||||
|  |                     get_app_connection_name(self.tortoise_config, self.app) | ||||||
|  |                 ) as conn: | ||||||
|  |                     file_path = Path(Migrate.migrate_location, version_file) | ||||||
|  |                     content = get_version_content_from_file(file_path) | ||||||
|  |                     upgrade_query_list = content.get("upgrade") | ||||||
|  |                     for upgrade_query in upgrade_query_list: | ||||||
|  |                         await conn.execute_script(upgrade_query) | ||||||
|  |                     await Aerich.create( | ||||||
|  |                         version=version_file, | ||||||
|  |                         app=self.app, | ||||||
|  |                         content=get_models_describe(self.app), | ||||||
|  |                     ) | ||||||
|  |                 migrated.append(version_file) | ||||||
|  |         return migrated | ||||||
|  |  | ||||||
|  |     async def downgrade(self, version: int, delete: bool): | ||||||
|  |         ret = [] | ||||||
|  |         if version == -1: | ||||||
|  |             specified_version = await Migrate.get_last_version() | ||||||
|  |         else: | ||||||
|  |             specified_version = await Aerich.filter( | ||||||
|  |                 app=self.app, version__startswith=f"{version}_" | ||||||
|  |             ).first() | ||||||
|  |         if not specified_version: | ||||||
|  |             raise DowngradeError("No specified version found") | ||||||
|  |         if version == -1: | ||||||
|  |             versions = [specified_version] | ||||||
|  |         else: | ||||||
|  |             versions = await Aerich.filter(app=self.app, pk__gte=specified_version.pk) | ||||||
|  |         for version in versions: | ||||||
|  |             file = version.version | ||||||
|  |             async with in_transaction( | ||||||
|  |                 get_app_connection_name(self.tortoise_config, self.app) | ||||||
|  |             ) as conn: | ||||||
|  |                 file_path = Path(Migrate.migrate_location, file) | ||||||
|  |                 content = get_version_content_from_file(file_path) | ||||||
|  |                 downgrade_query_list = content.get("downgrade") | ||||||
|  |                 if not downgrade_query_list: | ||||||
|  |                     raise DowngradeError("No downgrade items found") | ||||||
|  |                 for downgrade_query in downgrade_query_list: | ||||||
|  |                     await conn.execute_query(downgrade_query) | ||||||
|  |                 await version.delete() | ||||||
|  |                 if delete: | ||||||
|  |                     os.unlink(file_path) | ||||||
|  |                 ret.append(file) | ||||||
|  |         return ret | ||||||
|  |  | ||||||
|  |     async def heads(self): | ||||||
|  |         ret = [] | ||||||
|  |         versions = Migrate.get_all_version_files() | ||||||
|  |         for version in versions: | ||||||
|  |             if not await Aerich.exists(version=version, app=self.app): | ||||||
|  |                 ret.append(version) | ||||||
|  |         return ret | ||||||
|  |  | ||||||
|  |     async def history(self): | ||||||
|  |         ret = [] | ||||||
|  |         versions = Migrate.get_all_version_files() | ||||||
|  |         for version in versions: | ||||||
|  |             ret.append(version) | ||||||
|  |         return ret | ||||||
|  |  | ||||||
|  |     async def inspectdb(self, tables: List[str]): | ||||||
|  |         connection = get_app_connection(self.tortoise_config, self.app) | ||||||
|  |         inspect = InspectDb(connection, tables) | ||||||
|  |         await inspect.inspect() | ||||||
|  |  | ||||||
|  |     async def migrate(self, name: str = "update"): | ||||||
|  |         return await Migrate.migrate(name) | ||||||
|  |  | ||||||
|  |     async def init_db(self, safe: bool): | ||||||
|  |         location = self.location | ||||||
|  |         app = self.app | ||||||
|  |         dirname = Path(location, app) | ||||||
|  |         dirname.mkdir(parents=True) | ||||||
|  |  | ||||||
|  |         await Tortoise.init(config=self.tortoise_config) | ||||||
|  |         connection = get_app_connection(self.tortoise_config, app) | ||||||
|  |         await generate_schema_for_client(connection, safe) | ||||||
|  |  | ||||||
|  |         schema = get_schema_sql(connection, safe) | ||||||
|  |  | ||||||
|  |         version = await Migrate.generate_version() | ||||||
|  |         await Aerich.create( | ||||||
|  |             version=version, | ||||||
|  |             app=app, | ||||||
|  |             content=get_models_describe(app), | ||||||
|  |         ) | ||||||
|  |         content = { | ||||||
|  |             "upgrade": [schema], | ||||||
|  |         } | ||||||
|  |         write_version_file(Path(dirname, version), content) | ||||||
|   | |||||||
							
								
								
									
										242
									
								
								aerich/cli.py
									
									
									
									
									
								
							
							
						
						
									
										242
									
								
								aerich/cli.py
									
									
									
									
									
								
							| @@ -1,34 +1,24 @@ | |||||||
| import asyncio | import asyncio | ||||||
| import os | import os | ||||||
| import sys |  | ||||||
| from configparser import ConfigParser |  | ||||||
| from functools import wraps | from functools import wraps | ||||||
| from pathlib import Path | from pathlib import Path | ||||||
| from typing import List | from typing import List | ||||||
|  |  | ||||||
| import click | import click | ||||||
|  | import tomlkit | ||||||
| from click import Context, UsageError | from click import Context, UsageError | ||||||
| from tortoise import Tortoise, generate_schema_for_client | from tomlkit.exceptions import NonExistentKey | ||||||
| from tortoise.exceptions import OperationalError | from tortoise import Tortoise | ||||||
| from tortoise.transactions import in_transaction |  | ||||||
| from tortoise.utils import get_schema_sql |  | ||||||
|  |  | ||||||
| from aerich.inspectdb import InspectDb | from aerich import Command | ||||||
| from aerich.migrate import Migrate | from aerich.enums import Color | ||||||
| from aerich.utils import ( | from aerich.exceptions import DowngradeError | ||||||
|     get_app_connection, | from aerich.utils import add_src_path, get_tortoise_config | ||||||
|     get_app_connection_name, | from aerich.version import __version__ | ||||||
|     get_models_describe, |  | ||||||
|     get_tortoise_config, |  | ||||||
|     get_version_content_from_file, |  | ||||||
|     write_version_file, |  | ||||||
| ) |  | ||||||
|  |  | ||||||
| from . import __version__ | CONFIG_DEFAULT_VALUES = { | ||||||
| from .enums import Color |     "src_folder": ".", | ||||||
| from .models import Aerich | } | ||||||
|  |  | ||||||
| parser = ConfigParser() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def coro(f): | def coro(f): | ||||||
| @@ -49,42 +39,42 @@ def coro(f): | |||||||
| @click.group(context_settings={"help_option_names": ["-h", "--help"]}) | @click.group(context_settings={"help_option_names": ["-h", "--help"]}) | ||||||
| @click.version_option(__version__, "-V", "--version") | @click.version_option(__version__, "-V", "--version") | ||||||
| @click.option( | @click.option( | ||||||
|     "-c", "--config", default="aerich.ini", show_default=True, help="Config file.", |     "-c", | ||||||
|  |     "--config", | ||||||
|  |     default="pyproject.toml", | ||||||
|  |     show_default=True, | ||||||
|  |     help="Config file.", | ||||||
| ) | ) | ||||||
| @click.option("--app", required=False, help="Tortoise-ORM app name.") | @click.option("--app", required=False, help="Tortoise-ORM app name.") | ||||||
| @click.option( |  | ||||||
|     "-n", |  | ||||||
|     "--name", |  | ||||||
|     default="aerich", |  | ||||||
|     show_default=True, |  | ||||||
|     help="Name of section in .ini file to use for aerich config.", |  | ||||||
| ) |  | ||||||
| @click.pass_context | @click.pass_context | ||||||
| @coro | @coro | ||||||
| async def cli(ctx: Context, config, app, name): | async def cli(ctx: Context, config, app): | ||||||
|     ctx.ensure_object(dict) |     ctx.ensure_object(dict) | ||||||
|     ctx.obj["config_file"] = config |     ctx.obj["config_file"] = config | ||||||
|     ctx.obj["name"] = name |  | ||||||
|  |  | ||||||
|     invoked_subcommand = ctx.invoked_subcommand |     invoked_subcommand = ctx.invoked_subcommand | ||||||
|     if invoked_subcommand != "init": |     if invoked_subcommand != "init": | ||||||
|         if not Path(config).exists(): |         if not Path(config).exists(): | ||||||
|             raise UsageError("You must exec init first", ctx=ctx) |             raise UsageError("You must exec init first", ctx=ctx) | ||||||
|         parser.read(config) |         with open(config, "r") as f: | ||||||
|  |             content = f.read() | ||||||
|         location = parser[name]["location"] |         doc = tomlkit.parse(content) | ||||||
|         tortoise_orm = parser[name]["tortoise_orm"] |         try: | ||||||
|  |             tool = doc["tool"]["aerich"] | ||||||
|  |             location = tool["location"] | ||||||
|  |             tortoise_orm = tool["tortoise_orm"] | ||||||
|  |             src_folder = tool.get("src_folder", CONFIG_DEFAULT_VALUES["src_folder"]) | ||||||
|  |         except NonExistentKey: | ||||||
|  |             raise UsageError("You need run aerich init again when upgrade to 0.6.0+") | ||||||
|  |         add_src_path(src_folder) | ||||||
|         tortoise_config = get_tortoise_config(ctx, tortoise_orm) |         tortoise_config = get_tortoise_config(ctx, tortoise_orm) | ||||||
|         app = app or list(tortoise_config.get("apps").keys())[0] |         app = app or list(tortoise_config.get("apps").keys())[0] | ||||||
|         ctx.obj["config"] = tortoise_config |         command = Command(tortoise_config=tortoise_config, app=app, location=location) | ||||||
|         ctx.obj["location"] = location |         ctx.obj["command"] = command | ||||||
|         ctx.obj["app"] = app |  | ||||||
|         Migrate.app = app |  | ||||||
|         if invoked_subcommand != "init-db": |         if invoked_subcommand != "init-db": | ||||||
|             if not Path(location, app).exists(): |             if not Path(location, app).exists(): | ||||||
|                 raise UsageError("You must exec init-db first", ctx=ctx) |                 raise UsageError("You must exec init-db first", ctx=ctx) | ||||||
|             await Migrate.init(tortoise_config, app, location) |             await command.init() | ||||||
|  |  | ||||||
|  |  | ||||||
| @cli.command(help="Generate migrate changes file.") | @cli.command(help="Generate migrate changes file.") | ||||||
| @@ -92,7 +82,8 @@ async def cli(ctx: Context, config, app, name): | |||||||
| @click.pass_context | @click.pass_context | ||||||
| @coro | @coro | ||||||
| async def migrate(ctx: Context, name): | async def migrate(ctx: Context, name): | ||||||
|     ret = await Migrate.migrate(name) |     command = ctx.obj["command"] | ||||||
|  |     ret = await command.migrate(name) | ||||||
|     if not ret: |     if not ret: | ||||||
|         return click.secho("No changes detected", fg=Color.yellow) |         return click.secho("No changes detected", fg=Color.yellow) | ||||||
|     click.secho(f"Success migrate {ret}", fg=Color.green) |     click.secho(f"Success migrate {ret}", fg=Color.green) | ||||||
| @@ -102,28 +93,13 @@ async def migrate(ctx: Context, name): | |||||||
| @click.pass_context | @click.pass_context | ||||||
| @coro | @coro | ||||||
| async def upgrade(ctx: Context): | async def upgrade(ctx: Context): | ||||||
|     config = ctx.obj["config"] |     command = ctx.obj["command"] | ||||||
|     app = ctx.obj["app"] |     migrated = await command.upgrade() | ||||||
|     migrated = False |  | ||||||
|     for version_file in Migrate.get_all_version_files(): |  | ||||||
|         try: |  | ||||||
|             exists = await Aerich.exists(version=version_file, app=app) |  | ||||||
|         except OperationalError: |  | ||||||
|             exists = False |  | ||||||
|         if not exists: |  | ||||||
|             async with in_transaction(get_app_connection_name(config, app)) as conn: |  | ||||||
|                 file_path = Path(Migrate.migrate_location, version_file) |  | ||||||
|                 content = get_version_content_from_file(file_path) |  | ||||||
|                 upgrade_query_list = content.get("upgrade") |  | ||||||
|                 for upgrade_query in upgrade_query_list: |  | ||||||
|                     await conn.execute_script(upgrade_query) |  | ||||||
|                 await Aerich.create( |  | ||||||
|                     version=version_file, app=app, content=get_models_describe(app), |  | ||||||
|                 ) |  | ||||||
|             click.secho(f"Success upgrade {version_file}", fg=Color.green) |  | ||||||
|             migrated = True |  | ||||||
|     if not migrated: |     if not migrated: | ||||||
|         click.secho("No upgrade items found", fg=Color.yellow) |         click.secho("No upgrade items found", fg=Color.yellow) | ||||||
|  |     else: | ||||||
|  |         for version_file in migrated: | ||||||
|  |             click.secho(f"Success upgrade {version_file}", fg=Color.green) | ||||||
|  |  | ||||||
|  |  | ||||||
| @cli.command(help="Downgrade to specified version.") | @cli.command(help="Downgrade to specified version.") | ||||||
| @@ -149,32 +125,12 @@ async def upgrade(ctx: Context): | |||||||
| ) | ) | ||||||
| @coro | @coro | ||||||
| async def downgrade(ctx: Context, version: int, delete: bool): | async def downgrade(ctx: Context, version: int, delete: bool): | ||||||
|     app = ctx.obj["app"] |     command = ctx.obj["command"] | ||||||
|     config = ctx.obj["config"] |     try: | ||||||
|     if version == -1: |         files = await command.downgrade(version, delete) | ||||||
|         specified_version = await Migrate.get_last_version() |     except DowngradeError as e: | ||||||
|     else: |         return click.secho(str(e), fg=Color.yellow) | ||||||
|         specified_version = await Aerich.filter(app=app, version__startswith=f"{version}_").first() |     for file in files: | ||||||
|     if not specified_version: |  | ||||||
|         return click.secho("No specified version found", fg=Color.yellow) |  | ||||||
|     if version == -1: |  | ||||||
|         versions = [specified_version] |  | ||||||
|     else: |  | ||||||
|         versions = await Aerich.filter(app=app, pk__gte=specified_version.pk) |  | ||||||
|     for version in versions: |  | ||||||
|         file = version.version |  | ||||||
|         async with in_transaction(get_app_connection_name(config, app)) as conn: |  | ||||||
|             file_path = Path(Migrate.migrate_location, file) |  | ||||||
|             content = get_version_content_from_file(file_path) |  | ||||||
|             downgrade_query_list = content.get("downgrade") |  | ||||||
|             if not downgrade_query_list: |  | ||||||
|                 click.secho("No downgrade items found", fg=Color.yellow) |  | ||||||
|                 return |  | ||||||
|             for downgrade_query in downgrade_query_list: |  | ||||||
|                 await conn.execute_query(downgrade_query) |  | ||||||
|             await version.delete() |  | ||||||
|             if delete: |  | ||||||
|                 os.unlink(file_path) |  | ||||||
|         click.secho(f"Success downgrade {file}", fg=Color.green) |         click.secho(f"Success downgrade {file}", fg=Color.green) | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -182,26 +138,24 @@ async def downgrade(ctx: Context, version: int, delete: bool): | |||||||
| @click.pass_context | @click.pass_context | ||||||
| @coro | @coro | ||||||
| async def heads(ctx: Context): | async def heads(ctx: Context): | ||||||
|     app = ctx.obj["app"] |     command = ctx.obj["command"] | ||||||
|     versions = Migrate.get_all_version_files() |     head_list = await command.heads() | ||||||
|     is_heads = False |     if not head_list: | ||||||
|     for version in versions: |         return click.secho("No available heads, try migrate first", fg=Color.green) | ||||||
|         if not await Aerich.exists(version=version, app=app): |     for version in head_list: | ||||||
|         click.secho(version, fg=Color.green) |         click.secho(version, fg=Color.green) | ||||||
|             is_heads = True |  | ||||||
|     if not is_heads: |  | ||||||
|         click.secho("No available heads,try migrate first", fg=Color.green) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @cli.command(help="List all migrate items.") | @cli.command(help="List all migrate items.") | ||||||
| @click.pass_context | @click.pass_context | ||||||
| @coro | @coro | ||||||
| async def history(ctx: Context): | async def history(ctx: Context): | ||||||
|     versions = Migrate.get_all_version_files() |     command = ctx.obj["command"] | ||||||
|  |     versions = await command.history() | ||||||
|  |     if not versions: | ||||||
|  |         return click.secho("No history, try migrate", fg=Color.green) | ||||||
|     for version in versions: |     for version in versions: | ||||||
|         click.secho(version, fg=Color.green) |         click.secho(version, fg=Color.green) | ||||||
|     if not versions: |  | ||||||
|         click.secho("No history,try migrate", fg=Color.green) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @cli.command(help="Init config file and generate root migrate location.") | @cli.command(help="Init config file and generate root migrate location.") | ||||||
| @@ -212,29 +166,51 @@ async def history(ctx: Context): | |||||||
|     help="Tortoise-ORM config module dict variable, like settings.TORTOISE_ORM.", |     help="Tortoise-ORM config module dict variable, like settings.TORTOISE_ORM.", | ||||||
| ) | ) | ||||||
| @click.option( | @click.option( | ||||||
|     "--location", default="./migrations", show_default=True, help="Migrate store location.", |     "--location", | ||||||
|  |     default="./migrations", | ||||||
|  |     show_default=True, | ||||||
|  |     help="Migrate store location.", | ||||||
|  | ) | ||||||
|  | @click.option( | ||||||
|  |     "-s", | ||||||
|  |     "--src_folder", | ||||||
|  |     default=CONFIG_DEFAULT_VALUES["src_folder"], | ||||||
|  |     show_default=False, | ||||||
|  |     help="Folder of the source, relative to the project root.", | ||||||
| ) | ) | ||||||
| @click.pass_context | @click.pass_context | ||||||
| @coro | @coro | ||||||
| async def init( | async def init(ctx: Context, tortoise_orm, location, src_folder): | ||||||
|     ctx: Context, tortoise_orm, location, |  | ||||||
| ): |  | ||||||
|     config_file = ctx.obj["config_file"] |     config_file = ctx.obj["config_file"] | ||||||
|     name = ctx.obj["name"] |  | ||||||
|  |     if os.path.isabs(src_folder): | ||||||
|  |         src_folder = os.path.relpath(os.getcwd(), src_folder) | ||||||
|  |     # Add ./ so it's clear that this is relative path | ||||||
|  |     if not src_folder.startswith("./"): | ||||||
|  |         src_folder = "./" + src_folder | ||||||
|  |  | ||||||
|  |     # check that we can find the configuration, if not we can fail before the config file gets created | ||||||
|  |     add_src_path(src_folder) | ||||||
|  |     get_tortoise_config(ctx, tortoise_orm) | ||||||
|     if Path(config_file).exists(): |     if Path(config_file).exists(): | ||||||
|         return click.secho("You have inited", fg=Color.yellow) |         with open(config_file, "r") as f: | ||||||
|  |             content = f.read() | ||||||
|  |         doc = tomlkit.parse(content) | ||||||
|  |     else: | ||||||
|  |         doc = tomlkit.parse("[tool.aerich]") | ||||||
|  |     table = tomlkit.table() | ||||||
|  |     table["tortoise_orm"] = tortoise_orm | ||||||
|  |     table["location"] = location | ||||||
|  |     table["src_folder"] = src_folder | ||||||
|  |     doc["tool"]["aerich"] = table | ||||||
|  |  | ||||||
|     parser.add_section(name) |     with open(config_file, "w") as f: | ||||||
|     parser.set(name, "tortoise_orm", tortoise_orm) |         f.write(tomlkit.dumps(doc)) | ||||||
|     parser.set(name, "location", location) |  | ||||||
|  |  | ||||||
|     with open(config_file, "w", encoding="utf-8") as f: |  | ||||||
|         parser.write(f) |  | ||||||
|  |  | ||||||
|     Path(location).mkdir(parents=True, exist_ok=True) |     Path(location).mkdir(parents=True, exist_ok=True) | ||||||
|  |  | ||||||
|     click.secho(f"Success create migrate location {location}", fg=Color.green) |     click.secho(f"Success create migrate location {location}", fg=Color.green) | ||||||
|     click.secho(f"Success generate config file {config_file}", fg=Color.green) |     click.secho(f"Success write config to {config_file}", fg=Color.green) | ||||||
|  |  | ||||||
|  |  | ||||||
| @cli.command(help="Generate schema and generate app migrate location.") | @cli.command(help="Generate schema and generate app migrate location.") | ||||||
| @@ -248,53 +224,35 @@ async def init( | |||||||
| @click.pass_context | @click.pass_context | ||||||
| @coro | @coro | ||||||
| async def init_db(ctx: Context, safe): | async def init_db(ctx: Context, safe): | ||||||
|     config = ctx.obj["config"] |     command = ctx.obj["command"] | ||||||
|     location = ctx.obj["location"] |     app = command.app | ||||||
|     app = ctx.obj["app"] |     dirname = Path(command.location, app) | ||||||
|  |  | ||||||
|     dirname = Path(location, app) |  | ||||||
|     try: |     try: | ||||||
|         dirname.mkdir(parents=True) |         await command.init_db(safe) | ||||||
|         click.secho(f"Success create app migrate location {dirname}", fg=Color.green) |         click.secho(f"Success create app migrate location {dirname}", fg=Color.green) | ||||||
|  |         click.secho(f'Success generate schema for app "{app}"', fg=Color.green) | ||||||
|     except FileExistsError: |     except FileExistsError: | ||||||
|         return click.secho( |         return click.secho( | ||||||
|             f"Inited {app} already, or delete {dirname} and try again.", fg=Color.yellow |             f"Inited {app} already, or delete {dirname} and try again.", fg=Color.yellow | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     await Tortoise.init(config=config) |  | ||||||
|     connection = get_app_connection(config, app) |  | ||||||
|     await generate_schema_for_client(connection, safe) |  | ||||||
|  |  | ||||||
|     schema = get_schema_sql(connection, safe) |  | ||||||
|  |  | ||||||
|     version = await Migrate.generate_version() |  | ||||||
|     await Aerich.create( |  | ||||||
|         version=version, app=app, content=get_models_describe(app), |  | ||||||
|     ) |  | ||||||
|     content = { |  | ||||||
|         "upgrade": [schema], |  | ||||||
|     } |  | ||||||
|     write_version_file(Path(dirname, version), content) |  | ||||||
|     click.secho(f'Success generate schema for app "{app}"', fg=Color.green) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @cli.command(help="Introspects the database tables to standard output as TortoiseORM model.") | @cli.command(help="Introspects the database tables to standard output as TortoiseORM model.") | ||||||
| @click.option( | @click.option( | ||||||
|     "-t", "--table", help="Which tables to inspect.", multiple=True, required=False, |     "-t", | ||||||
|  |     "--table", | ||||||
|  |     help="Which tables to inspect.", | ||||||
|  |     multiple=True, | ||||||
|  |     required=False, | ||||||
| ) | ) | ||||||
| @click.pass_context | @click.pass_context | ||||||
| @coro | @coro | ||||||
| async def inspectdb(ctx: Context, table: List[str]): | async def inspectdb(ctx: Context, table: List[str]): | ||||||
|     config = ctx.obj["config"] |     command = ctx.obj["command"] | ||||||
|     app = ctx.obj["app"] |     await command.inspectdb(table) | ||||||
|     connection = get_app_connection(config, app) |  | ||||||
|  |  | ||||||
|     inspect = InspectDb(connection, table) |  | ||||||
|     await inspect.inspect() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def main(): | def main(): | ||||||
|     sys.path.insert(0, ".") |  | ||||||
|     cli() |     cli() | ||||||
|  |  | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
										31
									
								
								aerich/coder.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										31
									
								
								aerich/coder.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,31 @@ | |||||||
|  | import base64 | ||||||
|  | import json | ||||||
|  | import pickle  # nosec: B301 | ||||||
|  |  | ||||||
|  | from tortoise.indexes import Index | ||||||
|  |  | ||||||
|  |  | ||||||
class JsonEncoder(json.JSONEncoder):
    """JSON encoder that serializes tortoise ``Index`` objects as pickled payloads."""

    def default(self, obj):
        # Anything that is not an Index falls through to the stock encoder,
        # which raises TypeError for unsupported types as usual.
        if not isinstance(obj, Index):
            return super().default(obj)
        # Pickle the Index and wrap it in a tagged dict so object_hook can
        # recognize and restore it on the way back in.
        payload = base64.b64encode(pickle.dumps(obj)).decode()  # nosec: B301
        return {"type": "index", "val": payload}
|  |  | ||||||
|  |  | ||||||
def object_hook(obj):
    """Inverse of ``JsonEncoder``: revive dicts tagged with ``"type": "index"``.

    :param obj: a dict decoded from JSON.
    :return: the unpickled Index for tagged payloads, otherwise ``obj`` unchanged.
    """
    if obj.get("type") != "index":
        # Only the exact tag emitted by JsonEncoder is a pickled payload.
        # The previous check (`if not _type`) unpickled ANY dict whose "type"
        # key was truthy, which raised KeyError for ordinary data like
        # {"type": "user"}. Unknown tags now pass through untouched.
        return obj
    return pickle.loads(base64.b64decode(obj["val"]))  # nosec: B301
|  |  | ||||||
|  |  | ||||||
def encoder(obj: dict):
    """Dump ``obj`` to a JSON string, serializing Index values via ``JsonEncoder``."""
    # json.dumps(obj, cls=JsonEncoder) with no other kwargs is exactly
    # JsonEncoder().encode(obj); spelled out here for symmetry with decoder.
    return JsonEncoder().encode(obj)
|  |  | ||||||
|  |  | ||||||
def decoder(obj: str):
    """Parse a JSON string produced by ``encoder``, reviving tagged Index payloads."""
    # Equivalent to json.loads(obj, object_hook=object_hook).
    dec = json.JSONDecoder(object_hook=object_hook)
    return dec.decode(obj)
| @@ -78,11 +78,15 @@ class BaseDDL: | |||||||
|         auto_now_add = field_describe.get("auto_now_add", False) |         auto_now_add = field_describe.get("auto_now_add", False) | ||||||
|         auto_now = field_describe.get("auto_now", False) |         auto_now = field_describe.get("auto_now", False) | ||||||
|         if default is not None or auto_now_add: |         if default is not None or auto_now_add: | ||||||
|             if field_describe.get("field_type") in [ |             if ( | ||||||
|  |                 field_describe.get("field_type") | ||||||
|  |                 in [ | ||||||
|                     "UUIDField", |                     "UUIDField", | ||||||
|                     "TextField", |                     "TextField", | ||||||
|                     "JSONField", |                     "JSONField", | ||||||
|             ] or is_default_function(default): |                 ] | ||||||
|  |                 or is_default_function(default) | ||||||
|  |             ): | ||||||
|                 default = "" |                 default = "" | ||||||
|             else: |             else: | ||||||
|                 try: |                 try: | ||||||
| @@ -115,7 +119,9 @@ class BaseDDL: | |||||||
|                 nullable="NOT NULL" if not field_describe.get("nullable") else "", |                 nullable="NOT NULL" if not field_describe.get("nullable") else "", | ||||||
|                 unique="UNIQUE" if field_describe.get("unique") else "", |                 unique="UNIQUE" if field_describe.get("unique") else "", | ||||||
|                 comment=self.schema_generator._column_comment_generator( |                 comment=self.schema_generator._column_comment_generator( | ||||||
|                     table=db_table, column=db_column, comment=field_describe.get("description"), |                     table=db_table, | ||||||
|  |                     column=db_column, | ||||||
|  |                     comment=field_describe.get("description"), | ||||||
|                 ) |                 ) | ||||||
|                 if description |                 if description | ||||||
|                 else "", |                 else "", | ||||||
| @@ -189,6 +195,12 @@ class BaseDDL: | |||||||
|             table_name=model._meta.db_table, |             table_name=model._meta.db_table, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|  |     def drop_index_by_name(self, model: "Type[Model]", index_name: str): | ||||||
|  |         return self._DROP_INDEX_TEMPLATE.format( | ||||||
|  |             index_name=index_name, | ||||||
|  |             table_name=model._meta.db_table, | ||||||
|  |         ) | ||||||
|  |  | ||||||
|     def add_fk(self, model: "Type[Model]", field_describe: dict, reference_table_describe: dict): |     def add_fk(self, model: "Type[Model]", field_describe: dict, reference_table_describe: dict): | ||||||
|         db_table = model._meta.db_table |         db_table = model._meta.db_table | ||||||
|  |  | ||||||
|   | |||||||
| @@ -11,9 +11,6 @@ class SqliteDDL(BaseDDL): | |||||||
|     schema_generator_cls = SqliteSchemaGenerator |     schema_generator_cls = SqliteSchemaGenerator | ||||||
|     DIALECT = SqliteSchemaGenerator.DIALECT |     DIALECT = SqliteSchemaGenerator.DIALECT | ||||||
|  |  | ||||||
|     def drop_column(self, model: "Type[Model]", column_name: str): |  | ||||||
|         raise NotSupportError("Drop column is unsupported in SQLite.") |  | ||||||
|  |  | ||||||
|     def modify_column(self, model: "Type[Model]", field_object: dict, is_pk: bool = True): |     def modify_column(self, model: "Type[Model]", field_object: dict, is_pk: bool = True): | ||||||
|         raise NotSupportError("Modify column is unsupported in SQLite.") |         raise NotSupportError("Modify column is unsupported in SQLite.") | ||||||
|  |  | ||||||
|   | |||||||
| @@ -2,3 +2,9 @@ class NotSupportError(Exception): | |||||||
|     """ |     """ | ||||||
|     raise when features not support |     raise when features not support | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|  |  | ||||||
class DowngradeError(Exception):
    """
    Raised when a migration downgrade cannot be performed.
    """
|   | |||||||
| @@ -16,6 +16,7 @@ class InspectDb: | |||||||
|         "TEXT": "    {field} = fields.TextField({null}{default}{comment})", |         "TEXT": "    {field} = fields.TextField({null}{default}{comment})", | ||||||
|         "DATETIME": "    {field} = fields.DatetimeField({null}{default}{comment})", |         "DATETIME": "    {field} = fields.DatetimeField({null}{default}{comment})", | ||||||
|         "FLOAT": "    {field} = fields.FloatField({null}{default}{comment})", |         "FLOAT": "    {field} = fields.FloatField({null}{default}{comment})", | ||||||
|  |         "DATE": "    {field} = fields.DateField({null}{default}{comment})", | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     def __init__(self, conn: BaseDBAsyncClient, tables: Optional[List[str]] = None): |     def __init__(self, conn: BaseDBAsyncClient, tables: Optional[List[str]] = None): | ||||||
|   | |||||||
| @@ -1,12 +1,14 @@ | |||||||
| import os | import os | ||||||
| from datetime import datetime | from datetime import datetime | ||||||
|  | from hashlib import md5 | ||||||
| from pathlib import Path | from pathlib import Path | ||||||
| from typing import Dict, List, Optional, Tuple, Type | from typing import Dict, List, Optional, Tuple, Type, Union | ||||||
|  |  | ||||||
| import click | import click | ||||||
| from dictdiffer import diff | from dictdiffer import diff | ||||||
| from tortoise import BaseDBAsyncClient, Model, Tortoise | from tortoise import BaseDBAsyncClient, Model, Tortoise | ||||||
| from tortoise.exceptions import OperationalError | from tortoise.exceptions import OperationalError | ||||||
|  | from tortoise.indexes import Index | ||||||
|  |  | ||||||
| from aerich.ddl import BaseDDL | from aerich.ddl import BaseDDL | ||||||
| from aerich.models import MAX_VERSION_LENGTH, Aerich | from aerich.models import MAX_VERSION_LENGTH, Aerich | ||||||
| @@ -32,7 +34,7 @@ class Migrate: | |||||||
|     ddl: BaseDDL |     ddl: BaseDDL | ||||||
|     _last_version_content: Optional[dict] = None |     _last_version_content: Optional[dict] = None | ||||||
|     app: str |     app: str | ||||||
|     migrate_location: str |     migrate_location: Path | ||||||
|     dialect: str |     dialect: str | ||||||
|     _db_version: Optional[str] = None |     _db_version: Optional[str] = None | ||||||
|  |  | ||||||
| @@ -113,8 +115,8 @@ class Migrate: | |||||||
|             if version_file.startswith(version.split("_")[0]): |             if version_file.startswith(version.split("_")[0]): | ||||||
|                 os.unlink(Path(cls.migrate_location, version_file)) |                 os.unlink(Path(cls.migrate_location, version_file)) | ||||||
|         content = { |         content = { | ||||||
|             "upgrade": cls.upgrade_operators, |             "upgrade": list(dict.fromkeys(cls.upgrade_operators)), | ||||||
|             "downgrade": cls.downgrade_operators, |             "downgrade": list(dict.fromkeys(cls.downgrade_operators)), | ||||||
|         } |         } | ||||||
|         write_version_file(Path(cls.migrate_location, version), content) |         write_version_file(Path(cls.migrate_location, version), content) | ||||||
|         return version |         return version | ||||||
| @@ -138,25 +140,37 @@ class Migrate: | |||||||
|         return await cls._generate_diff_sql(name) |         return await cls._generate_diff_sql(name) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def _add_operator(cls, operator: str, upgrade=True, fk_m2m=False): |     def _add_operator(cls, operator: str, upgrade=True, fk_m2m_index=False): | ||||||
|         """ |         """ | ||||||
|         add operator,differentiate fk because fk is order limit |         add operator,differentiate fk because fk is order limit | ||||||
|         :param operator: |         :param operator: | ||||||
|         :param upgrade: |         :param upgrade: | ||||||
|         :param fk_m2m: |         :param fk_m2m_index: | ||||||
|         :return: |         :return: | ||||||
|         """ |         """ | ||||||
|         if upgrade: |         if upgrade: | ||||||
|             if fk_m2m: |             if fk_m2m_index: | ||||||
|                 cls._upgrade_fk_m2m_index_operators.append(operator) |                 cls._upgrade_fk_m2m_index_operators.append(operator) | ||||||
|             else: |             else: | ||||||
|                 cls.upgrade_operators.append(operator) |                 cls.upgrade_operators.append(operator) | ||||||
|         else: |         else: | ||||||
|             if fk_m2m: |             if fk_m2m_index: | ||||||
|                 cls._downgrade_fk_m2m_index_operators.append(operator) |                 cls._downgrade_fk_m2m_index_operators.append(operator) | ||||||
|             else: |             else: | ||||||
|                 cls.downgrade_operators.append(operator) |                 cls.downgrade_operators.append(operator) | ||||||
|  |  | ||||||
    @classmethod
    def _handle_indexes(cls, model: Type[Model], indexes: List[Union[Tuple[str], Index]]):
        """Prepare a model's index list for set-based comparison in diff_models.

        Tuple entries pass through unchanged; for ``Index`` instances this
        attempts to attach a name-based hash so equal indexes dedupe in sets.
        """
        ret = []
        for index in indexes:
            if isinstance(index, Index):
                # NOTE(review): this assigns __hash__ on the *instance*, but
                # Python looks up dunder methods on the type, so hash(index)
                # never invokes this lambda; additionally md5(...).hexdigest()
                # returns a str while __hash__ must return an int. The
                # intended name-based deduplication therefore likely does not
                # take effect -- confirm against diff_models' set usage.
                index.__hash__ = lambda self: md5(  # nosec: B303
                    self.index_name(cls.ddl.schema_generator, model).encode()
                    + self.__class__.__name__.encode()
                ).hexdigest()
            ret.append(index)
        return ret
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def diff_models(cls, old_models: Dict[str, dict], new_models: Dict[str, dict], upgrade=True): |     def diff_models(cls, old_models: Dict[str, dict], new_models: Dict[str, dict], upgrade=True): | ||||||
|         """ |         """ | ||||||
| @@ -192,7 +206,18 @@ class Migrate: | |||||||
|                 new_unique_together = set( |                 new_unique_together = set( | ||||||
|                     map(lambda x: tuple(x), new_model_describe.get("unique_together")) |                     map(lambda x: tuple(x), new_model_describe.get("unique_together")) | ||||||
|                 ) |                 ) | ||||||
|  |                 old_indexes = set( | ||||||
|  |                     map( | ||||||
|  |                         lambda x: x if isinstance(x, Index) else tuple(x), | ||||||
|  |                         cls._handle_indexes(model, old_model_describe.get("indexes", [])), | ||||||
|  |                     ) | ||||||
|  |                 ) | ||||||
|  |                 new_indexes = set( | ||||||
|  |                     map( | ||||||
|  |                         lambda x: x if isinstance(x, Index) else tuple(x), | ||||||
|  |                         cls._handle_indexes(model, new_model_describe.get("indexes", [])), | ||||||
|  |                     ) | ||||||
|  |                 ) | ||||||
|                 old_pk_field = old_model_describe.get("pk_field") |                 old_pk_field = old_model_describe.get("pk_field") | ||||||
|                 new_pk_field = new_model_describe.get("pk_field") |                 new_pk_field = new_model_describe.get("pk_field") | ||||||
|                 # pk field |                 # pk field | ||||||
| @@ -205,6 +230,8 @@ class Migrate: | |||||||
|                 old_m2m_fields = old_model_describe.get("m2m_fields") |                 old_m2m_fields = old_model_describe.get("m2m_fields") | ||||||
|                 new_m2m_fields = new_model_describe.get("m2m_fields") |                 new_m2m_fields = new_model_describe.get("m2m_fields") | ||||||
|                 for action, option, change in diff(old_m2m_fields, new_m2m_fields): |                 for action, option, change in diff(old_m2m_fields, new_m2m_fields): | ||||||
|  |                     if change[0][0] == "db_constraint": | ||||||
|  |                         continue | ||||||
|                     table = change[0][1].get("through") |                     table = change[0][1].get("through") | ||||||
|                     if action == "add": |                     if action == "add": | ||||||
|                         add = False |                         add = False | ||||||
| @@ -222,7 +249,7 @@ class Migrate: | |||||||
|                                     new_models.get(change[0][1].get("model_name")), |                                     new_models.get(change[0][1].get("model_name")), | ||||||
|                                 ), |                                 ), | ||||||
|                                 upgrade, |                                 upgrade, | ||||||
|                                 fk_m2m=True, |                                 fk_m2m_index=True, | ||||||
|                             ) |                             ) | ||||||
|                     elif action == "remove": |                     elif action == "remove": | ||||||
|                         add = False |                         add = False | ||||||
| @@ -233,18 +260,19 @@ class Migrate: | |||||||
|                             cls._downgrade_m2m.append(table) |                             cls._downgrade_m2m.append(table) | ||||||
|                             add = True |                             add = True | ||||||
|                         if add: |                         if add: | ||||||
|                             cls._add_operator(cls.drop_m2m(table), upgrade, fk_m2m=True) |                             cls._add_operator(cls.drop_m2m(table), upgrade, True) | ||||||
|                 # add unique_together |                 # add unique_together | ||||||
|                 for index in new_unique_together.difference(old_unique_together): |                 for index in new_unique_together.difference(old_unique_together): | ||||||
|                     cls._add_operator( |                     cls._add_operator(cls._add_index(model, index, True), upgrade, True) | ||||||
|                         cls._add_index(model, index, True), upgrade, |  | ||||||
|                     ) |  | ||||||
|                 # remove unique_together |                 # remove unique_together | ||||||
|                 for index in old_unique_together.difference(new_unique_together): |                 for index in old_unique_together.difference(new_unique_together): | ||||||
|                     cls._add_operator( |                     cls._add_operator(cls._drop_index(model, index, True), upgrade, True) | ||||||
|                         cls._drop_index(model, index, True), upgrade, |                 # add indexes | ||||||
|                     ) |                 for index in new_indexes.difference(old_indexes): | ||||||
|  |                     cls._add_operator(cls._add_index(model, index, False), upgrade, True) | ||||||
|  |                 # remove indexes | ||||||
|  |                 for index in old_indexes.difference(new_indexes): | ||||||
|  |                     cls._add_operator(cls._drop_index(model, index, False), upgrade, True) | ||||||
|                 old_data_fields = old_model_describe.get("data_fields") |                 old_data_fields = old_model_describe.get("data_fields") | ||||||
|                 new_data_fields = new_model_describe.get("data_fields") |                 new_data_fields = new_model_describe.get("data_fields") | ||||||
|  |  | ||||||
| @@ -266,7 +294,11 @@ class Migrate: | |||||||
|                             # rename field |                             # rename field | ||||||
|                             if ( |                             if ( | ||||||
|                                 changes[0] |                                 changes[0] | ||||||
|                                 == ("change", "name", (old_data_field_name, new_data_field_name),) |                                 == ( | ||||||
|  |                                     "change", | ||||||
|  |                                     "name", | ||||||
|  |                                     (old_data_field_name, new_data_field_name), | ||||||
|  |                                 ) | ||||||
|                                 and changes[1] |                                 and changes[1] | ||||||
|                                 == ( |                                 == ( | ||||||
|                                     "change", |                                     "change", | ||||||
| @@ -297,15 +329,23 @@ class Migrate: | |||||||
|                                         and cls._db_version.startswith("5.") |                                         and cls._db_version.startswith("5.") | ||||||
|                                     ): |                                     ): | ||||||
|                                         cls._add_operator( |                                         cls._add_operator( | ||||||
|                                             cls._modify_field(model, new_data_field), upgrade, |                                             cls._change_field( | ||||||
|  |                                                 model, old_data_field, new_data_field | ||||||
|  |                                             ), | ||||||
|  |                                             upgrade, | ||||||
|                                         ) |                                         ) | ||||||
|                                     else: |                                     else: | ||||||
|                                         cls._add_operator( |                                         cls._add_operator( | ||||||
|                                             cls._rename_field(model, *changes[1][2]), upgrade, |                                             cls._rename_field(model, *changes[1][2]), | ||||||
|  |                                             upgrade, | ||||||
|                                         ) |                                         ) | ||||||
|                     if not is_rename: |                     if not is_rename: | ||||||
|                         cls._add_operator( |                         cls._add_operator( | ||||||
|                             cls._add_field(model, new_data_field,), upgrade, |                             cls._add_field( | ||||||
|  |                                 model, | ||||||
|  |                                 new_data_field, | ||||||
|  |                             ), | ||||||
|  |                             upgrade, | ||||||
|                         ) |                         ) | ||||||
|                 # remove fields |                 # remove fields | ||||||
|                 for old_data_field_name in set(old_data_fields_name).difference( |                 for old_data_field_name in set(old_data_fields_name).difference( | ||||||
| @@ -340,10 +380,13 @@ class Migrate: | |||||||
|                     fk_field = next( |                     fk_field = next( | ||||||
|                         filter(lambda x: x.get("name") == new_fk_field_name, new_fk_fields) |                         filter(lambda x: x.get("name") == new_fk_field_name, new_fk_fields) | ||||||
|                     ) |                     ) | ||||||
|  |                     if fk_field.get("db_constraint"): | ||||||
|                         cls._add_operator( |                         cls._add_operator( | ||||||
|                         cls._add_fk(model, fk_field, new_models.get(fk_field.get("python_type"))), |                             cls._add_fk( | ||||||
|  |                                 model, fk_field, new_models.get(fk_field.get("python_type")) | ||||||
|  |                             ), | ||||||
|                             upgrade, |                             upgrade, | ||||||
|                         fk_m2m=True, |                             fk_m2m_index=True, | ||||||
|                         ) |                         ) | ||||||
|                 # drop fk |                 # drop fk | ||||||
|                 for old_fk_field_name in set(old_fk_fields_name).difference( |                 for old_fk_field_name in set(old_fk_fields_name).difference( | ||||||
| @@ -352,12 +395,13 @@ class Migrate: | |||||||
|                     old_fk_field = next( |                     old_fk_field = next( | ||||||
|                         filter(lambda x: x.get("name") == old_fk_field_name, old_fk_fields) |                         filter(lambda x: x.get("name") == old_fk_field_name, old_fk_fields) | ||||||
|                     ) |                     ) | ||||||
|  |                     if old_fk_field.get("db_constraint"): | ||||||
|                         cls._add_operator( |                         cls._add_operator( | ||||||
|                             cls._drop_fk( |                             cls._drop_fk( | ||||||
|                                 model, old_fk_field, old_models.get(old_fk_field.get("python_type")) |                                 model, old_fk_field, old_models.get(old_fk_field.get("python_type")) | ||||||
|                             ), |                             ), | ||||||
|                             upgrade, |                             upgrade, | ||||||
|                         fk_m2m=True, |                             fk_m2m_index=True, | ||||||
|                         ) |                         ) | ||||||
|                 # change fields |                 # change fields | ||||||
|                 for field_name in set(new_data_fields_name).intersection(set(old_data_fields_name)): |                 for field_name in set(new_data_fields_name).intersection(set(old_data_fields_name)): | ||||||
| @@ -375,11 +419,11 @@ class Migrate: | |||||||
|                             unique = new_data_field.get("unique") |                             unique = new_data_field.get("unique") | ||||||
|                             if old_new[0] is False and old_new[1] is True: |                             if old_new[0] is False and old_new[1] is True: | ||||||
|                                 cls._add_operator( |                                 cls._add_operator( | ||||||
|                                     cls._add_index(model, (field_name,), unique), upgrade, |                                     cls._add_index(model, (field_name,), unique), upgrade, True | ||||||
|                                 ) |                                 ) | ||||||
|                             else: |                             else: | ||||||
|                                 cls._add_operator( |                                 cls._add_operator( | ||||||
|                                     cls._drop_index(model, (field_name,), unique), upgrade, |                                     cls._drop_index(model, (field_name,), unique), upgrade, True | ||||||
|                                 ) |                                 ) | ||||||
|                         elif option == "db_field_types.": |                         elif option == "db_field_types.": | ||||||
|                             # continue since repeated with others |                             # continue since repeated with others | ||||||
| @@ -394,14 +438,15 @@ class Migrate: | |||||||
|                                 ) |                                 ) | ||||||
|                         elif option == "unique": |                         elif option == "unique": | ||||||
|                             # because indexed include it |                             # because indexed include it | ||||||
|                             pass |                             continue | ||||||
|                         elif option == "nullable": |                         elif option == "nullable": | ||||||
|                             # change nullable |                             # change nullable | ||||||
|                             cls._add_operator(cls._alter_null(model, new_data_field), upgrade) |                             cls._add_operator(cls._alter_null(model, new_data_field), upgrade) | ||||||
|                         else: |                         else: | ||||||
|                             # modify column |                             # modify column | ||||||
|                             cls._add_operator( |                             cls._add_operator( | ||||||
|                                 cls._modify_field(model, new_data_field), upgrade, |                                 cls._modify_field(model, new_data_field), | ||||||
|  |                                 upgrade, | ||||||
|                             ) |                             ) | ||||||
|  |  | ||||||
|         for old_model in old_models: |         for old_model in old_models: | ||||||
| @@ -432,19 +477,28 @@ class Migrate: | |||||||
|     def _resolve_fk_fields_name(cls, model: Type[Model], fields_name: Tuple[str]): |     def _resolve_fk_fields_name(cls, model: Type[Model], fields_name: Tuple[str]): | ||||||
|         ret = [] |         ret = [] | ||||||
|         for field_name in fields_name: |         for field_name in fields_name: | ||||||
|             if field_name in model._meta.fk_fields: |             field = model._meta.fields_map[field_name] | ||||||
|  |             if field.source_field: | ||||||
|  |                 ret.append(field.source_field) | ||||||
|  |             elif field_name in model._meta.fk_fields: | ||||||
|                 ret.append(field_name + "_id") |                 ret.append(field_name + "_id") | ||||||
|             else: |             else: | ||||||
|                 ret.append(field_name) |                 ret.append(field_name) | ||||||
|         return ret |         return ret | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def _drop_index(cls, model: Type[Model], fields_name: Tuple[str], unique=False): |     def _drop_index(cls, model: Type[Model], fields_name: Union[Tuple[str], Index], unique=False): | ||||||
|  |         if isinstance(fields_name, Index): | ||||||
|  |             return cls.ddl.drop_index_by_name( | ||||||
|  |                 model, fields_name.index_name(cls.ddl.schema_generator, model) | ||||||
|  |             ) | ||||||
|         fields_name = cls._resolve_fk_fields_name(model, fields_name) |         fields_name = cls._resolve_fk_fields_name(model, fields_name) | ||||||
|         return cls.ddl.drop_index(model, fields_name, unique) |         return cls.ddl.drop_index(model, fields_name, unique) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def _add_index(cls, model: Type[Model], fields_name: Tuple[str], unique=False): |     def _add_index(cls, model: Type[Model], fields_name: Union[Tuple[str], Index], unique=False): | ||||||
|  |         if isinstance(fields_name, Index): | ||||||
|  |             return fields_name.get_sql(cls.ddl.schema_generator, model, False) | ||||||
|         fields_name = cls._resolve_fk_fields_name(model, fields_name) |         fields_name = cls._resolve_fk_fields_name(model, fields_name) | ||||||
|         return cls.ddl.add_index(model, fields_name, unique) |         return cls.ddl.add_index(model, fields_name, unique) | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,12 +1,14 @@ | |||||||
| from tortoise import Model, fields | from tortoise import Model, fields | ||||||
|  |  | ||||||
|  | from aerich.coder import decoder, encoder | ||||||
|  |  | ||||||
| MAX_VERSION_LENGTH = 255 | MAX_VERSION_LENGTH = 255 | ||||||
|  |  | ||||||
|  |  | ||||||
| class Aerich(Model): | class Aerich(Model): | ||||||
|     version = fields.CharField(max_length=MAX_VERSION_LENGTH) |     version = fields.CharField(max_length=MAX_VERSION_LENGTH) | ||||||
|     app = fields.CharField(max_length=20) |     app = fields.CharField(max_length=20) | ||||||
|     content = fields.JSONField() |     content = fields.JSONField(encoder=encoder, decoder=decoder) | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|         ordering = ["-id"] |         ordering = ["-id"] | ||||||
|   | |||||||
| @@ -1,11 +1,30 @@ | |||||||
| import importlib | import importlib | ||||||
|  | import os | ||||||
| import re | import re | ||||||
| from typing import Dict | import sys | ||||||
|  | from pathlib import Path | ||||||
|  | from typing import Dict, Union | ||||||
|  |  | ||||||
| from click import BadOptionUsage, Context | from click import BadOptionUsage, ClickException, Context | ||||||
| from tortoise import BaseDBAsyncClient, Tortoise | from tortoise import BaseDBAsyncClient, Tortoise | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def add_src_path(path: str) -> str: | ||||||
|  |     """ | ||||||
|  |     add a folder to the paths so we can import from there | ||||||
|  |     :param path: path to add | ||||||
|  |     :return: absolute path | ||||||
|  |     """ | ||||||
|  |     if not os.path.isabs(path): | ||||||
|  |         # use the absolute path, otherwise some other things (e.g. __file__) won't work properly | ||||||
|  |         path = os.path.abspath(path) | ||||||
|  |     if not os.path.isdir(path): | ||||||
|  |         raise ClickException(f"Specified source folder does not exist: {path}") | ||||||
|  |     if path not in sys.path: | ||||||
|  |         sys.path.insert(0, path) | ||||||
|  |     return path | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_app_connection_name(config, app_name: str) -> str: | def get_app_connection_name(config, app_name: str) -> str: | ||||||
|     """ |     """ | ||||||
|     get connection name |     get connection name | ||||||
| @@ -17,7 +36,8 @@ def get_app_connection_name(config, app_name: str) -> str: | |||||||
|     if app: |     if app: | ||||||
|         return app.get("default_connection", "default") |         return app.get("default_connection", "default") | ||||||
|     raise BadOptionUsage( |     raise BadOptionUsage( | ||||||
|         option_name="--app", message=f'Can\'t get app named "{app_name}"', |         option_name="--app", | ||||||
|  |         message=f'Can\'t get app named "{app_name}"', | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -41,12 +61,11 @@ def get_tortoise_config(ctx: Context, tortoise_orm: str) -> dict: | |||||||
|     splits = tortoise_orm.split(".") |     splits = tortoise_orm.split(".") | ||||||
|     config_path = ".".join(splits[:-1]) |     config_path = ".".join(splits[:-1]) | ||||||
|     tortoise_config = splits[-1] |     tortoise_config = splits[-1] | ||||||
|  |  | ||||||
|     try: |     try: | ||||||
|         config_module = importlib.import_module(config_path) |         config_module = importlib.import_module(config_path) | ||||||
|     except (ModuleNotFoundError, AttributeError): |     except ModuleNotFoundError as e: | ||||||
|         raise BadOptionUsage( |         raise ClickException(f"Error while importing configuration module: {e}") from None | ||||||
|             ctx=ctx, message=f'No config named "{config_path}"', option_name="--config" |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     config = getattr(config_module, tortoise_config, None) |     config = getattr(config_module, tortoise_config, None) | ||||||
|     if not config: |     if not config: | ||||||
| @@ -62,7 +81,7 @@ _UPGRADE = "-- upgrade --\n" | |||||||
| _DOWNGRADE = "-- downgrade --\n" | _DOWNGRADE = "-- downgrade --\n" | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_version_content_from_file(version_file: str) -> Dict: | def get_version_content_from_file(version_file: Union[str, Path]) -> Dict: | ||||||
|     """ |     """ | ||||||
|     get version content |     get version content | ||||||
|     :param version_file: |     :param version_file: | ||||||
| @@ -84,7 +103,7 @@ def get_version_content_from_file(version_file: str) -> Dict: | |||||||
|         return ret |         return ret | ||||||
|  |  | ||||||
|  |  | ||||||
| def write_version_file(version_file: str, content: Dict): | def write_version_file(version_file: Path, content: Dict): | ||||||
|     """ |     """ | ||||||
|     write version file |     write version file | ||||||
|     :param version_file: |     :param version_file: | ||||||
| @@ -95,7 +114,9 @@ def write_version_file(version_file: str, content: Dict): | |||||||
|         f.write(_UPGRADE) |         f.write(_UPGRADE) | ||||||
|         upgrade = content.get("upgrade") |         upgrade = content.get("upgrade") | ||||||
|         if len(upgrade) > 1: |         if len(upgrade) > 1: | ||||||
|             f.write(";\n".join(upgrade) + ";\n") |             f.write(";\n".join(upgrade)) | ||||||
|  |             if not upgrade[-1].endswith(";"): | ||||||
|  |                 f.write(";\n") | ||||||
|         else: |         else: | ||||||
|             f.write(f"{upgrade[0]}") |             f.write(f"{upgrade[0]}") | ||||||
|             if not upgrade[0].endswith(";"): |             if not upgrade[0].endswith(";"): | ||||||
|   | |||||||
							
								
								
									
										1
									
								
								aerich/version.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										1
									
								
								aerich/version.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1 @@ | |||||||
|  | __version__ = "0.6.2" | ||||||
							
								
								
									
										897
									
								
								poetry.lock
									
									
									
										generated
									
									
									
								
							
							
						
						
									
										897
									
								
								poetry.lock
									
									
									
										generated
									
									
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,13 +1,13 @@ | |||||||
| [tool.poetry] | [tool.poetry] | ||||||
| name = "aerich" | name = "aerich" | ||||||
| version = "0.5.3" | version = "0.6.2" | ||||||
| description = "A database migrations tool for Tortoise ORM." | description = "A database migrations tool for Tortoise ORM." | ||||||
| authors = ["long2ice <long2ice@gmail.com>"] | authors = ["long2ice <long2ice@gmail.com>"] | ||||||
| license = "Apache-2.0" | license = "Apache-2.0" | ||||||
| readme = "README.md" | readme = "README.md" | ||||||
| homepage = "https://github.com/long2ice/aerich" | homepage = "https://github.com/tortoise/aerich" | ||||||
| repository = "https://github.com/long2ice/aerich.git" | repository = "https://github.com/tortoise/aerich.git" | ||||||
| documentation = "https://github.com/long2ice/aerich" | documentation = "https://github.com/tortoise/aerich" | ||||||
| keywords = ["migrate", "Tortoise-ORM", "mysql"] | keywords = ["migrate", "Tortoise-ORM", "mysql"] | ||||||
| packages = [ | packages = [ | ||||||
|     { include = "aerich" } |     { include = "aerich" } | ||||||
| @@ -18,16 +18,16 @@ include = ["CHANGELOG.md", "LICENSE", "README.md"] | |||||||
| python = "^3.7" | python = "^3.7" | ||||||
| tortoise-orm = "*" | tortoise-orm = "*" | ||||||
| click = "*" | click = "*" | ||||||
| pydantic = "*" |  | ||||||
| aiomysql = { version = "*", optional = true } |  | ||||||
| asyncpg = { version = "*", optional = true } | asyncpg = { version = "*", optional = true } | ||||||
|  | asyncmy = { version = "*", optional = true } | ||||||
| ddlparse = "*" | ddlparse = "*" | ||||||
| dictdiffer = "*" | dictdiffer = "*" | ||||||
|  | tomlkit = "*" | ||||||
|  |  | ||||||
| [tool.poetry.dev-dependencies] | [tool.poetry.dev-dependencies] | ||||||
| flake8 = "*" | flake8 = "*" | ||||||
| isort = "*" | isort = "*" | ||||||
| black = "19.10b0" | black = "*" | ||||||
| pytest = "*" | pytest = "*" | ||||||
| pytest-xdist = "*" | pytest-xdist = "*" | ||||||
| pytest-asyncio = "*" | pytest-asyncio = "*" | ||||||
| @@ -38,7 +38,6 @@ cryptography = "*" | |||||||
| [tool.poetry.extras] | [tool.poetry.extras] | ||||||
| asyncmy = ["asyncmy"] | asyncmy = ["asyncmy"] | ||||||
| asyncpg = ["asyncpg"] | asyncpg = ["asyncpg"] | ||||||
| aiomysql = ["aiomysql"] |  | ||||||
|  |  | ||||||
| [build-system] | [build-system] | ||||||
| requires = ["poetry>=0.12"] | requires = ["poetry>=0.12"] | ||||||
|   | |||||||
| @@ -56,13 +56,16 @@ class Product(Model): | |||||||
|     view_num = fields.IntField(description="View Num", default=0) |     view_num = fields.IntField(description="View Num", default=0) | ||||||
|     sort = fields.IntField() |     sort = fields.IntField() | ||||||
|     is_reviewed = fields.BooleanField(description="Is Reviewed") |     is_reviewed = fields.BooleanField(description="Is Reviewed") | ||||||
|     type = fields.IntEnumField(ProductType, description="Product Type") |     type = fields.IntEnumField( | ||||||
|  |         ProductType, description="Product Type", source_field="type_db_alias" | ||||||
|  |     ) | ||||||
|     pic = fields.CharField(max_length=200) |     pic = fields.CharField(max_length=200) | ||||||
|     body = fields.TextField() |     body = fields.TextField() | ||||||
|     created_at = fields.DatetimeField(auto_now_add=True) |     created_at = fields.DatetimeField(auto_now_add=True) | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|         unique_together = (("name", "type"),) |         unique_together = (("name", "type"),) | ||||||
|  |         indexes = (("name", "type"),) | ||||||
|  |  | ||||||
|  |  | ||||||
| class Config(Model): | class Config(Model): | ||||||
|   | |||||||
| @@ -50,7 +50,9 @@ class Product(Model): | |||||||
|     view_num = fields.IntField(description="View Num") |     view_num = fields.IntField(description="View Num") | ||||||
|     sort = fields.IntField() |     sort = fields.IntField() | ||||||
|     is_reviewed = fields.BooleanField(description="Is Reviewed") |     is_reviewed = fields.BooleanField(description="Is Reviewed") | ||||||
|     type = fields.IntEnumField(ProductType, description="Product Type") |     type = fields.IntEnumField( | ||||||
|  |         ProductType, description="Product Type", source_field="type_db_alias" | ||||||
|  |     ) | ||||||
|     image = fields.CharField(max_length=200) |     image = fields.CharField(max_length=200) | ||||||
|     body = fields.TextField() |     body = fields.TextField() | ||||||
|     created_at = fields.DatetimeField(auto_now_add=True) |     created_at = fields.DatetimeField(auto_now_add=True) | ||||||
|   | |||||||
| @@ -50,7 +50,9 @@ class Product(Model): | |||||||
|     view_num = fields.IntField(description="View Num") |     view_num = fields.IntField(description="View Num") | ||||||
|     sort = fields.IntField() |     sort = fields.IntField() | ||||||
|     is_reviewed = fields.BooleanField(description="Is Reviewed") |     is_reviewed = fields.BooleanField(description="Is Reviewed") | ||||||
|     type = fields.IntEnumField(ProductType, description="Product Type") |     type = fields.IntEnumField( | ||||||
|  |         ProductType, description="Product Type", source_field="type_db_alias" | ||||||
|  |     ) | ||||||
|     image = fields.CharField(max_length=200) |     image = fields.CharField(max_length=200) | ||||||
|     body = fields.TextField() |     body = fields.TextField() | ||||||
|     created_at = fields.DatetimeField(auto_now_add=True) |     created_at = fields.DatetimeField(auto_now_add=True) | ||||||
|   | |||||||
| @@ -1,9 +1,6 @@ | |||||||
| import pytest |  | ||||||
|  |  | ||||||
| from aerich.ddl.mysql import MysqlDDL | from aerich.ddl.mysql import MysqlDDL | ||||||
| from aerich.ddl.postgres import PostgresDDL | from aerich.ddl.postgres import PostgresDDL | ||||||
| from aerich.ddl.sqlite import SqliteDDL | from aerich.ddl.sqlite import SqliteDDL | ||||||
| from aerich.exceptions import NotSupportError |  | ||||||
| from aerich.migrate import Migrate | from aerich.migrate import Migrate | ||||||
| from tests.models import Category, Product, User | from tests.models import Category, Product, User | ||||||
|  |  | ||||||
| @@ -144,10 +141,6 @@ def test_set_comment(): | |||||||
|  |  | ||||||
|  |  | ||||||
| def test_drop_column(): | def test_drop_column(): | ||||||
|     if isinstance(Migrate.ddl, SqliteDDL): |  | ||||||
|         with pytest.raises(NotSupportError): |  | ||||||
|             ret = Migrate.ddl.drop_column(Category, "name") |  | ||||||
|     else: |  | ||||||
|     ret = Migrate.ddl.drop_column(Category, "name") |     ret = Migrate.ddl.drop_column(Category, "name") | ||||||
|     if isinstance(Migrate.ddl, MysqlDDL): |     if isinstance(Migrate.ddl, MysqlDDL): | ||||||
|         assert ret == "ALTER TABLE `category` DROP COLUMN `name`" |         assert ret == "ALTER TABLE `category` DROP COLUMN `name`" | ||||||
|   | |||||||
| @@ -17,6 +17,7 @@ old_models_describe = { | |||||||
|         "description": None, |         "description": None, | ||||||
|         "docstring": None, |         "docstring": None, | ||||||
|         "unique_together": [], |         "unique_together": [], | ||||||
|  |         "indexes": [], | ||||||
|         "pk_field": { |         "pk_field": { | ||||||
|             "name": "id", |             "name": "id", | ||||||
|             "field_type": "IntField", |             "field_type": "IntField", | ||||||
| @@ -151,6 +152,7 @@ old_models_describe = { | |||||||
|         "description": None, |         "description": None, | ||||||
|         "docstring": None, |         "docstring": None, | ||||||
|         "unique_together": [], |         "unique_together": [], | ||||||
|  |         "indexes": [], | ||||||
|         "pk_field": { |         "pk_field": { | ||||||
|             "name": "id", |             "name": "id", | ||||||
|             "field_type": "IntField", |             "field_type": "IntField", | ||||||
| @@ -242,6 +244,7 @@ old_models_describe = { | |||||||
|         "description": None, |         "description": None, | ||||||
|         "docstring": None, |         "docstring": None, | ||||||
|         "unique_together": [], |         "unique_together": [], | ||||||
|  |         "indexes": [], | ||||||
|         "pk_field": { |         "pk_field": { | ||||||
|             "name": "id", |             "name": "id", | ||||||
|             "field_type": "IntField", |             "field_type": "IntField", | ||||||
| @@ -334,6 +337,7 @@ old_models_describe = { | |||||||
|         "description": None, |         "description": None, | ||||||
|         "docstring": None, |         "docstring": None, | ||||||
|         "unique_together": [], |         "unique_together": [], | ||||||
|  |         "indexes": [], | ||||||
|         "pk_field": { |         "pk_field": { | ||||||
|             "name": "id", |             "name": "id", | ||||||
|             "field_type": "IntField", |             "field_type": "IntField", | ||||||
| @@ -413,7 +417,7 @@ old_models_describe = { | |||||||
|             { |             { | ||||||
|                 "name": "type", |                 "name": "type", | ||||||
|                 "field_type": "IntEnumFieldInstance", |                 "field_type": "IntEnumFieldInstance", | ||||||
|                 "db_column": "type", |                 "db_column": "type_db_alias", | ||||||
|                 "python_type": "int", |                 "python_type": "int", | ||||||
|                 "generated": False, |                 "generated": False, | ||||||
|                 "nullable": False, |                 "nullable": False, | ||||||
| @@ -512,6 +516,7 @@ old_models_describe = { | |||||||
|         "description": None, |         "description": None, | ||||||
|         "docstring": None, |         "docstring": None, | ||||||
|         "unique_together": [], |         "unique_together": [], | ||||||
|  |         "indexes": [], | ||||||
|         "pk_field": { |         "pk_field": { | ||||||
|             "name": "id", |             "name": "id", | ||||||
|             "field_type": "IntField", |             "field_type": "IntField", | ||||||
| @@ -681,6 +686,7 @@ old_models_describe = { | |||||||
|         "description": None, |         "description": None, | ||||||
|         "docstring": None, |         "docstring": None, | ||||||
|         "unique_together": [], |         "unique_together": [], | ||||||
|  |         "indexes": [], | ||||||
|         "pk_field": { |         "pk_field": { | ||||||
|             "name": "id", |             "name": "id", | ||||||
|             "field_type": "IntField", |             "field_type": "IntField", | ||||||
| @@ -793,9 +799,9 @@ def test_migrate(mocker: MockerFixture): | |||||||
|                 "ALTER TABLE `configs` RENAME TO `config`", |                 "ALTER TABLE `configs` RENAME TO `config`", | ||||||
|                 "ALTER TABLE `product` RENAME COLUMN `image` TO `pic`", |                 "ALTER TABLE `product` RENAME COLUMN `image` TO `pic`", | ||||||
|                 "ALTER TABLE `email` RENAME COLUMN `id` TO `email_id`", |                 "ALTER TABLE `email` RENAME COLUMN `id` TO `email_id`", | ||||||
|                 "ALTER TABLE `email` DROP FOREIGN KEY `fk_email_user_5b58673d`", |                 "ALTER TABLE `product` ADD INDEX `idx_product_name_869427` (`name`, `type_db_alias`)", | ||||||
|                 "ALTER TABLE `email` ADD INDEX `idx_email_email_4a1a33` (`email`)", |                 "ALTER TABLE `email` ADD INDEX `idx_email_email_4a1a33` (`email`)", | ||||||
|                 "ALTER TABLE `product` ADD UNIQUE INDEX `uid_product_name_f14935` (`name`, `type`)", |                 "ALTER TABLE `product` ADD UNIQUE INDEX `uid_product_name_869427` (`name`, `type_db_alias`)", | ||||||
|                 "ALTER TABLE `product` ALTER COLUMN `view_num` SET DEFAULT 0", |                 "ALTER TABLE `product` ALTER COLUMN `view_num` SET DEFAULT 0", | ||||||
|                 "ALTER TABLE `user` DROP COLUMN `avatar`", |                 "ALTER TABLE `user` DROP COLUMN `avatar`", | ||||||
|                 "ALTER TABLE `user` MODIFY COLUMN `password` VARCHAR(100) NOT NULL", |                 "ALTER TABLE `user` MODIFY COLUMN `password` VARCHAR(100) NOT NULL", | ||||||
| @@ -817,9 +823,9 @@ def test_migrate(mocker: MockerFixture): | |||||||
|                 "ALTER TABLE `config` RENAME TO `configs`", |                 "ALTER TABLE `config` RENAME TO `configs`", | ||||||
|                 "ALTER TABLE `product` RENAME COLUMN `pic` TO `image`", |                 "ALTER TABLE `product` RENAME COLUMN `pic` TO `image`", | ||||||
|                 "ALTER TABLE `email` RENAME COLUMN `email_id` TO `id`", |                 "ALTER TABLE `email` RENAME COLUMN `email_id` TO `id`", | ||||||
|                 "ALTER TABLE `email` ADD CONSTRAINT `fk_email_user_5b58673d` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE", |                 "ALTER TABLE `product` DROP INDEX `idx_product_name_869427`", | ||||||
|                 "ALTER TABLE `email` DROP INDEX `idx_email_email_4a1a33`", |                 "ALTER TABLE `email` DROP INDEX `idx_email_email_4a1a33`", | ||||||
|                 "ALTER TABLE `product` DROP INDEX `uid_product_name_f14935`", |                 "ALTER TABLE `product` DROP INDEX `uid_product_name_869427`", | ||||||
|                 "ALTER TABLE `product` ALTER COLUMN `view_num` DROP DEFAULT", |                 "ALTER TABLE `product` ALTER COLUMN `view_num` DROP DEFAULT", | ||||||
|                 "ALTER TABLE `user` ADD `avatar` VARCHAR(200) NOT NULL  DEFAULT ''", |                 "ALTER TABLE `user` ADD `avatar` VARCHAR(200) NOT NULL  DEFAULT ''", | ||||||
|                 "ALTER TABLE `user` DROP INDEX `idx_user_usernam_9987ab`", |                 "ALTER TABLE `user` DROP INDEX `idx_user_usernam_9987ab`", | ||||||
| @@ -837,41 +843,41 @@ def test_migrate(mocker: MockerFixture): | |||||||
|                 'ALTER TABLE "config" ADD "user_id" INT NOT NULL', |                 'ALTER TABLE "config" ADD "user_id" INT NOT NULL', | ||||||
|                 'ALTER TABLE "config" ADD CONSTRAINT "fk_config_user_17daa970" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE', |                 'ALTER TABLE "config" ADD CONSTRAINT "fk_config_user_17daa970" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE', | ||||||
|                 'ALTER TABLE "config" ALTER COLUMN "status" DROP DEFAULT', |                 'ALTER TABLE "config" ALTER COLUMN "status" DROP DEFAULT', | ||||||
|  |                 'ALTER TABLE "configs" RENAME TO "config"', | ||||||
|                 'ALTER TABLE "email" ADD "address" VARCHAR(200) NOT NULL', |                 'ALTER TABLE "email" ADD "address" VARCHAR(200) NOT NULL', | ||||||
|                 'ALTER TABLE "email" DROP COLUMN "user_id"', |                 'ALTER TABLE "email" DROP COLUMN "user_id"', | ||||||
|                 'ALTER TABLE "product" RENAME COLUMN "image" TO "pic"', |  | ||||||
|                 'ALTER TABLE "email" RENAME COLUMN "id" TO "email_id"', |                 'ALTER TABLE "email" RENAME COLUMN "id" TO "email_id"', | ||||||
|                 'ALTER TABLE "configs" RENAME TO "config"', |  | ||||||
|                 'ALTER TABLE "email" DROP CONSTRAINT "fk_email_user_5b58673d"', |  | ||||||
|                 'CREATE INDEX "idx_email_email_4a1a33" ON "email" ("email")', |  | ||||||
|                 'CREATE UNIQUE INDEX "uid_product_name_f14935" ON "product" ("name", "type")', |  | ||||||
|                 'ALTER TABLE "product" ALTER COLUMN "view_num" SET DEFAULT 0', |                 'ALTER TABLE "product" ALTER COLUMN "view_num" SET DEFAULT 0', | ||||||
|                 'ALTER TABLE "user" DROP COLUMN "avatar"', |                 'ALTER TABLE "product" RENAME COLUMN "image" TO "pic"', | ||||||
|                 'ALTER TABLE "user" ALTER COLUMN "password" TYPE VARCHAR(100) USING "password"::VARCHAR(100)', |                 'ALTER TABLE "user" ALTER COLUMN "password" TYPE VARCHAR(100) USING "password"::VARCHAR(100)', | ||||||
|                 'CREATE TABLE IF NOT EXISTS "newmodel" (\n    "id" SERIAL NOT NULL PRIMARY KEY,\n    "name" VARCHAR(50) NOT NULL\n);\nCOMMENT ON COLUMN "config"."user_id" IS \'User\';', |                 'ALTER TABLE "user" DROP COLUMN "avatar"', | ||||||
|                 'CREATE UNIQUE INDEX "uid_user_usernam_9987ab" ON "user" ("username")', |                 'CREATE INDEX "idx_product_name_869427" ON "product" ("name", "type_db_alias")', | ||||||
|  |                 'CREATE INDEX "idx_email_email_4a1a33" ON "email" ("email")', | ||||||
|                 'CREATE TABLE "email_user" ("email_id" INT NOT NULL REFERENCES "email" ("email_id") ON DELETE CASCADE,"user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE)', |                 'CREATE TABLE "email_user" ("email_id" INT NOT NULL REFERENCES "email" ("email_id") ON DELETE CASCADE,"user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE)', | ||||||
|  |                 'CREATE TABLE IF NOT EXISTS "newmodel" (\n    "id" SERIAL NOT NULL PRIMARY KEY,\n    "name" VARCHAR(50) NOT NULL\n);\nCOMMENT ON COLUMN "config"."user_id" IS \'User\';', | ||||||
|  |                 'CREATE UNIQUE INDEX "uid_product_name_869427" ON "product" ("name", "type_db_alias")', | ||||||
|  |                 'CREATE UNIQUE INDEX "uid_user_usernam_9987ab" ON "user" ("username")', | ||||||
|             ] |             ] | ||||||
|         ) |         ) | ||||||
|         assert sorted(Migrate.downgrade_operators) == sorted( |         assert sorted(Migrate.downgrade_operators) == sorted( | ||||||
|             [ |             [ | ||||||
|                 'ALTER TABLE "category" ALTER COLUMN "name" SET NOT NULL', |                 'ALTER TABLE "category" ALTER COLUMN "name" SET NOT NULL', | ||||||
|                 'ALTER TABLE "category" ALTER COLUMN "slug" TYPE VARCHAR(200) USING "slug"::VARCHAR(200)', |                 'ALTER TABLE "category" ALTER COLUMN "slug" TYPE VARCHAR(200) USING "slug"::VARCHAR(200)', | ||||||
|                 'ALTER TABLE "user" ALTER COLUMN "password" TYPE VARCHAR(200) USING "password"::VARCHAR(200)', |                 'ALTER TABLE "config" ALTER COLUMN "status" SET DEFAULT 1', | ||||||
|                 'ALTER TABLE "config" DROP COLUMN "user_id"', |                 'ALTER TABLE "config" DROP COLUMN "user_id"', | ||||||
|                 'ALTER TABLE "config" DROP CONSTRAINT "fk_config_user_17daa970"', |                 'ALTER TABLE "config" DROP CONSTRAINT "fk_config_user_17daa970"', | ||||||
|                 'ALTER TABLE "config" ALTER COLUMN "status" SET DEFAULT 1', |                 'ALTER TABLE "config" RENAME TO "configs"', | ||||||
|                 'ALTER TABLE "email" ADD "user_id" INT NOT NULL', |                 'ALTER TABLE "email" ADD "user_id" INT NOT NULL', | ||||||
|                 'ALTER TABLE "email" DROP COLUMN "address"', |                 'ALTER TABLE "email" DROP COLUMN "address"', | ||||||
|                 'ALTER TABLE "config" RENAME TO "configs"', |  | ||||||
|                 'ALTER TABLE "product" RENAME COLUMN "pic" TO "image"', |  | ||||||
|                 'ALTER TABLE "email" RENAME COLUMN "email_id" TO "id"', |                 'ALTER TABLE "email" RENAME COLUMN "email_id" TO "id"', | ||||||
|                 'ALTER TABLE "email" ADD CONSTRAINT "fk_email_user_5b58673d" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE', |  | ||||||
|                 'DROP INDEX "idx_email_email_4a1a33"', |  | ||||||
|                 'ALTER TABLE "product" ALTER COLUMN "view_num" DROP DEFAULT', |                 'ALTER TABLE "product" ALTER COLUMN "view_num" DROP DEFAULT', | ||||||
|  |                 'ALTER TABLE "product" RENAME COLUMN "pic" TO "image"', | ||||||
|                 'ALTER TABLE "user" ADD "avatar" VARCHAR(200) NOT NULL  DEFAULT \'\'', |                 'ALTER TABLE "user" ADD "avatar" VARCHAR(200) NOT NULL  DEFAULT \'\'', | ||||||
|  |                 'ALTER TABLE "user" ALTER COLUMN "password" TYPE VARCHAR(200) USING "password"::VARCHAR(200)', | ||||||
|  |                 'DROP INDEX "idx_product_name_869427"', | ||||||
|  |                 'DROP INDEX "idx_email_email_4a1a33"', | ||||||
|                 'DROP INDEX "idx_user_usernam_9987ab"', |                 'DROP INDEX "idx_user_usernam_9987ab"', | ||||||
|                 'DROP INDEX "uid_product_name_f14935"', |                 'DROP INDEX "uid_product_name_869427"', | ||||||
|                 'DROP TABLE IF EXISTS "email_user"', |                 'DROP TABLE IF EXISTS "email_user"', | ||||||
|                 'DROP TABLE IF EXISTS "newmodel"', |                 'DROP TABLE IF EXISTS "newmodel"', | ||||||
|             ] |             ] | ||||||
|   | |||||||
		Reference in New Issue
	
	Block a user