Compare commits
	
		
			75 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 002221e557 | ||
|  | 141d7205bf | ||
|  | af4d4be19a | ||
|  | 3b4d9b47ce | ||
|  | 4b0c4ae7d0 | ||
|  | dc821d8a02 | ||
|  | d18a6b5be0 | ||
|  | 1e56a70f21 | ||
|  | ab1e1aab75 | ||
|  | 00dd04f97d | ||
|  | fc914acc80 | ||
|  | ac03ecb002 | ||
|  | 235ef3f7ea | ||
|  | e00eb7f3d9 | ||
|  | d6c8941676 | ||
|  | cf062c9310 | ||
|  | 309adec8c9 | ||
|  | 8674142ba8 | ||
|  | cda9bd1c47 | ||
|  | 198e4e0032 | ||
|  | 1b440477a2 | ||
|  | 1263c6f735 | ||
|  | 6504384879 | ||
|  | 17ab0a1421 | ||
|  | e1ffcb609b | ||
|  | 18cb75f555 | ||
|  | dfe13ea250 | ||
|  | b97ce0ff2f | ||
|  | 21001c0eda | ||
|  | 9bd96a9487 | ||
|  | 5bdaa32a9e | ||
|  | d74e7b5630 | ||
|  | 119b15d597 | ||
|  | f93ab6bbff | ||
|  | bd8eb94a6e | ||
|  | 2fcb2626fd | ||
|  | 3728db4279 | ||
|  | 6ac1fb5332 | ||
|  | 87a17d443c | ||
|  | 55f18a69ed | ||
|  | 86a9c4cedd | ||
|  | 86e1d3defb | ||
|  | 01fa7fbbdb | ||
|  | 90196eb1bf | ||
|  | 3c111792a9 | ||
|  | 77e9d7bc91 | ||
|  | fe2ddff88b | ||
|  | 0d23297f46 | ||
|  | 6be6d55e5b | ||
|  | 25674bc73a | ||
|  | 1715eda1a3 | ||
|  | f5775049dd | ||
|  | 6fd0f8a42f | ||
|  | f52dc009af | ||
|  | 9248d456f9 | ||
|  | c24f2f6b09 | ||
|  | 19c9c2c30f | ||
|  | 73b75349ee | ||
|  | 7bc553221a | ||
|  | 7413a05e19 | ||
|  | bf194ca8ce | ||
|  | b06da0223a | ||
|  | 83554cdc5d | ||
|  | 6c76bfccad | ||
|  | a1746e457c | ||
|  | 2a0435dea9 | ||
|  | e87f67f1e1 | ||
|  | 7b4b7ac749 | ||
|  | 5b9b51db3f | ||
|  | ffeee3c901 | ||
|  | b4366d2427 | ||
|  | ec1c80f3a9 | ||
|  | d2083632eb | ||
|  | dc8b4c2263 | ||
|  | cb5dffeeb8 | 
							
								
								
									
										6
									
								
								.github/workflows/pypi.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										6
									
								
								.github/workflows/pypi.yml
									
									
									
									
										vendored
									
									
								
							| @@ -8,12 +8,12 @@ jobs: | |||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v2 |       - uses: actions/checkout@v2 | ||||||
|       - uses: actions/setup-python@v1 |       - uses: actions/setup-python@v2 | ||||||
|         with: |         with: | ||||||
|           python-version: '3.x' |           python-version: '3.x' | ||||||
|  |       - uses: dschep/install-poetry-action@v1.3 | ||||||
|       - name: Build dists |       - name: Build dists | ||||||
|         run: | |         run: make build | ||||||
|           python3 setup.py sdist |  | ||||||
|       - name: Pypi Publish |       - name: Pypi Publish | ||||||
|         uses: pypa/gh-action-pypi-publish@master |         uses: pypa/gh-action-pypi-publish@master | ||||||
|         with: |         with: | ||||||
|   | |||||||
							
								
								
									
										7
									
								
								.github/workflows/test.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										7
									
								
								.github/workflows/test.yml
									
									
									
									
										vendored
									
									
								
							| @@ -19,10 +19,7 @@ jobs: | |||||||
|       - uses: actions/setup-python@v2 |       - uses: actions/setup-python@v2 | ||||||
|         with: |         with: | ||||||
|           python-version: '3.x' |           python-version: '3.x' | ||||||
|       - name: Install dependencies |       - uses: dschep/install-poetry-action@v1.3 | ||||||
|         run: | |  | ||||||
|           python -m pip install --upgrade pip |  | ||||||
|           pip install -r requirements-dev.txt |  | ||||||
|       - name: CI |       - name: CI | ||||||
|         env: |         env: | ||||||
|           MYSQL_PASS: root |           MYSQL_PASS: root | ||||||
| @@ -31,4 +28,4 @@ jobs: | |||||||
|           POSTGRES_PASS: 123456 |           POSTGRES_PASS: 123456 | ||||||
|           POSTGRES_HOST: 127.0.0.1 |           POSTGRES_HOST: 127.0.0.1 | ||||||
|           POSTGRES_PORT: 5432 |           POSTGRES_PORT: 5432 | ||||||
|         run: make testall |         run: make ci | ||||||
| @@ -1,54 +1,80 @@ | |||||||
| ========= | # ChangeLog | ||||||
| ChangeLog | 
 | ||||||
| ========= | ## 0.2 | ||||||
|  | 
 | ||||||
|  | ### 0.2.4 | ||||||
|  | 
 | ||||||
|  | - Raise error with SQLite unsupported features. | ||||||
|  | - Fix Postgres alter table. (#48) | ||||||
|  | - Add `Rename` support. | ||||||
|  | 
 | ||||||
|  | ### 0.2.3 | ||||||
|  | 
 | ||||||
|  | - Fix tortoise ssl config. | ||||||
|  | - PostgreSQL add/drop index/unique. | ||||||
|  | 
 | ||||||
|  | ### 0.2.2 | ||||||
|  | 
 | ||||||
|  | - Fix postgres drop fk. | ||||||
|  | - Fix version sort. | ||||||
|  | 
 | ||||||
|  | ### 0.2.1 | ||||||
|  | 
 | ||||||
|  | - Fix bug in windows. | ||||||
|  | - Enhance PostgreSQL support. | ||||||
|  | 
 | ||||||
|  | ### 0.2.0 | ||||||
|  | 
 | ||||||
|  | - Update model file find method. | ||||||
|  | - Set `--safe` bool. | ||||||
|  | 
 | ||||||
|  | ## 0.1 | ||||||
|  | 
 | ||||||
|  | ### 0.1.9 | ||||||
| 
 | 
 | ||||||
| 0.1 |  | ||||||
| === |  | ||||||
| 0.1.9 |  | ||||||
| ----- |  | ||||||
| - Fix default_connection when upgrade | - Fix default_connection when upgrade | ||||||
| - Find default app instead of default. | - Find default app instead of default. | ||||||
| - Diff MySQL ddl. | - Diff MySQL ddl. | ||||||
| - Check tortoise config. | - Check tortoise config. | ||||||
| 
 | 
 | ||||||
| 0.1.8 | ### 0.1.8 | ||||||
| ----- | 
 | ||||||
| - Fix upgrade error when migrate. | - Fix upgrade error when migrate. | ||||||
| - Fix init db sql error. | - Fix init db sql error. | ||||||
| - Support change column. | - Support change column. | ||||||
| 
 | 
 | ||||||
| 0.1.7 | ### 0.1.7 | ||||||
| ----- | 
 | ||||||
| - Exclude models.Aerich. | - Exclude models.Aerich. | ||||||
| - Add init record when init-db. | - Add init record when init-db. | ||||||
| - Fix version num str. | - Fix version num str. | ||||||
| 
 | 
 | ||||||
| 0.1.6 | ### 0.1.6 | ||||||
| ----- | 
 | ||||||
| - update dependency_links | - update dependency_links | ||||||
| 
 | 
 | ||||||
| 0.1.5 | ### 0.1.5 | ||||||
| ----- | 
 | ||||||
| - Add sqlite and postgres support. | - Add sqlite and postgres support. | ||||||
| - Fix dependency import. | - Fix dependency import. | ||||||
| - Store versions in db. | - Store versions in db. | ||||||
| 
 | 
 | ||||||
| 0.1.4 | ### 0.1.4 | ||||||
| ----- | 
 | ||||||
| - Fix transaction and fields import. | - Fix transaction and fields import. | ||||||
| - Make unique index worked. | - Make unique index worked. | ||||||
| - Add cli --version. | - Add cli --version. | ||||||
| 
 | 
 | ||||||
| 0.1.3 | ### 0.1.3 | ||||||
| ----- | 
 | ||||||
| - Support indexes and unique_together. | - Support indexes and unique_together. | ||||||
| 
 | 
 | ||||||
| 0.1.2 | ### 0.1.2 | ||||||
| ----- | 
 | ||||||
| - Now aerich support m2m. | - Now aerich support m2m. | ||||||
| - Add cli cmd init-db. | - Add cli cmd init-db. | ||||||
| - Change cli options. | - Change cli options. | ||||||
| 
 | 
 | ||||||
| 0.1.1 | ### 0.1.1 | ||||||
| ----- | 
 | ||||||
| - Now aerich is basic worked. | - Now aerich is basic worked. | ||||||
							
								
								
									
										214
									
								
								LICENSE
									
									
									
									
									
								
							
							
						
						
									
										214
									
								
								LICENSE
									
									
									
									
									
								
							| @@ -1,21 +1,201 @@ | |||||||
| The MIT License (MIT) |                                  Apache License | ||||||
|  |                            Version 2.0, January 2004 | ||||||
|  |                         http://www.apache.org/licenses/ | ||||||
|  |  | ||||||
| Copyright (c) 2020 long2ice |    TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION | ||||||
|  |  | ||||||
| Permission is hereby granted, free of charge, to any person obtaining a copy |    1. Definitions. | ||||||
| of this software and associated documentation files (the "Software"), to deal |  | ||||||
| in the Software without restriction, including without limitation the rights |  | ||||||
| to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |  | ||||||
| copies of the Software, and to permit persons to whom the Software is |  | ||||||
| furnished to do so, subject to the following conditions: |  | ||||||
|  |  | ||||||
| The above copyright notice and this permission notice shall be included in |       "License" shall mean the terms and conditions for use, reproduction, | ||||||
| all copies or substantial portions of the Software. |       and distribution as defined by Sections 1 through 9 of this document. | ||||||
|  |  | ||||||
| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |       "Licensor" shall mean the copyright owner or entity authorized by | ||||||
| IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |       the copyright owner that is granting the License. | ||||||
| FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |  | ||||||
| AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |       "Legal Entity" shall mean the union of the acting entity and all | ||||||
| LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |       other entities that control, are controlled by, or are under common | ||||||
| OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN |       control with that entity. For the purposes of this definition, | ||||||
| THE SOFTWARE. |       "control" means (i) the power, direct or indirect, to cause the | ||||||
|  |       direction or management of such entity, whether by contract or | ||||||
|  |       otherwise, or (ii) ownership of fifty percent (50%) or more of the | ||||||
|  |       outstanding shares, or (iii) beneficial ownership of such entity. | ||||||
|  |  | ||||||
|  |       "You" (or "Your") shall mean an individual or Legal Entity | ||||||
|  |       exercising permissions granted by this License. | ||||||
|  |  | ||||||
|  |       "Source" form shall mean the preferred form for making modifications, | ||||||
|  |       including but not limited to software source code, documentation | ||||||
|  |       source, and configuration files. | ||||||
|  |  | ||||||
|  |       "Object" form shall mean any form resulting from mechanical | ||||||
|  |       transformation or translation of a Source form, including but | ||||||
|  |       not limited to compiled object code, generated documentation, | ||||||
|  |       and conversions to other media types. | ||||||
|  |  | ||||||
|  |       "Work" shall mean the work of authorship, whether in Source or | ||||||
|  |       Object form, made available under the License, as indicated by a | ||||||
|  |       copyright notice that is included in or attached to the work | ||||||
|  |       (an example is provided in the Appendix below). | ||||||
|  |  | ||||||
|  |       "Derivative Works" shall mean any work, whether in Source or Object | ||||||
|  |       form, that is based on (or derived from) the Work and for which the | ||||||
|  |       editorial revisions, annotations, elaborations, or other modifications | ||||||
|  |       represent, as a whole, an original work of authorship. For the purposes | ||||||
|  |       of this License, Derivative Works shall not include works that remain | ||||||
|  |       separable from, or merely link (or bind by name) to the interfaces of, | ||||||
|  |       the Work and Derivative Works thereof. | ||||||
|  |  | ||||||
|  |       "Contribution" shall mean any work of authorship, including | ||||||
|  |       the original version of the Work and any modifications or additions | ||||||
|  |       to that Work or Derivative Works thereof, that is intentionally | ||||||
|  |       submitted to Licensor for inclusion in the Work by the copyright owner | ||||||
|  |       or by an individual or Legal Entity authorized to submit on behalf of | ||||||
|  |       the copyright owner. For the purposes of this definition, "submitted" | ||||||
|  |       means any form of electronic, verbal, or written communication sent | ||||||
|  |       to the Licensor or its representatives, including but not limited to | ||||||
|  |       communication on electronic mailing lists, source code control systems, | ||||||
|  |       and issue tracking systems that are managed by, or on behalf of, the | ||||||
|  |       Licensor for the purpose of discussing and improving the Work, but | ||||||
|  |       excluding communication that is conspicuously marked or otherwise | ||||||
|  |       designated in writing by the copyright owner as "Not a Contribution." | ||||||
|  |  | ||||||
|  |       "Contributor" shall mean Licensor and any individual or Legal Entity | ||||||
|  |       on behalf of whom a Contribution has been received by Licensor and | ||||||
|  |       subsequently incorporated within the Work. | ||||||
|  |  | ||||||
|  |    2. Grant of Copyright License. Subject to the terms and conditions of | ||||||
|  |       this License, each Contributor hereby grants to You a perpetual, | ||||||
|  |       worldwide, non-exclusive, no-charge, royalty-free, irrevocable | ||||||
|  |       copyright license to reproduce, prepare Derivative Works of, | ||||||
|  |       publicly display, publicly perform, sublicense, and distribute the | ||||||
|  |       Work and such Derivative Works in Source or Object form. | ||||||
|  |  | ||||||
|  |    3. Grant of Patent License. Subject to the terms and conditions of | ||||||
|  |       this License, each Contributor hereby grants to You a perpetual, | ||||||
|  |       worldwide, non-exclusive, no-charge, royalty-free, irrevocable | ||||||
|  |       (except as stated in this section) patent license to make, have made, | ||||||
|  |       use, offer to sell, sell, import, and otherwise transfer the Work, | ||||||
|  |       where such license applies only to those patent claims licensable | ||||||
|  |       by such Contributor that are necessarily infringed by their | ||||||
|  |       Contribution(s) alone or by combination of their Contribution(s) | ||||||
|  |       with the Work to which such Contribution(s) was submitted. If You | ||||||
|  |       institute patent litigation against any entity (including a | ||||||
|  |       cross-claim or counterclaim in a lawsuit) alleging that the Work | ||||||
|  |       or a Contribution incorporated within the Work constitutes direct | ||||||
|  |       or contributory patent infringement, then any patent licenses | ||||||
|  |       granted to You under this License for that Work shall terminate | ||||||
|  |       as of the date such litigation is filed. | ||||||
|  |  | ||||||
|  |    4. Redistribution. You may reproduce and distribute copies of the | ||||||
|  |       Work or Derivative Works thereof in any medium, with or without | ||||||
|  |       modifications, and in Source or Object form, provided that You | ||||||
|  |       meet the following conditions: | ||||||
|  |  | ||||||
|  |       (a) You must give any other recipients of the Work or | ||||||
|  |           Derivative Works a copy of this License; and | ||||||
|  |  | ||||||
|  |       (b) You must cause any modified files to carry prominent notices | ||||||
|  |           stating that You changed the files; and | ||||||
|  |  | ||||||
|  |       (c) You must retain, in the Source form of any Derivative Works | ||||||
|  |           that You distribute, all copyright, patent, trademark, and | ||||||
|  |           attribution notices from the Source form of the Work, | ||||||
|  |           excluding those notices that do not pertain to any part of | ||||||
|  |           the Derivative Works; and | ||||||
|  |  | ||||||
|  |       (d) If the Work includes a "NOTICE" text file as part of its | ||||||
|  |           distribution, then any Derivative Works that You distribute must | ||||||
|  |           include a readable copy of the attribution notices contained | ||||||
|  |           within such NOTICE file, excluding those notices that do not | ||||||
|  |           pertain to any part of the Derivative Works, in at least one | ||||||
|  |           of the following places: within a NOTICE text file distributed | ||||||
|  |           as part of the Derivative Works; within the Source form or | ||||||
|  |           documentation, if provided along with the Derivative Works; or, | ||||||
|  |           within a display generated by the Derivative Works, if and | ||||||
|  |           wherever such third-party notices normally appear. The contents | ||||||
|  |           of the NOTICE file are for informational purposes only and | ||||||
|  |           do not modify the License. You may add Your own attribution | ||||||
|  |           notices within Derivative Works that You distribute, alongside | ||||||
|  |           or as an addendum to the NOTICE text from the Work, provided | ||||||
|  |           that such additional attribution notices cannot be construed | ||||||
|  |           as modifying the License. | ||||||
|  |  | ||||||
|  |       You may add Your own copyright statement to Your modifications and | ||||||
|  |       may provide additional or different license terms and conditions | ||||||
|  |       for use, reproduction, or distribution of Your modifications, or | ||||||
|  |       for any such Derivative Works as a whole, provided Your use, | ||||||
|  |       reproduction, and distribution of the Work otherwise complies with | ||||||
|  |       the conditions stated in this License. | ||||||
|  |  | ||||||
|  |    5. Submission of Contributions. Unless You explicitly state otherwise, | ||||||
|  |       any Contribution intentionally submitted for inclusion in the Work | ||||||
|  |       by You to the Licensor shall be under the terms and conditions of | ||||||
|  |       this License, without any additional terms or conditions. | ||||||
|  |       Notwithstanding the above, nothing herein shall supersede or modify | ||||||
|  |       the terms of any separate license agreement you may have executed | ||||||
|  |       with Licensor regarding such Contributions. | ||||||
|  |  | ||||||
|  |    6. Trademarks. This License does not grant permission to use the trade | ||||||
|  |       names, trademarks, service marks, or product names of the Licensor, | ||||||
|  |       except as required for reasonable and customary use in describing the | ||||||
|  |       origin of the Work and reproducing the content of the NOTICE file. | ||||||
|  |  | ||||||
|  |    7. Disclaimer of Warranty. Unless required by applicable law or | ||||||
|  |       agreed to in writing, Licensor provides the Work (and each | ||||||
|  |       Contributor provides its Contributions) on an "AS IS" BASIS, | ||||||
|  |       WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or | ||||||
|  |       implied, including, without limitation, any warranties or conditions | ||||||
|  |       of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A | ||||||
|  |       PARTICULAR PURPOSE. You are solely responsible for determining the | ||||||
|  |       appropriateness of using or redistributing the Work and assume any | ||||||
|  |       risks associated with Your exercise of permissions under this License. | ||||||
|  |  | ||||||
|  |    8. Limitation of Liability. In no event and under no legal theory, | ||||||
|  |       whether in tort (including negligence), contract, or otherwise, | ||||||
|  |       unless required by applicable law (such as deliberate and grossly | ||||||
|  |       negligent acts) or agreed to in writing, shall any Contributor be | ||||||
|  |       liable to You for damages, including any direct, indirect, special, | ||||||
|  |       incidental, or consequential damages of any character arising as a | ||||||
|  |       result of this License or out of the use or inability to use the | ||||||
|  |       Work (including but not limited to damages for loss of goodwill, | ||||||
|  |       work stoppage, computer failure or malfunction, or any and all | ||||||
|  |       other commercial damages or losses), even if such Contributor | ||||||
|  |       has been advised of the possibility of such damages. | ||||||
|  |  | ||||||
|  |    9. Accepting Warranty or Additional Liability. While redistributing | ||||||
|  |       the Work or Derivative Works thereof, You may choose to offer, | ||||||
|  |       and charge a fee for, acceptance of support, warranty, indemnity, | ||||||
|  |       or other liability obligations and/or rights consistent with this | ||||||
|  |       License. However, in accepting such obligations, You may act only | ||||||
|  |       on Your own behalf and on Your sole responsibility, not on behalf | ||||||
|  |       of any other Contributor, and only if You agree to indemnify, | ||||||
|  |       defend, and hold each Contributor harmless for any liability | ||||||
|  |       incurred by, or claims asserted against, such Contributor by reason | ||||||
|  |       of your accepting any such warranty or additional liability. | ||||||
|  |  | ||||||
|  |    END OF TERMS AND CONDITIONS | ||||||
|  |  | ||||||
|  |    APPENDIX: How to apply the Apache License to your work. | ||||||
|  |  | ||||||
|  |       To apply the Apache License to your work, attach the following | ||||||
|  |       boilerplate notice, with the fields enclosed by brackets "[]" | ||||||
|  |       replaced with your own identifying information. (Don't include | ||||||
|  |       the brackets!)  The text should be enclosed in the appropriate | ||||||
|  |       comment syntax for the file format. We also recommend that a | ||||||
|  |       file or class name and description of purpose be included on the | ||||||
|  |       same "printed page" as the copyright notice for easier | ||||||
|  |       identification within third-party archives. | ||||||
|  |  | ||||||
|  |    Copyright 2020 long2ice | ||||||
|  |  | ||||||
|  |    Licensed under the Apache License, Version 2.0 (the "License"); | ||||||
|  |    you may not use this file except in compliance with the License. | ||||||
|  |    You may obtain a copy of the License at | ||||||
|  |  | ||||||
|  |        http://www.apache.org/licenses/LICENSE-2.0 | ||||||
|  |  | ||||||
|  |    Unless required by applicable law or agreed to in writing, software | ||||||
|  |    distributed under the License is distributed on an "AS IS" BASIS, | ||||||
|  |    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||||||
|  |    See the License for the specific language governing permissions and | ||||||
|  |    limitations under the License. | ||||||
|   | |||||||
| @@ -1,3 +0,0 @@ | |||||||
| include LICENSE |  | ||||||
| include README.rst |  | ||||||
| include requirements.txt |  | ||||||
							
								
								
									
										28
									
								
								Makefile
									
									
									
									
									
								
							
							
						
						
									
										28
									
								
								Makefile
									
									
									
									
									
								
							| @@ -18,23 +18,20 @@ help: | |||||||
| 	@echo  "    test		Runs all tests" | 	@echo  "    test		Runs all tests" | ||||||
| 	@echo  "    style		Auto-formats the code" | 	@echo  "    style		Auto-formats the code" | ||||||
|  |  | ||||||
|  | up: | ||||||
|  | 	@poetry update | ||||||
|  |  | ||||||
| deps: | deps: | ||||||
| 	@which pip-sync > /dev/null || pip install -q pip-tools | 	@poetry install -E dbdrivers --no-root | ||||||
| 	@pip install -r requirements-dev.txt |  | ||||||
|  |  | ||||||
| style: deps | style: deps | ||||||
| 	isort -rc $(checkfiles) | 	isort -src $(checkfiles) | ||||||
| 	black $(black_opts) $(checkfiles) | 	black $(black_opts) $(checkfiles) | ||||||
|  |  | ||||||
| check: deps | check: deps | ||||||
| ifneq ($(shell which black),) |  | ||||||
| 	black --check $(black_opts) $(checkfiles) || (echo "Please run 'make style' to auto-fix style issues" && false) | 	black --check $(black_opts) $(checkfiles) || (echo "Please run 'make style' to auto-fix style issues" && false) | ||||||
| endif |  | ||||||
| 	flake8 $(checkfiles) | 	flake8 $(checkfiles) | ||||||
| 	mypy $(checkfiles) | 	bandit -x tests -r $(checkfiles) | ||||||
| 	pylint -d C,W,R $(checkfiles) |  | ||||||
| 	bandit -r $(checkfiles) |  | ||||||
| 	python setup.py check -mrs |  | ||||||
|  |  | ||||||
| test: deps | test: deps | ||||||
| 	$(py_warn) TEST_DB=sqlite://:memory: py.test | 	$(py_warn) TEST_DB=sqlite://:memory: py.test | ||||||
| @@ -43,17 +40,14 @@ test_sqlite: | |||||||
| 	$(py_warn) TEST_DB=sqlite://:memory: py.test | 	$(py_warn) TEST_DB=sqlite://:memory: py.test | ||||||
|  |  | ||||||
| test_mysql: | test_mysql: | ||||||
| 	$(py_warn) TEST_DB="mysql://root:$(MYSQL_PASS)@$(MYSQL_HOST):$(MYSQL_PORT)/test_\{\}" py.test | 	$(py_warn) TEST_DB="mysql://root:$(MYSQL_PASS)@$(MYSQL_HOST):$(MYSQL_PORT)/test_\{\}" pytest -v -s | ||||||
|  |  | ||||||
| test_postgres: | test_postgres: | ||||||
| 	$(py_warn) TEST_DB="postgres://postgres:$(POSTGRES_PASS)@$(POSTGRES_HOST):$(POSTGRES_PORT)/test_\{\}" py.test | 	$(py_warn) TEST_DB="postgres://postgres:$(POSTGRES_PASS)@$(POSTGRES_HOST):$(POSTGRES_PORT)/test_\{\}" pytest | ||||||
|  |  | ||||||
| testall: deps test_sqlite test_postgres test_mysql | testall: deps test_sqlite test_postgres test_mysql | ||||||
|  |  | ||||||
| publish: deps | build: deps | ||||||
| 	rm -fR dist/ | 	@poetry build | ||||||
| 	python setup.py sdist |  | ||||||
| 	twine upload dist/* |  | ||||||
|  |  | ||||||
| ci: | ci: check testall | ||||||
| 	@act -P ubuntu-latest=nektos/act-environments-ubuntu:18.04 -b |  | ||||||
|   | |||||||
							
								
								
									
										164
									
								
								README.md
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										164
									
								
								README.md
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,164 @@ | |||||||
|  | # Aerich | ||||||
|  |  | ||||||
|  | [](https://pypi.python.org/pypi/aerich) | ||||||
|  | [](https://github.com/long2ice/aerich) | ||||||
|  | [](https://github.com/long2ice/aerich/actions?query=workflow:pypi) | ||||||
|  | [](https://github.com/long2ice/aerich/actions?query=workflow:test) | ||||||
|  |  | ||||||
|  | ## Introduction | ||||||
|  |  | ||||||
|  | Tortoise-ORM is the best asyncio ORM now, but it lacks a database | ||||||
|  | migrations tool like alembic for SQLAlchemy, or Django ORM with its | ||||||
|  | own migrations tool. | ||||||
|  |  | ||||||
|  | This project aims to be the best migrations tool for Tortoise-ORM and is | ||||||
|  | written by one of the contributors of Tortoise-ORM. | ||||||
|  |  | ||||||
|  | ## Install | ||||||
|  |  | ||||||
|  | Just install from pypi: | ||||||
|  |  | ||||||
|  | ```shell | ||||||
|  | > pip install aerich | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ## Quick Start | ||||||
|  |  | ||||||
|  | ```shell | ||||||
|  | $ aerich -h | ||||||
|  |  | ||||||
|  | Usage: aerich [OPTIONS] COMMAND [ARGS]... | ||||||
|  |  | ||||||
|  | Options: | ||||||
|  |   -c, --config TEXT  Config file.  [default: aerich.ini] | ||||||
|  |   --app TEXT         Tortoise-ORM app name.  [default: models] | ||||||
|  |   -n, --name TEXT    Name of section in .ini file to use for aerich config. | ||||||
|  |                      [default: aerich] | ||||||
|  |   -h, --help         Show this message and exit. | ||||||
|  |  | ||||||
|  | Commands: | ||||||
|  |   downgrade  Downgrade to previous version. | ||||||
|  |   heads      Show current available heads in migrate location. | ||||||
|  |   history    List all migrate items. | ||||||
|  |   init       Init config file and generate root migrate location. | ||||||
|  |   init-db    Generate schema and generate app migrate location. | ||||||
|  |   migrate    Generate migrate changes file. | ||||||
|  |   upgrade    Upgrade to latest version. | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ## Usage | ||||||
|  |  | ||||||
|  | You first need to add `aerich.models` to your `Tortoise-ORM` config, | ||||||
|  | for example: | ||||||
|  |  | ||||||
|  | ```python | ||||||
|  | TORTOISE_ORM = { | ||||||
|  |     "connections": {"default": "mysql://root:123456@127.0.0.1:3306/test"}, | ||||||
|  |     "apps": { | ||||||
|  |         "models": { | ||||||
|  |             "models": ["tests.models", "aerich.models"], | ||||||
|  |             "default_connection": "default", | ||||||
|  |         }, | ||||||
|  |     }, | ||||||
|  | } | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ### Initialization | ||||||
|  |  | ||||||
|  | ```shell | ||||||
|  | $ aerich init -h | ||||||
|  |  | ||||||
|  | Usage: aerich init [OPTIONS] | ||||||
|  |  | ||||||
|  |   Init config file and generate root migrate location. | ||||||
|  |  | ||||||
|  | Options: | ||||||
|  |   -t, --tortoise-orm TEXT  Tortoise-ORM config module dict variable, like settings.TORTOISE_ORM. | ||||||
|  |                            [required] | ||||||
|  |   --location TEXT          Migrate store location.  [default: ./migrations] | ||||||
|  |   -h, --help               Show this message and exit. | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | Init config file and location: | ||||||
|  |  | ||||||
|  | ```shell | ||||||
|  | $ aerich init -t tests.backends.mysql.TORTOISE_ORM | ||||||
|  |  | ||||||
|  | Success create migrate location ./migrations | ||||||
|  | Success generate config file aerich.ini | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ### Init db | ||||||
|  |  | ||||||
|  | ```shell | ||||||
|  | $ aerich init-db | ||||||
|  |  | ||||||
|  | Success create app migrate location ./migrations/models | ||||||
|  | Success generate schema for app "models" | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | If your Tortoise-ORM app is not default `models`, you must specify | ||||||
|  | `--app` like `aerich --app other_models init-db`. | ||||||
|  |  | ||||||
|  | ### Update models and make migrate | ||||||
|  |  | ||||||
|  | ```shell | ||||||
|  | $ aerich migrate --name drop_column | ||||||
|  |  | ||||||
|  | Success migrate 1_202029051520102929_drop_column.json | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | Format of migrate filename is | ||||||
|  | `{version_num}_{datetime}_{name|update}.json`. | ||||||
|  |  | ||||||
|  | And if `aerich` guesses you are renaming a column, it will ask `Rename {old_column} to {new_column} [True]`; you can choose `True` to rename the column without dropping it, or choose `False` to drop the column and then create a new one. | ||||||
|  |  | ||||||
|  | ### Upgrade to latest version | ||||||
|  |  | ||||||
|  | ```shell | ||||||
|  | $ aerich upgrade | ||||||
|  |  | ||||||
|  | Success upgrade 1_202029051520102929_drop_column.json | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | Now your db is migrated to the latest version. | ||||||
|  |  | ||||||
|  | ### Downgrade to previous version | ||||||
|  |  | ||||||
|  | ```shell | ||||||
|  | $ aerich downgrade | ||||||
|  |  | ||||||
|  | Success downgrade 1_202029051520102929_drop_column.json | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | Now your db is rolled back to the previous version. | ||||||
|  |  | ||||||
|  | ### Show history | ||||||
|  |  | ||||||
|  | ```shell | ||||||
|  | $ aerich history | ||||||
|  |  | ||||||
|  | 1_202029051520102929_drop_column.json | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ### Show heads to be migrated | ||||||
|  |  | ||||||
|  | ```shell | ||||||
|  | $ aerich heads | ||||||
|  |  | ||||||
|  | 1_202029051520102929_drop_column.json | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ## Support this project | ||||||
|  |  | ||||||
|  | - Just give a star! | ||||||
|  | - Donation. | ||||||
|  |  | ||||||
|  | | AliPay                                                                                 | WeChatPay                                                                                 | PayPal                                                           | | ||||||
|  | | -------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------- | ---------------------------------------------------------------- | | ||||||
|  | | <img width="200" src="https://github.com/long2ice/aerich/raw/dev/images/alipay.jpeg"/> | <img width="200" src="https://github.com/long2ice/aerich/raw/dev/images/wechatpay.jpeg"/> | [PayPal](https://www.paypal.me/long2ice) to my account long2ice. | | ||||||
|  |  | ||||||
|  | ## License | ||||||
|  |  | ||||||
|  | This project is licensed under the | ||||||
|  | [Apache-2.0](https://github.com/long2ice/aerich/blob/master/LICENSE) License. | ||||||
							
								
								
									
										169
									
								
								README.rst
									
									
									
									
									
								
							
							
						
						
									
										169
									
								
								README.rst
									
									
									
									
									
								
							| @@ -1,169 +0,0 @@ | |||||||
| ====== |  | ||||||
| Aerich |  | ||||||
| ====== |  | ||||||
|  |  | ||||||
| .. image:: https://img.shields.io/pypi/v/aerich.svg?style=flat |  | ||||||
|    :target: https://pypi.python.org/pypi/aerich |  | ||||||
| .. image:: https://img.shields.io/github/license/long2ice/aerich |  | ||||||
|    :target: https://github.com/long2ice/aerich |  | ||||||
| .. image:: https://github.com/long2ice/aerich/workflows/pypi/badge.svg |  | ||||||
|    :target: https://github.com/long2ice/aerich/actions?query=workflow:pypi |  | ||||||
| .. image:: https://github.com/long2ice/aerich/workflows/test/badge.svg |  | ||||||
|    :target: https://github.com/long2ice/aerich/actions?query=workflow:test |  | ||||||
|  |  | ||||||
| Introduction |  | ||||||
| ============ |  | ||||||
|  |  | ||||||
Tortoise-ORM is the best asyncio ORM now, but it lacks a database migrations tool like alembic for SQLAlchemy, or Django ORM with its own migrations tool. |  | ||||||
|  |  | ||||||
This project aims to be the best migrations tool for Tortoise-ORM, written by one of the contributors of Tortoise-ORM. |  | ||||||
|  |  | ||||||
| Install |  | ||||||
| ======= |  | ||||||
|  |  | ||||||
| Just install from pypi: |  | ||||||
|  |  | ||||||
| .. code-block:: shell |  | ||||||
|  |  | ||||||
|     $ pip install aerich |  | ||||||
|  |  | ||||||
| Quick Start |  | ||||||
| =========== |  | ||||||
|  |  | ||||||
| .. code-block:: shell |  | ||||||
|  |  | ||||||
|     $ aerich -h |  | ||||||
|  |  | ||||||
|     Usage: aerich [OPTIONS] COMMAND [ARGS]... |  | ||||||
|  |  | ||||||
|     Options: |  | ||||||
|       -c, --config TEXT  Config file.  [default: aerich.ini] |  | ||||||
|       --app TEXT         Tortoise-ORM app name.  [default: models] |  | ||||||
|       -n, --name TEXT    Name of section in .ini file to use for aerich config. |  | ||||||
|                          [default: aerich] |  | ||||||
|       -h, --help         Show this message and exit. |  | ||||||
|  |  | ||||||
|     Commands: |  | ||||||
|       downgrade  Downgrade to previous version. |  | ||||||
|       heads      Show current available heads in migrate location. |  | ||||||
|       history    List all migrate items. |  | ||||||
|       init       Init config file and generate root migrate location. |  | ||||||
|       init-db    Generate schema and generate app migrate location. |  | ||||||
|       migrate    Generate migrate changes file. |  | ||||||
|       upgrade    Upgrade to latest version. |  | ||||||
|  |  | ||||||
| Usage |  | ||||||
| ===== |  | ||||||
You need to add ``aerich.models`` to your ``Tortoise-ORM`` config first, for example: |  | ||||||
|  |  | ||||||
| .. code-block:: python |  | ||||||
|  |  | ||||||
|     TORTOISE_ORM = { |  | ||||||
|         "connections": {"default": "mysql://root:123456@127.0.0.1:3306/test"}, |  | ||||||
|         "apps": { |  | ||||||
|             "models": { |  | ||||||
|                 "models": ["tests.models", "aerich.models"], |  | ||||||
|                 "default_connection": "default", |  | ||||||
|             }, |  | ||||||
|         }, |  | ||||||
|     } |  | ||||||
|  |  | ||||||
| Initialization |  | ||||||
| -------------- |  | ||||||
|  |  | ||||||
| .. code-block:: shell |  | ||||||
|  |  | ||||||
|     $ aerich init -h |  | ||||||
|  |  | ||||||
|     Usage: aerich init [OPTIONS] |  | ||||||
|  |  | ||||||
|       Init config file and generate root migrate location. |  | ||||||
|  |  | ||||||
|     Options: |  | ||||||
|       -t, --tortoise-orm TEXT  Tortoise-ORM config module dict variable, like settings.TORTOISE_ORM. |  | ||||||
|                                [required] |  | ||||||
|       --location TEXT          Migrate store location.  [default: ./migrations] |  | ||||||
|       -h, --help               Show this message and exit. |  | ||||||
|  |  | ||||||
| Init config file and location: |  | ||||||
|  |  | ||||||
| .. code-block:: shell |  | ||||||
|  |  | ||||||
|     $ aerich init -t tests.backends.mysql.TORTOISE_ORM |  | ||||||
|  |  | ||||||
|     Success create migrate location ./migrations |  | ||||||
|     Success generate config file aerich.ini |  | ||||||
|  |  | ||||||
| Init db |  | ||||||
| ------- |  | ||||||
|  |  | ||||||
| .. code-block:: shell |  | ||||||
|  |  | ||||||
|     $ aerich init-db |  | ||||||
|  |  | ||||||
|     Success create app migrate location ./migrations/models |  | ||||||
|     Success generate schema for app "models" |  | ||||||
|  |  | ||||||
| .. note:: |  | ||||||
|  |  | ||||||
|     If your Tortoise-ORM app is not default ``models``, you must specify ``--app`` like ``aerich --app other_models init-db``. |  | ||||||
|  |  | ||||||
| Update models and make migrate |  | ||||||
| ------------------------------ |  | ||||||
|  |  | ||||||
| .. code-block:: shell |  | ||||||
|  |  | ||||||
|     $ aerich migrate --name drop_column |  | ||||||
|  |  | ||||||
|     Success migrate 1_202029051520102929_drop_column.json |  | ||||||
|  |  | ||||||
| Format of migrate filename is ``{version_num}_{datetime}_{name|update}.json`` |  | ||||||
|  |  | ||||||
| Upgrade to latest version |  | ||||||
| ------------------------- |  | ||||||
|  |  | ||||||
| .. code-block:: shell |  | ||||||
|  |  | ||||||
|     $ aerich upgrade |  | ||||||
|  |  | ||||||
|     Success upgrade 1_202029051520102929_drop_column.json |  | ||||||
|  |  | ||||||
Now your db is migrated to the latest version. |  | ||||||
|  |  | ||||||
| Downgrade to previous version |  | ||||||
| ----------------------------- |  | ||||||
|  |  | ||||||
| .. code-block:: shell |  | ||||||
|  |  | ||||||
|     $ aerich downgrade |  | ||||||
|  |  | ||||||
|     Success downgrade 1_202029051520102929_drop_column.json |  | ||||||
|  |  | ||||||
Now your db is rolled back to the previous version. |  | ||||||
|  |  | ||||||
| Show history |  | ||||||
| ------------ |  | ||||||
|  |  | ||||||
| .. code-block:: shell |  | ||||||
|  |  | ||||||
|     $ aerich history |  | ||||||
|  |  | ||||||
|     1_202029051520102929_drop_column.json |  | ||||||
|  |  | ||||||
| Show heads to be migrated |  | ||||||
| ------------------------- |  | ||||||
|  |  | ||||||
| .. code-block:: shell |  | ||||||
|  |  | ||||||
|     $ aerich heads |  | ||||||
|  |  | ||||||
|     1_202029051520102929_drop_column.json |  | ||||||
|  |  | ||||||
| Limitations |  | ||||||
| =========== |  | ||||||
* ``rename column`` is not supported yet. |  | ||||||
| * ``Sqlite`` and ``Postgres`` may not work as expected because I don't use those in my work. |  | ||||||
|  |  | ||||||
| License |  | ||||||
| ======= |  | ||||||
| This project is licensed under the `MIT <https://github.com/long2ice/aerich/blob/master/LICENSE>`_ License. |  | ||||||
| @@ -1 +1 @@ | |||||||
| __version__ = "0.1.9" | __version__ = "0.2.4" | ||||||
|   | |||||||
| @@ -2,7 +2,6 @@ import json | |||||||
| import os | import os | ||||||
| import sys | import sys | ||||||
| from configparser import ConfigParser | from configparser import ConfigParser | ||||||
| from enum import Enum |  | ||||||
|  |  | ||||||
| import asyncclick as click | import asyncclick as click | ||||||
| from asyncclick import Context, UsageError | from asyncclick import Context, UsageError | ||||||
| @@ -15,20 +14,14 @@ from aerich.migrate import Migrate | |||||||
| from aerich.utils import get_app_connection, get_app_connection_name, get_tortoise_config | from aerich.utils import get_app_connection, get_app_connection_name, get_tortoise_config | ||||||
|  |  | ||||||
| from . import __version__ | from . import __version__ | ||||||
|  | from .enums import Color | ||||||
| from .models import Aerich | from .models import Aerich | ||||||
|  |  | ||||||
|  |  | ||||||
| class Color(str, Enum): |  | ||||||
|     green = "green" |  | ||||||
|     red = "red" |  | ||||||
|     yellow = "yellow" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| parser = ConfigParser() | parser = ConfigParser() | ||||||
|  |  | ||||||
|  |  | ||||||
| @click.group(context_settings={"help_option_names": ["-h", "--help"]}) | @click.group(context_settings={"help_option_names": ["-h", "--help"]}) | ||||||
| @click.version_option(__version__) | @click.version_option(__version__, "-V", "--version") | ||||||
| @click.option( | @click.option( | ||||||
|     "-c", "--config", default="aerich.ini", show_default=True, help="Config file.", |     "-c", "--config", default="aerich.ini", show_default=True, help="Config file.", | ||||||
| ) | ) | ||||||
| @@ -74,7 +67,6 @@ async def migrate(ctx: Context, name): | |||||||
|     config = ctx.obj["config"] |     config = ctx.obj["config"] | ||||||
|     location = ctx.obj["location"] |     location = ctx.obj["location"] | ||||||
|     app = ctx.obj["app"] |     app = ctx.obj["app"] | ||||||
|  |  | ||||||
|     ret = await Migrate.migrate(name) |     ret = await Migrate.migrate(name) | ||||||
|     if not ret: |     if not ret: | ||||||
|         return click.secho("No changes detected", fg=Color.yellow) |         return click.secho("No changes detected", fg=Color.yellow) | ||||||
| @@ -96,11 +88,11 @@ async def upgrade(ctx: Context): | |||||||
|         if not exists: |         if not exists: | ||||||
|             async with in_transaction(get_app_connection_name(config, app)) as conn: |             async with in_transaction(get_app_connection_name(config, app)) as conn: | ||||||
|                 file_path = os.path.join(Migrate.migrate_location, version) |                 file_path = os.path.join(Migrate.migrate_location, version) | ||||||
|                 with open(file_path, "r") as f: |                 with open(file_path, "r", encoding="utf-8") as f: | ||||||
|                     content = json.load(f) |                     content = json.load(f) | ||||||
|                     upgrade_query_list = content.get("upgrade") |                     upgrade_query_list = content.get("upgrade") | ||||||
|                     for upgrade_query in upgrade_query_list: |                     for upgrade_query in upgrade_query_list: | ||||||
|                         await conn.execute_query(upgrade_query) |                         await conn.execute_script(upgrade_query) | ||||||
|                 await Aerich.create(version=version, app=app) |                 await Aerich.create(version=version, app=app) | ||||||
|             click.secho(f"Success upgrade {version}", fg=Color.green) |             click.secho(f"Success upgrade {version}", fg=Color.green) | ||||||
|             migrated = True |             migrated = True | ||||||
| @@ -119,11 +111,11 @@ async def downgrade(ctx: Context): | |||||||
|     file = last_version.version |     file = last_version.version | ||||||
|     async with in_transaction(get_app_connection_name(config, app)) as conn: |     async with in_transaction(get_app_connection_name(config, app)) as conn: | ||||||
|         file_path = os.path.join(Migrate.migrate_location, file) |         file_path = os.path.join(Migrate.migrate_location, file) | ||||||
|         with open(file_path, "r") as f: |         with open(file_path, "r", encoding="utf-8") as f: | ||||||
|             content = json.load(f) |             content = json.load(f) | ||||||
|             downgrade_query_list = content.get("downgrade") |             downgrade_query_list = content.get("downgrade") | ||||||
|             if not downgrade_query_list: |             if not downgrade_query_list: | ||||||
|                 return click.secho(f"No downgrade item dound", fg=Color.yellow) |                 return click.secho("No downgrade item found", fg=Color.yellow) | ||||||
|             for downgrade_query in downgrade_query_list: |             for downgrade_query in downgrade_query_list: | ||||||
|                 await conn.execute_query(downgrade_query) |                 await conn.execute_query(downgrade_query) | ||||||
|             await last_version.delete() |             await last_version.delete() | ||||||
| @@ -146,7 +138,7 @@ async def heads(ctx: Context): | |||||||
|  |  | ||||||
| @cli.command(help="List all migrate items.") | @cli.command(help="List all migrate items.") | ||||||
| @click.pass_context | @click.pass_context | ||||||
| def history(ctx): | async def history(ctx: Context): | ||||||
|     versions = Migrate.get_all_version_files() |     versions = Migrate.get_all_version_files() | ||||||
|     for version in versions: |     for version in versions: | ||||||
|         click.secho(version, fg=Color.green) |         click.secho(version, fg=Color.green) | ||||||
| @@ -177,7 +169,7 @@ async def init( | |||||||
|     parser.set(name, "tortoise_orm", tortoise_orm) |     parser.set(name, "tortoise_orm", tortoise_orm) | ||||||
|     parser.set(name, "location", location) |     parser.set(name, "location", location) | ||||||
|  |  | ||||||
|     with open(config_file, "w") as f: |     with open(config_file, "w", encoding="utf-8") as f: | ||||||
|         parser.write(f) |         parser.write(f) | ||||||
|  |  | ||||||
|     if not os.path.isdir(location): |     if not os.path.isdir(location): | ||||||
| @@ -190,7 +182,7 @@ async def init( | |||||||
| @cli.command(help="Generate schema and generate app migrate location.") | @cli.command(help="Generate schema and generate app migrate location.") | ||||||
| @click.option( | @click.option( | ||||||
|     "--safe", |     "--safe", | ||||||
|     is_flag=True, |     type=bool, | ||||||
|     default=True, |     default=True, | ||||||
|     help="When set to true, creates the table only when it does not already exist.", |     help="When set to true, creates the table only when it does not already exist.", | ||||||
|     show_default=True, |     show_default=True, | ||||||
| @@ -218,7 +210,7 @@ async def init_db(ctx: Context, safe): | |||||||
|  |  | ||||||
|     version = await Migrate.generate_version() |     version = await Migrate.generate_version() | ||||||
|     await Aerich.create(version=version, app=app) |     await Aerich.create(version=version, app=app) | ||||||
|     with open(os.path.join(dirname, version), "w") as f: |     with open(os.path.join(dirname, version), "w", encoding="utf-8") as f: | ||||||
|         content = { |         content = { | ||||||
|             "upgrade": [schema], |             "upgrade": [schema], | ||||||
|         } |         } | ||||||
|   | |||||||
| @@ -11,6 +11,9 @@ class BaseDDL: | |||||||
|     _DROP_TABLE_TEMPLATE = 'DROP TABLE IF EXISTS "{table_name}"' |     _DROP_TABLE_TEMPLATE = 'DROP TABLE IF EXISTS "{table_name}"' | ||||||
|     _ADD_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" ADD {column}' |     _ADD_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" ADD {column}' | ||||||
|     _DROP_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" DROP COLUMN "{column_name}"' |     _DROP_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" DROP COLUMN "{column_name}"' | ||||||
|  |     _RENAME_COLUMN_TEMPLATE = ( | ||||||
|  |         'ALTER TABLE "{table_name}" RENAME COLUMN "{old_column_name}" TO "{new_column_name}"' | ||||||
|  |     ) | ||||||
|     _ADD_INDEX_TEMPLATE = ( |     _ADD_INDEX_TEMPLATE = ( | ||||||
|         'ALTER TABLE "{table_name}" ADD {unique} INDEX "{index_name}" ({column_names})' |         'ALTER TABLE "{table_name}" ADD {unique} INDEX "{index_name}" ({column_names})' | ||||||
|     ) |     ) | ||||||
| @@ -125,6 +128,13 @@ class BaseDDL: | |||||||
|             ), |             ), | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|  |     def rename_column(self, model: "Type[Model]", old_column_name: str, new_column_name: str): | ||||||
|  |         return self._RENAME_COLUMN_TEMPLATE.format( | ||||||
|  |             table_name=model._meta.db_table, | ||||||
|  |             old_column_name=old_column_name, | ||||||
|  |             new_column_name=new_column_name, | ||||||
|  |         ) | ||||||
|  |  | ||||||
|     def add_index(self, model: "Type[Model]", field_names: List[str], unique=False): |     def add_index(self, model: "Type[Model]", field_names: List[str], unique=False): | ||||||
|         return self._ADD_INDEX_TEMPLATE.format( |         return self._ADD_INDEX_TEMPLATE.format( | ||||||
|             unique="UNIQUE" if unique else "", |             unique="UNIQUE" if unique else "", | ||||||
| @@ -179,3 +189,12 @@ class BaseDDL: | |||||||
|                 to_field=to_field_name, |                 to_field=to_field_name, | ||||||
|             ), |             ), | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|  |     def alter_column_default(self, model: "Type[Model]", field_object: Field): | ||||||
|  |         pass | ||||||
|  |  | ||||||
|  |     def alter_column_null(self, model: "Type[Model]", field_object: Field): | ||||||
|  |         pass | ||||||
|  |  | ||||||
|  |     def set_comment(self, model: "Type[Model]", field_object: Field): | ||||||
|  |         pass | ||||||
|   | |||||||
| @@ -9,6 +9,9 @@ class MysqlDDL(BaseDDL): | |||||||
|     _DROP_TABLE_TEMPLATE = "DROP TABLE IF EXISTS `{table_name}`" |     _DROP_TABLE_TEMPLATE = "DROP TABLE IF EXISTS `{table_name}`" | ||||||
|     _ADD_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` ADD {column}" |     _ADD_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` ADD {column}" | ||||||
|     _DROP_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` DROP COLUMN `{column_name}`" |     _DROP_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` DROP COLUMN `{column_name}`" | ||||||
|  |     _RENAME_COLUMN_TEMPLATE = ( | ||||||
|  |         "ALTER TABLE `{table_name}` RENAME COLUMN `{old_column_name}` TO `{new_column_name}`" | ||||||
|  |     ) | ||||||
|     _ADD_INDEX_TEMPLATE = ( |     _ADD_INDEX_TEMPLATE = ( | ||||||
|         "ALTER TABLE `{table_name}` ADD {unique} INDEX `{index_name}` ({column_names})" |         "ALTER TABLE `{table_name}` ADD {unique} INDEX `{index_name}` ({column_names})" | ||||||
|     ) |     ) | ||||||
|   | |||||||
| @@ -1,4 +1,8 @@ | |||||||
|  | from typing import List, Type | ||||||
|  |  | ||||||
|  | from tortoise import Model | ||||||
| from tortoise.backends.asyncpg.schema_generator import AsyncpgSchemaGenerator | from tortoise.backends.asyncpg.schema_generator import AsyncpgSchemaGenerator | ||||||
|  | from tortoise.fields import Field | ||||||
|  |  | ||||||
| from aerich.ddl import BaseDDL | from aerich.ddl import BaseDDL | ||||||
|  |  | ||||||
| @@ -6,3 +10,66 @@ from aerich.ddl import BaseDDL | |||||||
| class PostgresDDL(BaseDDL): | class PostgresDDL(BaseDDL): | ||||||
|     schema_generator_cls = AsyncpgSchemaGenerator |     schema_generator_cls = AsyncpgSchemaGenerator | ||||||
|     DIALECT = AsyncpgSchemaGenerator.DIALECT |     DIALECT = AsyncpgSchemaGenerator.DIALECT | ||||||
|  |     _ADD_INDEX_TEMPLATE = 'CREATE INDEX "{index_name}" ON "{table_name}" ({column_names})' | ||||||
|  |     _ADD_UNIQUE_TEMPLATE = ( | ||||||
|  |         'ALTER TABLE "{table_name}" ADD CONSTRAINT "{index_name}" UNIQUE ({column_names})' | ||||||
|  |     ) | ||||||
|  |     _DROP_INDEX_TEMPLATE = 'DROP INDEX "{index_name}"' | ||||||
|  |     _DROP_UNIQUE_TEMPLATE = 'ALTER TABLE "{table_name}" DROP CONSTRAINT "{index_name}"' | ||||||
|  |     _ALTER_DEFAULT_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" {default}' | ||||||
|  |     _ALTER_NULL_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" {set_drop} NOT NULL' | ||||||
|  |     _MODIFY_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" TYPE {datatype}' | ||||||
|  |     _SET_COMMENT_TEMPLATE = 'COMMENT ON COLUMN "{table_name}"."{column}" IS {comment}' | ||||||
|  |     _DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP CONSTRAINT "{fk_name}"' | ||||||
|  |  | ||||||
|  |     def alter_column_default(self, model: "Type[Model]", field_object: Field): | ||||||
|  |         db_table = model._meta.db_table | ||||||
|  |         default = self._get_default(model, field_object) | ||||||
|  |         return self._ALTER_DEFAULT_TEMPLATE.format( | ||||||
|  |             table_name=db_table, | ||||||
|  |             column=field_object.model_field_name, | ||||||
|  |             default="SET" + default if default else "DROP DEFAULT", | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def alter_column_null(self, model: "Type[Model]", field_object: Field): | ||||||
|  |         db_table = model._meta.db_table | ||||||
|  |         return self._ALTER_NULL_TEMPLATE.format( | ||||||
|  |             table_name=db_table, | ||||||
|  |             column=field_object.model_field_name, | ||||||
|  |             set_drop="DROP" if field_object.null else "SET", | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def modify_column(self, model: "Type[Model]", field_object: Field): | ||||||
|  |         db_table = model._meta.db_table | ||||||
|  |         return self._MODIFY_COLUMN_TEMPLATE.format( | ||||||
|  |             table_name=db_table, | ||||||
|  |             column=field_object.model_field_name, | ||||||
|  |             datatype=field_object.get_for_dialect(self.DIALECT, "SQL_TYPE"), | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def add_index(self, model: "Type[Model]", field_names: List[str], unique=False): | ||||||
|  |         template = self._ADD_UNIQUE_TEMPLATE if unique else self._ADD_INDEX_TEMPLATE | ||||||
|  |         return template.format( | ||||||
|  |             index_name=self.schema_generator._generate_index_name( | ||||||
|  |                 "uid" if unique else "idx", model, field_names | ||||||
|  |             ), | ||||||
|  |             table_name=model._meta.db_table, | ||||||
|  |             column_names=", ".join([self.schema_generator.quote(f) for f in field_names]), | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def drop_index(self, model: "Type[Model]", field_names: List[str], unique=False): | ||||||
|  |         template = self._DROP_UNIQUE_TEMPLATE if unique else self._DROP_INDEX_TEMPLATE | ||||||
|  |         return template.format( | ||||||
|  |             index_name=self.schema_generator._generate_index_name( | ||||||
|  |                 "uid" if unique else "idx", model, field_names | ||||||
|  |             ), | ||||||
|  |             table_name=model._meta.db_table, | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def set_comment(self, model: "Type[Model]", field_object: Field): | ||||||
|  |         db_table = model._meta.db_table | ||||||
|  |         return self._SET_COMMENT_TEMPLATE.format( | ||||||
|  |             table_name=db_table, | ||||||
|  |             column=field_object.model_field_name, | ||||||
|  |             comment="'{}'".format(field_object.description) if field_object.description else "NULL", | ||||||
|  |         ) | ||||||
|   | |||||||
| @@ -1,8 +1,19 @@ | |||||||
|  | from typing import Type | ||||||
|  |  | ||||||
|  | from tortoise import Model | ||||||
| from tortoise.backends.sqlite.schema_generator import SqliteSchemaGenerator | from tortoise.backends.sqlite.schema_generator import SqliteSchemaGenerator | ||||||
|  | from tortoise.fields import Field | ||||||
|  |  | ||||||
| from aerich.ddl import BaseDDL | from aerich.ddl import BaseDDL | ||||||
|  | from aerich.exceptions import NotSupportError | ||||||
|  |  | ||||||
|  |  | ||||||
| class SqliteDDL(BaseDDL): | class SqliteDDL(BaseDDL): | ||||||
|     schema_generator_cls = SqliteSchemaGenerator |     schema_generator_cls = SqliteSchemaGenerator | ||||||
|     DIALECT = SqliteSchemaGenerator.DIALECT |     DIALECT = SqliteSchemaGenerator.DIALECT | ||||||
|  |  | ||||||
|  |     def drop_column(self, model: "Type[Model]", column_name: str): | ||||||
|  |         raise NotSupportError("Drop column is not support in SQLite.") | ||||||
|  |  | ||||||
|  |     def modify_column(self, model: "Type[Model]", field_object: Field): | ||||||
|  |         raise NotSupportError("Modify column is not support in SQLite.") | ||||||
|   | |||||||
							
								
								
									
										7
									
								
								aerich/enums.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										7
									
								
								aerich/enums.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,7 @@ | |||||||
|  | from enum import Enum | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class Color(str, Enum): | ||||||
|  |     green = "green" | ||||||
|  |     red = "red" | ||||||
|  |     yellow = "yellow" | ||||||
| @@ -1,6 +1,4 @@ | |||||||
| class ConfigurationError(Exception): | class NotSupportError(Exception): | ||||||
|     """ |     """ | ||||||
|     config error |     raise when features not support | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     pass |  | ||||||
|   | |||||||
| @@ -1,10 +1,11 @@ | |||||||
| import json | import json | ||||||
| import os | import os | ||||||
| import re | import re | ||||||
| from copy import deepcopy |  | ||||||
| from datetime import datetime | from datetime import datetime | ||||||
|  | from importlib import import_module | ||||||
| from typing import Dict, List, Tuple, Type | from typing import Dict, List, Tuple, Type | ||||||
|  |  | ||||||
|  | import click | ||||||
| from tortoise import ( | from tortoise import ( | ||||||
|     BackwardFKRelation, |     BackwardFKRelation, | ||||||
|     BackwardOneToOneRelation, |     BackwardOneToOneRelation, | ||||||
| @@ -16,7 +17,7 @@ from tortoise import ( | |||||||
| from tortoise.fields import Field | from tortoise.fields import Field | ||||||
|  |  | ||||||
| from aerich.ddl import BaseDDL | from aerich.ddl import BaseDDL | ||||||
| from aerich.models import Aerich | from aerich.models import MAX_VERSION_LENGTH, Aerich | ||||||
| from aerich.utils import get_app_connection | from aerich.utils import get_app_connection | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -28,6 +29,8 @@ class Migrate: | |||||||
|     _upgrade_m2m: List[str] = [] |     _upgrade_m2m: List[str] = [] | ||||||
|     _downgrade_m2m: List[str] = [] |     _downgrade_m2m: List[str] = [] | ||||||
|     _aerich = Aerich.__name__ |     _aerich = Aerich.__name__ | ||||||
|  |     _rename_old = [] | ||||||
|  |     _rename_new = [] | ||||||
|  |  | ||||||
|     ddl: BaseDDL |     ddl: BaseDDL | ||||||
|     migrate_config: dict |     migrate_config: dict | ||||||
| @@ -35,6 +38,7 @@ class Migrate: | |||||||
|     diff_app = "diff_models" |     diff_app = "diff_models" | ||||||
|     app: str |     app: str | ||||||
|     migrate_location: str |     migrate_location: str | ||||||
|  |     dialect: str | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def get_old_model_file(cls): |     def get_old_model_file(cls): | ||||||
| @@ -42,7 +46,10 @@ class Migrate: | |||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def get_all_version_files(cls) -> List[str]: |     def get_all_version_files(cls) -> List[str]: | ||||||
|         return sorted(filter(lambda x: x.endswith("json"), os.listdir(cls.migrate_location))) |         return sorted( | ||||||
|  |             filter(lambda x: x.endswith("json"), os.listdir(cls.migrate_location)), | ||||||
|  |             key=lambda x: int(x.split("_")[0]), | ||||||
|  |         ) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     async def get_last_version(cls) -> Aerich: |     async def get_last_version(cls) -> Aerich: | ||||||
| @@ -59,20 +66,19 @@ class Migrate: | |||||||
|         await Tortoise.init(config=migrate_config) |         await Tortoise.init(config=migrate_config) | ||||||
|  |  | ||||||
|         connection = get_app_connection(config, app) |         connection = get_app_connection(config, app) | ||||||
|         if connection.schema_generator.DIALECT == "mysql": |         cls.dialect = connection.schema_generator.DIALECT | ||||||
|  |         if cls.dialect == "mysql": | ||||||
|             from aerich.ddl.mysql import MysqlDDL |             from aerich.ddl.mysql import MysqlDDL | ||||||
|  |  | ||||||
|             cls.ddl = MysqlDDL(connection) |             cls.ddl = MysqlDDL(connection) | ||||||
|         elif connection.schema_generator.DIALECT == "sqlite": |         elif cls.dialect == "sqlite": | ||||||
|             from aerich.ddl.sqlite import SqliteDDL |             from aerich.ddl.sqlite import SqliteDDL | ||||||
|  |  | ||||||
|             cls.ddl = SqliteDDL(connection) |             cls.ddl = SqliteDDL(connection) | ||||||
|         elif connection.schema_generator.DIALECT == "postgres": |         elif cls.dialect == "postgres": | ||||||
|             from aerich.ddl.postgres import PostgresDDL |             from aerich.ddl.postgres import PostgresDDL | ||||||
|  |  | ||||||
|             cls.ddl = PostgresDDL(connection) |             cls.ddl = PostgresDDL(connection) | ||||||
|         else: |  | ||||||
|             raise NotImplementedError("Current only support MySQL") |  | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     async def _get_last_version_num(cls): |     async def _get_last_version_num(cls): | ||||||
| @@ -80,15 +86,18 @@ class Migrate: | |||||||
|         if not last_version: |         if not last_version: | ||||||
|             return None |             return None | ||||||
|         version = last_version.version |         version = last_version.version | ||||||
|         return int(version.split("_")[0]) |         return int(version.split("_", 1)[0]) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     async def generate_version(cls, name=None): |     async def generate_version(cls, name=None): | ||||||
|         now = datetime.now().strftime("%Y%M%D%H%M%S").replace("/", "") |         now = datetime.now().strftime("%Y%m%d%H%M%S").replace("/", "") | ||||||
|         last_version_num = await cls._get_last_version_num() |         last_version_num = await cls._get_last_version_num() | ||||||
|         if last_version_num is None: |         if last_version_num is None: | ||||||
|             return f"0_{now}_init.json" |             return f"0_{now}_init.json" | ||||||
|         return f"{last_version_num + 1}_{now}_{name}.json" |         version = f"{last_version_num + 1}_{now}_{name}.json" | ||||||
|  |         if len(version) > MAX_VERSION_LENGTH: | ||||||
|  |             raise ValueError(f"Version name exceeds maximum length ({MAX_VERSION_LENGTH})") | ||||||
|  |         return version | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     async def _generate_diff_sql(cls, name): |     async def _generate_diff_sql(cls, name): | ||||||
| @@ -97,7 +106,7 @@ class Migrate: | |||||||
|             "upgrade": cls.upgrade_operators, |             "upgrade": cls.upgrade_operators, | ||||||
|             "downgrade": cls.downgrade_operators, |             "downgrade": cls.downgrade_operators, | ||||||
|         } |         } | ||||||
|         with open(os.path.join(cls.migrate_location, version), "w") as f: |         with open(os.path.join(cls.migrate_location, version), "w", encoding="utf-8") as f: | ||||||
|             json.dump(content, f, indent=2, ensure_ascii=False) |             json.dump(content, f, indent=2, ensure_ascii=False) | ||||||
|         return version |         return version | ||||||
|  |  | ||||||
| @@ -155,12 +164,12 @@ class Migrate: | |||||||
|         """ |         """ | ||||||
|         pattern = rf"(\n)?('|\")({app})(.\w+)('|\")" |         pattern = rf"(\n)?('|\")({app})(.\w+)('|\")" | ||||||
|         for i, model_file in enumerate(model_files): |         for i, model_file in enumerate(model_files): | ||||||
|             with open(model_file, "r") as f: |             with open(model_file, "r", encoding="utf-8") as f: | ||||||
|                 content = f.read() |                 content = f.read() | ||||||
|             ret = re.sub(pattern, rf"\2{cls.diff_app}\4\5", content) |             ret = re.sub(pattern, rf"\2{cls.diff_app}\4\5", content) | ||||||
|             mode = "w" if i == 0 else "a" |             mode = "w" if i == 0 else "a" | ||||||
|             with open(old_model_file, mode) as f: |             with open(old_model_file, mode, encoding="utf-8") as f: | ||||||
|                 f.write(ret) |                 f.write(f"{ret}\n") | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def _get_migrate_config(cls, config: dict, app: str, location: str): |     def _get_migrate_config(cls, config: dict, app: str, location: str): | ||||||
| @@ -171,14 +180,13 @@ class Migrate: | |||||||
|         :param location: |         :param location: | ||||||
|         :return: |         :return: | ||||||
|         """ |         """ | ||||||
|         temp_config = deepcopy(config) |  | ||||||
|         path = os.path.join(location, app, cls.old_models) |         path = os.path.join(location, app, cls.old_models) | ||||||
|         path = path.replace("/", ".").lstrip(".") |         path = path.replace(os.sep, ".").lstrip(".") | ||||||
|         temp_config["apps"][cls.diff_app] = { |         config["apps"][cls.diff_app] = { | ||||||
|             "models": [path], |             "models": [path], | ||||||
|             "default_connection": config.get("apps").get(app).get("default_connection", "default"), |             "default_connection": config.get("apps").get(app).get("default_connection", "default"), | ||||||
|         } |         } | ||||||
|         return temp_config |         return config | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def write_old_models(cls, config: dict, app: str, location: str): |     def write_old_models(cls, config: dict, app: str, location: str): | ||||||
| @@ -194,8 +202,7 @@ class Migrate: | |||||||
|         old_model_files = [] |         old_model_files = [] | ||||||
|         models = config.get("apps").get(app).get("models") |         models = config.get("apps").get(app).get("models") | ||||||
|         for model in models: |         for model in models: | ||||||
|             if model != "aerich.models": |             old_model_files.append(import_module(model).__file__) | ||||||
|                 old_model_files.append(model.replace(".", "/") + ".py") |  | ||||||
|  |  | ||||||
|         cls.cp_models(app, old_model_files, os.path.join(location, app, cls.get_old_model_file())) |         cls.cp_models(app, old_model_files, os.path.join(location, app, cls.get_old_model_file())) | ||||||
|  |  | ||||||
| @@ -260,6 +267,32 @@ class Migrate: | |||||||
|             if cls._exclude_field(new_field, upgrade): |             if cls._exclude_field(new_field, upgrade): | ||||||
|                 continue |                 continue | ||||||
|             if new_key not in old_keys: |             if new_key not in old_keys: | ||||||
|  |                 new_field_dict = new_field.describe(serializable=True) | ||||||
|  |                 new_field_dict.pop("name") | ||||||
|  |                 new_field_dict.pop("db_column") | ||||||
|  |                 for diff_key in old_keys - new_keys: | ||||||
|  |                     old_field = old_fields_map.get(diff_key) | ||||||
|  |                     old_field_dict = old_field.describe(serializable=True) | ||||||
|  |                     old_field_dict.pop("name") | ||||||
|  |                     old_field_dict.pop("db_column") | ||||||
|  |                     if old_field_dict == new_field_dict: | ||||||
|  |                         if upgrade: | ||||||
|  |                             is_rename = click.prompt( | ||||||
|  |                                 f"Rename {diff_key} to {new_key}", | ||||||
|  |                                 default=True, | ||||||
|  |                                 type=bool, | ||||||
|  |                                 show_choices=True, | ||||||
|  |                             ) | ||||||
|  |                             cls._rename_new.append(new_key) | ||||||
|  |                             cls._rename_old.append(diff_key) | ||||||
|  |                         else: | ||||||
|  |                             is_rename = diff_key in cls._rename_new | ||||||
|  |                         if is_rename: | ||||||
|  |                             cls._add_operator( | ||||||
|  |                                 cls._rename_field(new_model, old_field, new_field), upgrade, | ||||||
|  |                             ) | ||||||
|  |                             break | ||||||
|  |                 else: | ||||||
|                     cls._add_operator( |                     cls._add_operator( | ||||||
|                         cls._add_field(new_model, new_field), |                         cls._add_field(new_model, new_field), | ||||||
|                         upgrade, |                         upgrade, | ||||||
| @@ -274,6 +307,24 @@ class Migrate: | |||||||
|                 old_field_dict.pop("unique") |                 old_field_dict.pop("unique") | ||||||
|                 old_field_dict.pop("indexed") |                 old_field_dict.pop("indexed") | ||||||
|                 if not cls._is_fk_m2m(new_field) and new_field_dict != old_field_dict: |                 if not cls._is_fk_m2m(new_field) and new_field_dict != old_field_dict: | ||||||
|  |                     if cls.dialect == "postgres": | ||||||
|  |                         if new_field.null != old_field.null: | ||||||
|  |                             cls._add_operator( | ||||||
|  |                                 cls._alter_null(new_model, new_field), upgrade=upgrade | ||||||
|  |                             ) | ||||||
|  |                         if new_field.default != old_field.default: | ||||||
|  |                             cls._add_operator( | ||||||
|  |                                 cls._alter_default(new_model, new_field), upgrade=upgrade | ||||||
|  |                             ) | ||||||
|  |                         if new_field.description != old_field.description: | ||||||
|  |                             cls._add_operator( | ||||||
|  |                                 cls._set_comment(new_model, new_field), upgrade=upgrade | ||||||
|  |                             ) | ||||||
|  |                         if new_field.field_type != old_field.field_type: | ||||||
|  |                             cls._add_operator( | ||||||
|  |                                 cls._modify_field(new_model, new_field), upgrade=upgrade | ||||||
|  |                             ) | ||||||
|  |                     else: | ||||||
|                         cls._add_operator(cls._modify_field(new_model, new_field), upgrade=upgrade) |                         cls._add_operator(cls._modify_field(new_model, new_field), upgrade=upgrade) | ||||||
|                 if (old_field.index and not new_field.index) or ( |                 if (old_field.index and not new_field.index) or ( | ||||||
|                     old_field.unique and not new_field.unique |                     old_field.unique and not new_field.unique | ||||||
| @@ -297,6 +348,9 @@ class Migrate: | |||||||
|         for old_key in old_keys: |         for old_key in old_keys: | ||||||
|             field = old_fields_map.get(old_key) |             field = old_fields_map.get(old_key) | ||||||
|             if old_key not in new_keys and not cls._exclude_field(field, upgrade): |             if old_key not in new_keys and not cls._exclude_field(field, upgrade): | ||||||
|  |                 if (upgrade and old_key not in cls._rename_old) or ( | ||||||
|  |                     not upgrade and old_key not in cls._rename_new | ||||||
|  |                 ): | ||||||
|                     cls._add_operator( |                     cls._add_operator( | ||||||
|                         cls._remove_field(old_model, field), upgrade, cls._is_fk_m2m(field), |                         cls._remove_field(old_model, field), upgrade, cls._is_fk_m2m(field), | ||||||
|                     ) |                     ) | ||||||
| @@ -367,6 +421,18 @@ class Migrate: | |||||||
|             return cls.ddl.create_m2m_table(model, field) |             return cls.ddl.create_m2m_table(model, field) | ||||||
|         return cls.ddl.add_column(model, field) |         return cls.ddl.add_column(model, field) | ||||||
|  |  | ||||||
|  |     @classmethod | ||||||
|  |     def _alter_default(cls, model: Type[Model], field: Field): | ||||||
|  |         return cls.ddl.alter_column_default(model, field) | ||||||
|  |  | ||||||
|  |     @classmethod | ||||||
|  |     def _alter_null(cls, model: Type[Model], field: Field): | ||||||
|  |         return cls.ddl.alter_column_null(model, field) | ||||||
|  |  | ||||||
|  |     @classmethod | ||||||
|  |     def _set_comment(cls, model: Type[Model], field: Field): | ||||||
|  |         return cls.ddl.set_comment(model, field) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def _modify_field(cls, model: Type[Model], field: Field): |     def _modify_field(cls, model: Type[Model], field: Field): | ||||||
|         return cls.ddl.modify_column(model, field) |         return cls.ddl.modify_column(model, field) | ||||||
| @@ -379,6 +445,10 @@ class Migrate: | |||||||
|             return cls.ddl.drop_m2m(field) |             return cls.ddl.drop_m2m(field) | ||||||
|         return cls.ddl.drop_column(model, field.model_field_name) |         return cls.ddl.drop_column(model, field.model_field_name) | ||||||
|  |  | ||||||
|  |     @classmethod | ||||||
|  |     def _rename_field(cls, model: Type[Model], old_field: Field, new_field: Field): | ||||||
|  |         return cls.ddl.rename_column(model, old_field.model_field_name, new_field.model_field_name) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def _add_fk(cls, model: Type[Model], field: ForeignKeyFieldInstance): |     def _add_fk(cls, model: Type[Model], field: ForeignKeyFieldInstance): | ||||||
|         """ |         """ | ||||||
|   | |||||||
| @@ -1,8 +1,10 @@ | |||||||
| from tortoise import Model, fields | from tortoise import Model, fields | ||||||
|  |  | ||||||
|  | MAX_VERSION_LENGTH = 255 | ||||||
|  |  | ||||||
|  |  | ||||||
| class Aerich(Model): | class Aerich(Model): | ||||||
|     version = fields.CharField(max_length=50) |     version = fields.CharField(max_length=MAX_VERSION_LENGTH) | ||||||
|     app = fields.CharField(max_length=20) |     app = fields.CharField(max_length=20) | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|   | |||||||
| @@ -11,7 +11,7 @@ def get_app_connection_name(config, app) -> str: | |||||||
|     :param app: |     :param app: | ||||||
|     :return: |     :return: | ||||||
|     """ |     """ | ||||||
|     return config.get("apps").get(app).get("default_connection") |     return config.get("apps").get(app).get("default_connection", "default") | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_app_connection(config, app) -> BaseDBAsyncClient: | def get_app_connection(config, app) -> BaseDBAsyncClient: | ||||||
|   | |||||||
							
								
								
									
										27
									
								
								conftest.py
									
									
									
									
									
								
							
							
						
						
									
										27
									
								
								conftest.py
									
									
									
									
									
								
							| @@ -16,7 +16,7 @@ db_url = os.getenv("TEST_DB", "sqlite://:memory:") | |||||||
| tortoise_orm = { | tortoise_orm = { | ||||||
|     "connections": {"default": expand_db_url(db_url, True)}, |     "connections": {"default": expand_db_url(db_url, True)}, | ||||||
|     "apps": { |     "apps": { | ||||||
|         "models": {"models": ["tests.models", "aerich.models"], "default_connection": "default",}, |         "models": {"models": ["tests.models", "aerich.models"], "default_connection": "default"}, | ||||||
|     }, |     }, | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -31,24 +31,29 @@ def reset_migrate(): | |||||||
|     Migrate._downgrade_m2m = [] |     Migrate._downgrade_m2m = [] | ||||||
|  |  | ||||||
|  |  | ||||||
| @pytest.fixture(scope="session") | @pytest.yield_fixture(scope="session") | ||||||
| def loop(): | def event_loop(): | ||||||
|     loop = asyncio.get_event_loop() |     policy = asyncio.get_event_loop_policy() | ||||||
|     return loop |     res = policy.new_event_loop() | ||||||
|  |     asyncio.set_event_loop(res) | ||||||
|  |     res._close = res.close | ||||||
|  |     res.close = lambda: None | ||||||
|  |  | ||||||
|  |     yield res | ||||||
|  |  | ||||||
|  |     res._close() | ||||||
|  |  | ||||||
|  |  | ||||||
| @pytest.fixture(scope="session", autouse=True) | @pytest.fixture(scope="session", autouse=True) | ||||||
| def initialize_tests(loop, request): | async def initialize_tests(event_loop, request): | ||||||
|     tortoise_orm["connections"]["diff_models"] = "sqlite://:memory:" |     tortoise_orm["connections"]["diff_models"] = "sqlite://:memory:" | ||||||
|     tortoise_orm["apps"]["diff_models"] = { |     tortoise_orm["apps"]["diff_models"] = { | ||||||
|         "models": ["tests.diff_models"], |         "models": ["tests.diff_models"], | ||||||
|         "default_connection": "diff_models", |         "default_connection": "diff_models", | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     loop.run_until_complete(Tortoise.init(config=tortoise_orm, _create_db=True)) |     await Tortoise.init(config=tortoise_orm, _create_db=True) | ||||||
|     loop.run_until_complete( |     await generate_schema_for_client(Tortoise.get_connection("default"), safe=True) | ||||||
|         generate_schema_for_client(Tortoise.get_connection("default"), safe=True) |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     client = Tortoise.get_connection("default") |     client = Tortoise.get_connection("default") | ||||||
|     if client.schema_generator is MySQLSchemaGenerator: |     if client.schema_generator is MySQLSchemaGenerator: | ||||||
| @@ -58,4 +63,4 @@ def initialize_tests(loop, request): | |||||||
|     elif client.schema_generator is AsyncpgSchemaGenerator: |     elif client.schema_generator is AsyncpgSchemaGenerator: | ||||||
|         Migrate.ddl = PostgresDDL(client) |         Migrate.ddl = PostgresDDL(client) | ||||||
|  |  | ||||||
|     request.addfinalizer(lambda: loop.run_until_complete(Tortoise._drop_databases())) |     request.addfinalizer(lambda: event_loop.run_until_complete(Tortoise._drop_databases())) | ||||||
|   | |||||||
							
								
								
									
										
											BIN
										
									
								
								images/alipay.jpeg
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								images/alipay.jpeg
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							| After Width: | Height: | Size: 75 KiB | 
							
								
								
									
										
											BIN
										
									
								
								images/wechatpay.jpeg
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								images/wechatpay.jpeg
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							| After Width: | Height: | Size: 76 KiB | 
							
								
								
									
										712
									
								
								poetry.lock
									
									
									
										generated
									
									
									
								
							
							
						
						
									
										712
									
								
								poetry.lock
									
									
									
										generated
									
									
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,32 +1,43 @@ | |||||||
| [tool.poetry] | [tool.poetry] | ||||||
| name = "aerich" | name = "aerich" | ||||||
| version = "0.1.9" | version = "0.2.4" | ||||||
| description = "A database migrations tool for Tortoise ORM." | description = "A database migrations tool for Tortoise ORM." | ||||||
| authors = ["long2ice <long2ice@gmail.com>"] | authors = ["long2ice <long2ice@gmail.com>"] | ||||||
|  | license = "Apache-2.0" | ||||||
|  | readme = "README.md" | ||||||
|  | homepage = "https://github.com/long2ice/aerich" | ||||||
|  | repository = "https://github.com/long2ice/aerich.git" | ||||||
|  | documentation = "https://github.com/long2ice/aerich" | ||||||
|  | keywords = ["migrate", "Tortoise-ORM", "mysql"] | ||||||
|  | packages = [ | ||||||
|  |     { include = "aerich" } | ||||||
|  | ] | ||||||
|  | include = ["CHANGELOG.md", "LICENSE", "README.md"] | ||||||
|  |  | ||||||
| [tool.poetry.dependencies] | [tool.poetry.dependencies] | ||||||
| python = "^3.8" | python = "^3.7" | ||||||
| tortoise-orm = "*" | tortoise-orm = "*" | ||||||
| asyncclick = "*" | asyncclick = "*" | ||||||
| pydantic = "*" | pydantic = "*" | ||||||
|  | aiomysql = {version = "*", optional = true} | ||||||
|  | asyncpg = {version = "*", optional = true} | ||||||
|  |  | ||||||
| [tool.poetry.dev-dependencies] | [tool.poetry.dev-dependencies] | ||||||
| taskipy = "*" |  | ||||||
| flake8 = "*" | flake8 = "*" | ||||||
| isort = "*" | isort = "*" | ||||||
| black = "^19.10b0" | black = "^19.10b0" | ||||||
| pytest = "*" | pytest = "*" | ||||||
| aiomysql = "*" |  | ||||||
| asyncpg = "*" |  | ||||||
| pytest-xdist = "*" | pytest-xdist = "*" | ||||||
| mypy = "*" |  | ||||||
| pytest-asyncio = "*" | pytest-asyncio = "*" | ||||||
|  | bandit = "*" | ||||||
|  | pytest-mock = "*" | ||||||
|  |  | ||||||
| [tool.taskipy.tasks] | [tool.poetry.extras] | ||||||
| export = "poetry export -f requirements.txt --without-hashes > requirements.txt" | dbdrivers = ["aiomysql", "asyncpg"] | ||||||
| export-dev = "poetry export -f requirements.txt --dev --without-hashes > requirements-dev.txt" |  | ||||||
|  |  | ||||||
| [build-system] | [build-system] | ||||||
| requires = ["poetry>=0.12"] | requires = ["poetry>=0.12"] | ||||||
| build-backend = "poetry.masonry.api" | build-backend = "poetry.masonry.api" | ||||||
|  |  | ||||||
|  | [tool.poetry.scripts] | ||||||
|  | aerich = "aerich.cli:main" | ||||||
|   | |||||||
| @@ -1,2 +0,0 @@ | |||||||
| [pytest] |  | ||||||
| addopts = -p no:warnings --ignore=src |  | ||||||
| @@ -1,48 +0,0 @@ | |||||||
| aiomysql==0.0.20 |  | ||||||
| aiosqlite==0.13.0 |  | ||||||
| anyio==1.3.0 |  | ||||||
| apipkg==1.5 |  | ||||||
| appdirs==1.4.4 |  | ||||||
| async-generator==1.10 |  | ||||||
| asyncclick==7.0.9 |  | ||||||
| asyncpg==0.20.1 |  | ||||||
| atomicwrites==1.4.0; sys_platform == "win32" |  | ||||||
| attrs==19.3.0 |  | ||||||
| black==19.10b0 |  | ||||||
| cffi==1.14.0 |  | ||||||
| ciso8601==2.1.3; sys_platform != "win32" and implementation_name == "cpython" |  | ||||||
| click==7.1.2 |  | ||||||
| colorama==0.4.3; sys_platform == "win32" |  | ||||||
| cryptography==2.9.2 |  | ||||||
| execnet==1.7.1 |  | ||||||
| flake8==3.8.2 |  | ||||||
| iso8601==0.1.12; sys_platform == "win32" or implementation_name != "cpython" |  | ||||||
| isort==4.3.21 |  | ||||||
| mccabe==0.6.1 |  | ||||||
| more-itertools==8.3.0 |  | ||||||
| mypy==0.770 |  | ||||||
| mypy-extensions==0.4.3 |  | ||||||
| packaging==20.4 |  | ||||||
| pathspec==0.8.0 |  | ||||||
| pluggy==0.13.1 |  | ||||||
| py==1.8.1 |  | ||||||
| pycodestyle==2.6.0 |  | ||||||
| pycparser==2.20 |  | ||||||
| pydantic==1.5.1 |  | ||||||
| pyflakes==2.2.0 |  | ||||||
| pymysql==0.9.2 |  | ||||||
| pyparsing==2.4.7 |  | ||||||
| pypika==0.37.6 |  | ||||||
| pytest==5.4.2 |  | ||||||
| pytest-asyncio==0.12.0 |  | ||||||
| pytest-forked==1.1.3 |  | ||||||
| pytest-xdist==1.32.0 |  | ||||||
| regex==2020.5.14 |  | ||||||
| six==1.15.0 |  | ||||||
| sniffio==1.1.0 |  | ||||||
| taskipy==1.2.1 |  | ||||||
| toml==0.10.1 |  | ||||||
| tortoise-orm==0.16.12 |  | ||||||
| typed-ast==1.4.1 |  | ||||||
| typing-extensions==3.7.4.2 |  | ||||||
| wcwidth==0.1.9 |  | ||||||
| @@ -1,11 +0,0 @@ | |||||||
| aiosqlite==0.13.0 |  | ||||||
| anyio==1.3.0 |  | ||||||
| async-generator==1.10 |  | ||||||
| asyncclick==7.0.9 |  | ||||||
| ciso8601==2.1.3; sys_platform != "win32" and implementation_name == "cpython" |  | ||||||
| iso8601==0.1.12; sys_platform == "win32" or implementation_name != "cpython" |  | ||||||
| pydantic==1.5.1 |  | ||||||
| pypika==0.37.6 |  | ||||||
| sniffio==1.1.0 |  | ||||||
| tortoise-orm==0.16.12 |  | ||||||
| typing-extensions==3.7.4.2 |  | ||||||
							
								
								
									
										47
									
								
								setup.cfg
									
									
									
									
									
								
							
							
						
						
									
										47
									
								
								setup.cfg
									
									
									
									
									
								
							| @@ -1,47 +1,2 @@ | |||||||
| [flake8] | [flake8] | ||||||
| max-line-length = 100 | ignore = E501,W503 | ||||||
| exclude = |  | ||||||
| ignore = E501,W503,DAR101,DAR201,DAR402 |  | ||||||
|  |  | ||||||
| [darglint] |  | ||||||
| docstring_style=sphinx |  | ||||||
|  |  | ||||||
| [isort] |  | ||||||
| not_skip=__init__.py |  | ||||||
| multi_line_output=3 |  | ||||||
| include_trailing_comma=True |  | ||||||
| force_grid_wrap=0 |  | ||||||
| use_parentheses=True |  | ||||||
| line_length=100 |  | ||||||
|  |  | ||||||
| [tool:pytest] |  | ||||||
| addopts = -n auto --tb=native -q |  | ||||||
|  |  | ||||||
| [mypy] |  | ||||||
| pretty = True |  | ||||||
| ignore_missing_imports = True |  | ||||||
| check_untyped_defs = True |  | ||||||
| disallow_subclassing_any = True |  | ||||||
| disallow_untyped_calls = True |  | ||||||
| disallow_untyped_defs = False |  | ||||||
| disallow_incomplete_defs = False |  | ||||||
| disallow_untyped_decorators = True |  | ||||||
| no_implicit_optional = True |  | ||||||
| warn_redundant_casts = True |  | ||||||
| warn_unused_ignores = True |  | ||||||
| warn_no_return = True |  | ||||||
| warn_return_any = False |  | ||||||
| warn_unused_configs = True |  | ||||||
| warn_unreachable = True |  | ||||||
| allow_redefinition = True |  | ||||||
| strict_equality = True |  | ||||||
| show_error_context = True |  | ||||||
|  |  | ||||||
| [mypy-tests.*] |  | ||||||
| check_untyped_defs = False |  | ||||||
| disallow_untyped_defs = False |  | ||||||
| disallow_incomplete_defs = False |  | ||||||
| warn_unreachable = False |  | ||||||
|  |  | ||||||
| [mypy-conftest] |  | ||||||
| disallow_untyped_defs = False |  | ||||||
							
								
								
									
										44
									
								
								setup.py
									
									
									
									
									
								
							
							
						
						
									
										44
									
								
								setup.py
									
									
									
									
									
								
							| @@ -1,44 +0,0 @@ | |||||||
| import os |  | ||||||
| import re |  | ||||||
| from setuptools import find_packages, setup |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def version(): |  | ||||||
|     ver_str_line = open('aerich/__init__.py', 'rt').read() |  | ||||||
|     mob = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", ver_str_line, re.M) |  | ||||||
|     if not mob: |  | ||||||
|         raise RuntimeError("Unable to find version string") |  | ||||||
|     return mob.group(1) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as f: |  | ||||||
|     long_description = f.read() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def requirements(): |  | ||||||
|     return open('requirements.txt', 'rt').read().splitlines() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| setup( |  | ||||||
|     name='aerich', |  | ||||||
|     version=version(), |  | ||||||
|     description='A database migrations tool for Tortoise-ORM.', |  | ||||||
|     author='long2ice', |  | ||||||
|     long_description_content_type='text/x-rst', |  | ||||||
|     long_description=long_description, |  | ||||||
|     author_email='long2ice@gmail.com', |  | ||||||
|     url='https://github.com/long2ice/aerich', |  | ||||||
|     license='MIT License', |  | ||||||
|     packages=find_packages(include=['aerich*']), |  | ||||||
|     include_package_data=True, |  | ||||||
|     zip_safe=True, |  | ||||||
|     entry_points={ |  | ||||||
|         'console_scripts': ['aerich = aerich.cli:main'], |  | ||||||
|     }, |  | ||||||
|     platforms='any', |  | ||||||
|     keywords=( |  | ||||||
|         'migrate Tortoise-ORM mysql' |  | ||||||
|     ), |  | ||||||
|     dependency_links=['https://github.com/tortoise-orm/tortoise-orm.git@develop#egg=tortoise-orm'], |  | ||||||
|     install_requires=requirements(), |  | ||||||
| ) |  | ||||||
| @@ -22,9 +22,9 @@ class Status(IntEnum): | |||||||
|  |  | ||||||
|  |  | ||||||
| class User(Model): | class User(Model): | ||||||
|     username = fields.CharField(max_length=20,) |     username = fields.CharField(max_length=20) | ||||||
|     password = fields.CharField(max_length=200) |     password = fields.CharField(max_length=200) | ||||||
|     last_login = fields.DatetimeField(description="Last Login", default=datetime.datetime.now) |     last_login_at = fields.DatetimeField(description="Last Login", default=datetime.datetime.now) | ||||||
|     is_active = fields.BooleanField(default=True, description="Is Active") |     is_active = fields.BooleanField(default=True, description="Is Active") | ||||||
|     is_superuser = fields.BooleanField(default=False, description="Is SuperUser") |     is_superuser = fields.BooleanField(default=False, description="Is SuperUser") | ||||||
|     avatar = fields.CharField(max_length=200, default="") |     avatar = fields.CharField(max_length=200, default="") | ||||||
|   | |||||||
| @@ -1,8 +1,11 @@ | |||||||
|  | import pytest | ||||||
|  |  | ||||||
| from aerich.ddl.mysql import MysqlDDL | from aerich.ddl.mysql import MysqlDDL | ||||||
| from aerich.ddl.postgres import PostgresDDL | from aerich.ddl.postgres import PostgresDDL | ||||||
| from aerich.ddl.sqlite import SqliteDDL | from aerich.ddl.sqlite import SqliteDDL | ||||||
|  | from aerich.exceptions import NotSupportError | ||||||
| from aerich.migrate import Migrate | from aerich.migrate import Migrate | ||||||
| from tests.models import Category | from tests.models import Category, User | ||||||
|  |  | ||||||
|  |  | ||||||
| def test_create_table(): | def test_create_table(): | ||||||
| @@ -63,18 +66,81 @@ def test_add_column(): | |||||||
|  |  | ||||||
|  |  | ||||||
| def test_modify_column(): | def test_modify_column(): | ||||||
|     ret = Migrate.ddl.modify_column(Category, Category._meta.fields_map.get("name")) |     if isinstance(Migrate.ddl, SqliteDDL): | ||||||
|     if isinstance(Migrate.ddl, MysqlDDL): |         with pytest.raises(NotSupportError): | ||||||
|         assert ret == "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200) NOT NULL" |             ret0 = Migrate.ddl.modify_column(Category, Category._meta.fields_map.get("name")) | ||||||
|  |             ret1 = Migrate.ddl.modify_column(User, User._meta.fields_map.get("is_active")) | ||||||
|  |  | ||||||
|     else: |     else: | ||||||
|         assert ret == 'ALTER TABLE "category" MODIFY COLUMN "name" VARCHAR(200) NOT NULL' |         ret0 = Migrate.ddl.modify_column(Category, Category._meta.fields_map.get("name")) | ||||||
|  |         ret1 = Migrate.ddl.modify_column(User, User._meta.fields_map.get("is_active")) | ||||||
|  |     if isinstance(Migrate.ddl, MysqlDDL): | ||||||
|  |         assert ret0 == "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200) NOT NULL" | ||||||
|  |     elif isinstance(Migrate.ddl, PostgresDDL): | ||||||
|  |         assert ret0 == 'ALTER TABLE "category" ALTER COLUMN "name" TYPE VARCHAR(200)' | ||||||
|  |  | ||||||
|  |     if isinstance(Migrate.ddl, MysqlDDL): | ||||||
|  |         assert ( | ||||||
|  |             ret1 | ||||||
|  |             == "ALTER TABLE `user` MODIFY COLUMN `is_active` BOOL NOT NULL  COMMENT 'Is Active' DEFAULT 1" | ||||||
|  |         ) | ||||||
|  |     elif isinstance(Migrate.ddl, PostgresDDL): | ||||||
|  |         assert ret1 == 'ALTER TABLE "user" ALTER COLUMN "is_active" TYPE BOOL' | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_alter_column_default(): | ||||||
|  |     ret = Migrate.ddl.alter_column_default(Category, Category._meta.fields_map.get("name")) | ||||||
|  |     if isinstance(Migrate.ddl, PostgresDDL): | ||||||
|  |         assert ret == 'ALTER TABLE "category" ALTER COLUMN "name" DROP DEFAULT' | ||||||
|  |     else: | ||||||
|  |         assert ret is None | ||||||
|  |  | ||||||
|  |     ret = Migrate.ddl.alter_column_default(Category, Category._meta.fields_map.get("created_at")) | ||||||
|  |     if isinstance(Migrate.ddl, PostgresDDL): | ||||||
|  |         assert ( | ||||||
|  |             ret == 'ALTER TABLE "category" ALTER COLUMN "created_at" SET DEFAULT CURRENT_TIMESTAMP' | ||||||
|  |         ) | ||||||
|  |     else: | ||||||
|  |         assert ret is None | ||||||
|  |  | ||||||
|  |     ret = Migrate.ddl.alter_column_default(User, User._meta.fields_map.get("avatar")) | ||||||
|  |     if isinstance(Migrate.ddl, PostgresDDL): | ||||||
|  |         assert ret == 'ALTER TABLE "user" ALTER COLUMN "avatar" SET DEFAULT \'\'' | ||||||
|  |     else: | ||||||
|  |         assert ret is None | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_alter_column_null(): | ||||||
|  |     ret = Migrate.ddl.alter_column_null(Category, Category._meta.fields_map.get("name")) | ||||||
|  |     if isinstance(Migrate.ddl, PostgresDDL): | ||||||
|  |         assert ret == 'ALTER TABLE "category" ALTER COLUMN "name" SET NOT NULL' | ||||||
|  |     else: | ||||||
|  |         assert ret is None | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_set_comment(): | ||||||
|  |     ret = Migrate.ddl.set_comment(Category, Category._meta.fields_map.get("name")) | ||||||
|  |     if isinstance(Migrate.ddl, PostgresDDL): | ||||||
|  |         assert ret == 'COMMENT ON COLUMN "category"."name" IS NULL' | ||||||
|  |     else: | ||||||
|  |         assert ret is None | ||||||
|  |  | ||||||
|  |     ret = Migrate.ddl.set_comment(Category, Category._meta.fields_map.get("user")) | ||||||
|  |     if isinstance(Migrate.ddl, PostgresDDL): | ||||||
|  |         assert ret == 'COMMENT ON COLUMN "category"."user" IS \'User\'' | ||||||
|  |     else: | ||||||
|  |         assert ret is None | ||||||
|  |  | ||||||
|  |  | ||||||
| def test_drop_column(): | def test_drop_column(): | ||||||
|  |     if isinstance(Migrate.ddl, SqliteDDL): | ||||||
|  |         with pytest.raises(NotSupportError): | ||||||
|  |             ret = Migrate.ddl.drop_column(Category, "name") | ||||||
|  |     else: | ||||||
|         ret = Migrate.ddl.drop_column(Category, "name") |         ret = Migrate.ddl.drop_column(Category, "name") | ||||||
|     if isinstance(Migrate.ddl, MysqlDDL): |     if isinstance(Migrate.ddl, MysqlDDL): | ||||||
|         assert ret == "ALTER TABLE `category` DROP COLUMN `name`" |         assert ret == "ALTER TABLE `category` DROP COLUMN `name`" | ||||||
|     else: |     elif isinstance(Migrate.ddl, PostgresDDL): | ||||||
|         assert ret == 'ALTER TABLE "category" DROP COLUMN "name"' |         assert ret == 'ALTER TABLE "category" DROP COLUMN "name"' | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -86,6 +152,12 @@ def test_add_index(): | |||||||
|         assert ( |         assert ( | ||||||
|             index_u == "ALTER TABLE `category` ADD UNIQUE INDEX `uid_category_name_8b0cb9` (`name`)" |             index_u == "ALTER TABLE `category` ADD UNIQUE INDEX `uid_category_name_8b0cb9` (`name`)" | ||||||
|         ) |         ) | ||||||
|  |     elif isinstance(Migrate.ddl, PostgresDDL): | ||||||
|  |         assert index == 'CREATE INDEX "idx_category_name_8b0cb9" ON "category" ("name")' | ||||||
|  |         assert ( | ||||||
|  |             index_u | ||||||
|  |             == 'ALTER TABLE "category" ADD CONSTRAINT "uid_category_name_8b0cb9" UNIQUE ("name")' | ||||||
|  |         ) | ||||||
|     else: |     else: | ||||||
|         assert index == 'ALTER TABLE "category" ADD  INDEX "idx_category_name_8b0cb9" ("name")' |         assert index == 'ALTER TABLE "category" ADD  INDEX "idx_category_name_8b0cb9" ("name")' | ||||||
|         assert ( |         assert ( | ||||||
| @@ -95,10 +167,16 @@ def test_add_index(): | |||||||
|  |  | ||||||
| def test_drop_index(): | def test_drop_index(): | ||||||
|     ret = Migrate.ddl.drop_index(Category, ["name"]) |     ret = Migrate.ddl.drop_index(Category, ["name"]) | ||||||
|  |     ret_u = Migrate.ddl.drop_index(Category, ["name"], True) | ||||||
|     if isinstance(Migrate.ddl, MysqlDDL): |     if isinstance(Migrate.ddl, MysqlDDL): | ||||||
|         assert ret == "ALTER TABLE `category` DROP INDEX `idx_category_name_8b0cb9`" |         assert ret == "ALTER TABLE `category` DROP INDEX `idx_category_name_8b0cb9`" | ||||||
|  |         assert ret_u == "ALTER TABLE `category` DROP INDEX `uid_category_name_8b0cb9`" | ||||||
|  |     elif isinstance(Migrate.ddl, PostgresDDL): | ||||||
|  |         assert ret == 'DROP INDEX "idx_category_name_8b0cb9"' | ||||||
|  |         assert ret_u == 'ALTER TABLE "category" DROP CONSTRAINT "uid_category_name_8b0cb9"' | ||||||
|     else: |     else: | ||||||
|         assert ret == 'ALTER TABLE "category" DROP INDEX "idx_category_name_8b0cb9"' |         assert ret == 'ALTER TABLE "category" DROP INDEX "idx_category_name_8b0cb9"' | ||||||
|  |         assert ret_u == 'ALTER TABLE "category" DROP INDEX "uid_category_name_8b0cb9"' | ||||||
|  |  | ||||||
|  |  | ||||||
| def test_add_fk(): | def test_add_fk(): | ||||||
| @@ -119,5 +197,7 @@ def test_drop_fk(): | |||||||
|     ret = Migrate.ddl.drop_fk(Category, Category._meta.fields_map.get("user")) |     ret = Migrate.ddl.drop_fk(Category, Category._meta.fields_map.get("user")) | ||||||
|     if isinstance(Migrate.ddl, MysqlDDL): |     if isinstance(Migrate.ddl, MysqlDDL): | ||||||
|         assert ret == "ALTER TABLE `category` DROP FOREIGN KEY `fk_category_user_e2e3874c`" |         assert ret == "ALTER TABLE `category` DROP FOREIGN KEY `fk_category_user_e2e3874c`" | ||||||
|  |     elif isinstance(Migrate.ddl, PostgresDDL): | ||||||
|  |         assert ret == 'ALTER TABLE "category" DROP CONSTRAINT "fk_category_user_e2e3874c"' | ||||||
|     else: |     else: | ||||||
|         assert ret == 'ALTER TABLE "category" DROP FOREIGN KEY "fk_category_user_e2e3874c"' |         assert ret == 'ALTER TABLE "category" DROP FOREIGN KEY "fk_category_user_e2e3874c"' | ||||||
|   | |||||||
| @@ -1,30 +1,72 @@ | |||||||
|  | import pytest | ||||||
|  | from pytest_mock import MockerFixture | ||||||
| from tortoise import Tortoise | from tortoise import Tortoise | ||||||
|  |  | ||||||
| from aerich.ddl.mysql import MysqlDDL | from aerich.ddl.mysql import MysqlDDL | ||||||
|  | from aerich.ddl.postgres import PostgresDDL | ||||||
|  | from aerich.ddl.sqlite import SqliteDDL | ||||||
|  | from aerich.exceptions import NotSupportError | ||||||
| from aerich.migrate import Migrate | from aerich.migrate import Migrate | ||||||
|  |  | ||||||
|  |  | ||||||
| def test_migrate(): | def test_migrate(mocker: MockerFixture): | ||||||
|  |     mocker.patch("click.prompt", return_value=True) | ||||||
|     apps = Tortoise.apps |     apps = Tortoise.apps | ||||||
|     models = apps.get("models") |     models = apps.get("models") | ||||||
|     diff_models = apps.get("diff_models") |     diff_models = apps.get("diff_models") | ||||||
|     Migrate.diff_models(diff_models, models) |     Migrate.diff_models(diff_models, models) | ||||||
|  |     if isinstance(Migrate.ddl, SqliteDDL): | ||||||
|  |         with pytest.raises(NotSupportError): | ||||||
|  |             Migrate.diff_models(models, diff_models, False) | ||||||
|  |     else: | ||||||
|         Migrate.diff_models(models, diff_models, False) |         Migrate.diff_models(models, diff_models, False) | ||||||
|     if isinstance(Migrate.ddl, MysqlDDL): |     if isinstance(Migrate.ddl, MysqlDDL): | ||||||
|         assert Migrate.upgrade_operators == [ |         assert Migrate.upgrade_operators == [ | ||||||
|             "ALTER TABLE `category` ADD `name` VARCHAR(200) NOT NULL", |             "ALTER TABLE `category` ADD `name` VARCHAR(200) NOT NULL", | ||||||
|             "ALTER TABLE `user` ADD UNIQUE INDEX `uid_user_usernam_9987ab` (`username`)", |             "ALTER TABLE `user` ADD UNIQUE INDEX `uid_user_usernam_9987ab` (`username`)", | ||||||
|  |             "ALTER TABLE `user` RENAME COLUMN `last_login_at` TO `last_login`", | ||||||
|         ] |         ] | ||||||
|         assert Migrate.downgrade_operators == [ |         assert Migrate.downgrade_operators == [ | ||||||
|             "ALTER TABLE `category` DROP COLUMN `name`", |             "ALTER TABLE `category` DROP COLUMN `name`", | ||||||
|             "ALTER TABLE `user` DROP INDEX `uid_user_usernam_9987ab`", |             "ALTER TABLE `user` DROP INDEX `uid_user_usernam_9987ab`", | ||||||
|  |             "ALTER TABLE `user` RENAME COLUMN `last_login` TO `last_login_at`", | ||||||
|         ] |         ] | ||||||
|     else: |     elif isinstance(Migrate.ddl, PostgresDDL): | ||||||
|         assert Migrate.upgrade_operators == [ |         assert Migrate.upgrade_operators == [ | ||||||
|             'ALTER TABLE "category" ADD "name" VARCHAR(200) NOT NULL', |             'ALTER TABLE "category" ADD "name" VARCHAR(200) NOT NULL', | ||||||
|             'ALTER TABLE "user" ADD UNIQUE INDEX "uid_user_usernam_9987ab" ("username")', |             'ALTER TABLE "user" ADD CONSTRAINT "uid_user_usernam_9987ab" UNIQUE ("username")', | ||||||
|  |             'ALTER TABLE "user" RENAME COLUMN "last_login_at" TO "last_login"', | ||||||
|         ] |         ] | ||||||
|         assert Migrate.downgrade_operators == [ |         assert Migrate.downgrade_operators == [ | ||||||
|             'ALTER TABLE "category" DROP COLUMN "name"', |             'ALTER TABLE "category" DROP COLUMN "name"', | ||||||
|             'ALTER TABLE "user" DROP INDEX "uid_user_usernam_9987ab"', |             'ALTER TABLE "user" DROP CONSTRAINT "uid_user_usernam_9987ab"', | ||||||
|  |             'ALTER TABLE "user" RENAME COLUMN "last_login" TO "last_login_at"', | ||||||
|  |         ] | ||||||
|  |     elif isinstance(Migrate.ddl, SqliteDDL): | ||||||
|  |         assert Migrate.upgrade_operators == [ | ||||||
|  |             'ALTER TABLE "category" ADD "name" VARCHAR(200) NOT NULL', | ||||||
|  |             'ALTER TABLE "user" ADD UNIQUE INDEX "uid_user_usernam_9987ab" ("username")', | ||||||
|  |             'ALTER TABLE "user" RENAME COLUMN "last_login_at" TO "last_login"', | ||||||
|  |         ] | ||||||
|  |         assert Migrate.downgrade_operators == [] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_sort_all_version_files(mocker): | ||||||
|  |     mocker.patch( | ||||||
|  |         "os.listdir", | ||||||
|  |         return_value=[ | ||||||
|  |             "1_datetime_update.json", | ||||||
|  |             "11_datetime_update.json", | ||||||
|  |             "10_datetime_update.json", | ||||||
|  |             "2_datetime_update.json", | ||||||
|  |         ], | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     Migrate.migrate_location = "." | ||||||
|  |  | ||||||
|  |     assert Migrate.get_all_version_files() == [ | ||||||
|  |         "1_datetime_update.json", | ||||||
|  |         "2_datetime_update.json", | ||||||
|  |         "10_datetime_update.json", | ||||||
|  |         "11_datetime_update.json", | ||||||
|     ] |     ] | ||||||
|   | |||||||
		Reference in New Issue
	
	Block a user