314 Commits
v0.1.5 ... main

Author SHA1 Message Date
long2ice
40d0823c01 refactor: make in_transaction default True 2023-08-04 10:35:46 +08:00
long2ice
467406ed20 Merge pull request #303 from karichevi/dev
Added Documentation in Russian Language
2023-05-23 11:35:09 +08:00
karichevi
484b5900ce Added Documentation in Russian Language 2023-05-22 14:30:36 +03:00
long2ice
b8b6df0b65 chore: update deps 2023-05-12 15:27:50 +08:00
long2ice
f0bc3126e9 fix: generates two semicolons in a row. (#301) 2023-05-12 14:52:17 +08:00
long2ice
dbc0d9e7ef Merge pull request #296 from evstratbg/migrate-transaction
add in-transaction for upgrade
2023-05-05 23:12:08 +08:00
Bogdan
818dd29991 fix styles 2023-05-05 17:29:34 +04:00
Bogdan
e199e03b53 added -i param 2023-05-03 19:48:54 +04:00
Bogdan
d79dc25ee8 enriched changelog 2023-05-03 19:48:25 +04:00
Bogdan
c6d51a4dcf bump version 2023-05-03 19:48:15 +04:00
Bogdan
241b30a710 add in-transaction for upgrade 2023-04-03 20:55:12 +04:00
long2ice
8cf50c58d7 test: fix ci 2023-01-27 15:17:41 +08:00
long2ice
1c9b65cc37 fix: modify multiple times. (#279) 2023-01-27 13:49:07 +08:00
long2ice
3fbf9febfb Merge pull request #281 from CortexPE/patch-1
Fix #280 by removing trailing semicolon
2022-12-20 14:28:51 +08:00
marshall
7b6545d4e1 Fix #280 by removing trailing semicolon 2022-12-20 04:28:40 +08:00
long2ice
52b50a2161 feat: support virtual fields 2022-11-18 23:23:04 +08:00
long2ice
90943a473c Merge pull request #269 from jtraub/dev
Close connections in the init_db wrapper
2022-09-29 08:39:33 +08:00
Konstantin Mikhailov
d7ecd97e88 Close connections in the init_db wrapper 2022-09-29 08:09:59 +10:00
long2ice
20aebc4413 chore: update version 2022-09-27 22:44:12 +08:00
long2ice
f8e1f9ff44 fix: initialize an empty database. (#267) 2022-09-27 22:42:54 +08:00
long2ice
ab31445fb2 fix: test 2022-09-26 18:39:48 +08:00
long2ice
28d19a4b7b - Fix syntax error with python3.8.10. (#265)
- Fix sql generate error. (#263)
2022-09-26 18:36:57 +08:00
long2ice
9da99824fe fix: postgres sql error (#263) 2022-09-23 23:33:49 +08:00
long2ice
75db7cea60 fix: test error 2022-09-23 10:35:33 +08:00
long2ice
d777c9c278 Merge remote-tracking branch 'origin/dev' into dev
# Conflicts:
#	aerich/utils.py
#	tests/test_utils.py
2022-09-23 10:30:46 +08:00
long2ice
e9b76bdd35 feat: use .py version files 2022-09-23 10:29:48 +08:00
long2ice
8b7864d886 Merge pull request #199 from ehdgua01/fix-ddl-format-and-writing-version-file
[Enhancement] Fix version file formats
2022-09-16 09:30:27 +08:00
KDH
bef45941f2 Fix testcase 2022-09-16 10:26:21 +09:00
KDH
7b472d7a84 Fix testcase 2022-09-16 10:08:34 +09:00
KDH
1f0a6dfb50 Fix typo 2022-09-16 09:58:04 +09:00
KDH
36282f123f Merge branch 'dev' into fix-ddl-format-and-writing-version-file
# Conflicts:
#	aerich/utils.py
#	tests/test_migrate.py
2022-09-16 09:54:51 +09:00
KDH
3cd4e24050 Merge branch 'dev' into fix-ddl-format-and-writing-version-file 2022-09-16 09:43:57 +09:00
long2ice
f8c2f1b551 Merge pull request #205 from GDGSNF/dev
Merge repeated `if` statements into single `if`
2022-09-16 08:40:28 +08:00
Yasser Tahiri
131d97a3d6 Merge branch 'dev' into dev 2022-09-15 23:19:59 +01:00
Yasser Tahiri
1a0371e977 Update aerich/utils.py
Co-authored-by: KDH <ehdgua01@naver.com>
2022-09-15 23:19:18 +01:00
long2ice
e5b092fd08 Merge pull request #260 from waketzheng/dev
refactor: use pathlib to read and write text
2022-09-12 11:44:02 +08:00
Waket Zheng
7a109f3c79 refactor: use pathlib to read and write text 2022-09-12 00:57:46 +08:00
Jinlong Peng
8c2ecbaef1 feat: support add/remove field with index 2022-08-26 18:04:20 +08:00
long2ice
b141363c51 Merge pull request #242 from ssilaev/dev
Hotfix for cli group function in v0.6.3
2022-07-22 08:39:02 +08:00
long2ice
9dd474d79f Merge remote-tracking branch 'origin/dev' into dev 2022-06-27 11:42:37 +08:00
long2ice
e4bb9d838e docs: update changelog 2022-06-27 11:41:48 +08:00
long2ice
029d522c79 Merge pull request #249 from tortoise/fix-decimal
Fix decimal field change
2022-06-27 11:38:03 +08:00
long2ice
d6627906c7 test: fix test_migrate 2022-06-27 11:36:09 +08:00
long2ice
3c88833154 fix: decimal field change (#246) 2022-06-27 11:29:47 +08:00
long2ice
8f68f08eba Merge pull request #248 from isaquealves/feature/load_ddl_class_per_dialect
feat: Add support for dynamically load DDL classes
2022-06-22 20:25:42 +08:00
Isaque Alves
60ba6963fd Update changelog 2022-06-22 09:22:26 -03:00
Isaque Alves
4c35c44bd2 feat: Add support for dynamically load DDL classes
Adopt a strategy of loading classes based on their names, allowing to
easily add new database support without changing Migrate class logic
2022-06-22 09:16:11 -03:00
long2ice
bdeaf5495e Merge pull request #247 from isaquealves/feature/postgresql-numeric-type-translate
refactor: Improve db inspection
2022-06-22 08:41:15 +08:00
Isaque Alves
db33059ec9 Resolve style issue 2022-06-20 15:42:21 -03:00
Isaque Alves
44b96058f8 fix(tests/test_migrate.py): Resolve issue with broken tests 2022-06-17 12:36:04 -03:00
Isaque Alves
abff753b6a refactor: Improve postgresql migrate operators tests 2022-06-17 09:45:02 -03:00
Isaque Alves
dcd8441a05 fix: add space following python style guide" 2022-06-17 02:03:41 -03:00
Isaque Alves
b4a735b814 fix: Adjust changelog formatting 2022-06-17 02:02:37 -03:00
Isaque Alves
83ba13e99a Update changelog 2022-06-17 02:00:35 -03:00
Isaque Alves
d7b1c07d13 fix: Add comma to separate value in join 2022-06-17 01:51:47 -03:00
Isaque Alves
1ac16188fc refactor: Improve db inspection
- Add support to postgresql numeric type.
- Improve field configuration handling for numeric and decimal types
2022-06-17 01:38:39 -03:00
long2ice
4abc464ce0 feat: add is_flag to init-db 2022-05-24 11:20:12 +08:00
Sergey Silaev
d4430cec0d Hotfix for cli group function in v0.6.3 2022-05-10 01:20:11 +04:00
long2ice
0b01fa38d8 feat: add index inspect 2022-04-05 19:38:08 +08:00
long2ice
801dde15be feat: inspectdb support sqlite 2022-04-01 20:30:36 +08:00
long2ice
75480e2041 Merge remote-tracking branch 'origin/dev' into dev 2022-04-01 19:57:03 +08:00
long2ice
45129cef9f feat: improve inspectdb and support postgres 2022-04-01 19:56:48 +08:00
long2ice
3a0dd2355d Merge pull request #230 from ssilaev/dev
Increase max length of app column
2022-02-09 15:01:39 +08:00
Sergey Silaev
0e71bc16ae Increase max length of app column 2022-02-08 22:14:55 +03:00
long2ice
c39462820c upgrade deps 2022-01-17 22:26:13 +08:00
long2ice
f15cbaf9e0 Support migration for specified index. (#203) 2021-12-29 21:36:23 +08:00
long2ice
15131469df upgrade deps 2021-12-22 16:26:13 +08:00
long2ice
c60c1610f0 Fix pyproject.toml not existing error. (#217) 2021-12-12 22:11:51 +08:00
long2ice
63e8d06157 remove aiomysql 2021-12-08 14:43:33 +08:00
long2ice
68ef8ac676 Fix ci 2021-12-08 14:38:16 +08:00
long2ice
8b5cf6faa0 inspectdb support DATE. (#215) 2021-12-08 14:33:27 +08:00
Yasser Tahiri
40c7ef7fd6 Merge repeated if statements into single if 2021-10-21 15:43:18 +01:00
KDH
7a826df43f Fix duplicated semicolon in table creation DDL 2021-10-12 11:24:37 +09:00
KDH
b1b9cc1454 Fix M2M table template 2021-10-12 11:23:29 +09:00
long2ice
fac00d45cc Remove pydantic dependency. (#198) 2021-10-04 23:05:20 +08:00
long2ice
6f7893d376 Fix section name 2021-09-28 15:07:10 +08:00
long2ice
b1521c4cc7 update version 2021-09-27 19:55:38 +08:00
long2ice
24c1f4cb7d Change default config file from aerich.ini to pyproject.toml. (#197) 2021-09-27 11:05:20 +08:00
long2ice
661f241dac Compatible with old version in indexes 2021-08-31 17:53:17 +08:00
long2ice
01787558d6 Fix test 2021-08-31 17:41:13 +08:00
long2ice
699b0321a4 Support indexes change. (#193) 2021-08-31 17:36:25 +08:00
long2ice
4a83021892 Update FUNDING.yml 2021-08-26 20:39:31 +08:00
long2ice
af63221875 Fix no module found error. (#188) (#189) 2021-08-16 11:14:43 +08:00
long2ice
359525716c update README.md 2021-08-12 15:42:54 +08:00
long2ice
7d3eb2e151 Merge pull request #181 from Vovetta/dev
Fix: migrate doesn't use source_field in unique_together
2021-08-04 09:42:18 +08:00
Vovetta
d8abf79449 Updated changelog and version 2021-08-03 10:38:31 -07:00
Vovetta
aa9f40ae27 Fix: migrate doesn't use source_field in unique_together 2021-08-03 10:36:06 -07:00
long2ice
79b7ae343a update README.md 2021-08-03 16:25:06 +08:00
long2ice
6f5a9ab78c Add Command class. (#148) (#141) (#123) (#106) 2021-08-03 16:18:07 +08:00
long2ice
1e5a83c281 update deps 2021-07-26 17:44:18 +08:00
long2ice
180420843d update README.md 2021-07-26 15:27:49 +08:00
long2ice
58f66b91cf Fix redundant semicolons 2021-07-23 17:07:10 +08:00
long2ice
064d7ff675 Fix ci 2021-07-22 15:32:07 +08:00
long2ice
2da794d823 Fix db_constraint when fk changed. (#179) 2021-07-22 14:37:49 +08:00
long2ice
77005f3793 Fix MySQL 5.X rename column. 2021-07-09 10:53:13 +08:00
long2ice
5a873b8b69 Merge pull request #177 from yusukefs/add-default-src-folder-config
Add default value for src_folder config
2021-07-08 17:27:29 +08:00
Yusuke Sakai
3989b7c674 Update version and changelog 2021-07-08 18:01:59 +09:00
Yusuke Sakai
694b05356f Add default src_folder cofig value 2021-07-08 17:35:44 +09:00
long2ice
919d56c936 add ci branches-ignore master 2021-07-07 10:29:38 +08:00
long2ice
7bcf9b2fed Support drop column for sqlite. (#40) 2021-07-03 13:51:01 +08:00
long2ice
9f663299cf Merge pull request #174 from sasha00123/dev
Fixed typo in README.md concerning dowgrade usage
2021-06-25 13:52:34 +08:00
Alexander Batyrgariev
28dbdf2663 Fixed typo in README.md concerning dowgrade usage 2021-06-25 08:00:00 +03:00
long2ice
e71a4b60a5 Merge pull request #166 from spacemanspiff2007/dev
Added config option to specify source folder
2021-06-13 14:26:26 +08:00
-
62840136be used old black version 2021-06-11 15:36:54 +02:00
-
185514f711 reformatted with black 2021-06-11 15:18:06 +02:00
-
8e783e031e updated readme 2021-06-10 16:56:30 +02:00
-
10b7272ca8 Added an configuration option to specify the path of the source folder.
This will make aerich work with various folder structures (e.g. ./src/MyPythonModule)
Additionally this will try to import in init and show the user the error message on failure.
2021-06-10 16:52:03 +02:00
long2ice
0c763c6024 Fix repeat 2021-06-09 13:56:25 +08:00
long2ice
c6371a5c16 Fix repeat 2021-06-09 11:43:32 +08:00
long2ice
1dbf9185b6 Not catch exception when import config. (#164) 2021-06-04 17:47:39 +08:00
long2ice
9bf2de0b9a Fix incorrect index creation order. (#151) 2021-06-01 17:09:45 +08:00
long2ice
bf1cf21324 Merge pull request #158 from manzato/pyproject-update
Update URLs
2021-05-22 22:43:52 +08:00
Guillermo Manzato
8b08329493 Update URLs 2021-05-22 11:39:49 -03:00
long2ice
5bc7d23d95 Merge pull request #157 from tortoise/dependabot/pip/pydantic-1.8.2
Bump pydantic from 1.8.1 to 1.8.2
2021-05-14 09:30:47 +08:00
dependabot[bot]
a253aa96cb Bump pydantic from 1.8.1 to 1.8.2
Bumps [pydantic](https://github.com/samuelcolvin/pydantic) from 1.8.1 to 1.8.2.
- [Release notes](https://github.com/samuelcolvin/pydantic/releases)
- [Changelog](https://github.com/samuelcolvin/pydantic/blob/master/HISTORY.md)
- [Commits](https://github.com/samuelcolvin/pydantic/compare/v1.8.1...v1.8.2)

Signed-off-by: dependabot[bot] <support@github.com>
2021-05-13 20:51:51 +00:00
long2ice
15a6e874dd update deps 2021-05-03 14:23:27 +08:00
long2ice
19a5dcbf3f update deps 2021-04-26 21:01:40 +08:00
long2ice
922e3eef16 Fix CI 2021-04-05 17:11:28 +08:00
long2ice
44fd2fe6ae Fix default function when migrate. (#147) 2021-04-05 14:10:42 +08:00
long2ice
b147859960 Fix default function when migrate 2021-04-04 05:46:34 +00:00
long2ice
793cf2532c Create FUNDING.yml 2021-04-03 21:34:24 +08:00
long2ice
fa85e05d1d Fix postgre alter null. (#142) 2021-03-28 16:22:49 +08:00
long2ice
3f52ac348b Support rename table. (#139) 2021-03-25 21:21:49 +08:00
long2ice
f8aa7a8f34 Fix inspectdb for FloatField. (#138) 2021-03-22 14:16:59 +08:00
long2ice
44d520cc82 Fix postgres field type change error. (#135) 2021-03-21 21:18:08 +08:00
long2ice
364735f804 Fix rename field on the field add. (#134) 2021-03-21 20:43:05 +08:00
long2ice
505d361597 Fix drop model in the downgrade. (#132) 2021-03-18 23:40:13 +08:00
long2ice
a19edd3a35 update ci name 2021-03-13 16:45:35 +08:00
long2ice
84d1f78019 update workflow name and add cryptography 2021-03-13 16:43:22 +08:00
long2ice
8fb07a6c9e update deps 2021-03-13 16:40:27 +08:00
long2ice
54da8b22af update aiomysql to asyncmy 2021-03-13 16:37:45 +08:00
long2ice
4c0308ff22 update test.yml 2021-03-03 22:03:38 +08:00
long2ice
38c4a15661 update test.yml 2021-03-03 20:42:18 +08:00
long2ice
52151270e0 Fix bug for field change. (#119) 2021-03-03 20:36:54 +08:00
long2ice
49897dc4fd Merge pull request #121 from AulonSal/close-tortoise-connections
Close Tortoise connections properly
2021-02-28 14:47:58 +08:00
AulonSal
d4ad0e270f Update version and changelog 2021-02-28 12:13:59 +05:30
AulonSal
e74fc304a5 Don't close db connections when group function (cli) is run 2021-02-27 00:43:55 +05:30
AulonSal
14d20455e6 Replace coro logic with tortoise.run_async 2021-02-23 13:06:40 +05:30
long2ice
bd9ecfd6e1 Merge pull request #122 from personalcomputer/personalcomputer/improve_readme_english
Improve English grammar / clarity in README.md
2021-02-22 12:31:15 +08:00
John Miller
de8500b9a1 Improve English grammar / clarity in README.md 2021-02-21 19:46:04 -08:00
AulonSal
90b47c5af7 Close connections even if command raises exception 2021-02-22 07:40:18 +05:30
AulonSal
02fe5a9d31 Close Tortoise connections properly 2021-02-20 13:11:29 +05:30
long2ice
be41a1332a update tortoise-orm version 2021-02-04 20:53:04 +08:00
long2ice
09661c1d46 Fix unique_together 2021-02-04 14:39:07 +08:00
long2ice
abfa60133f Fix drop table 2021-02-04 14:23:46 +08:00
long2ice
048e428eac update tortoise-orm 2021-02-03 22:52:01 +08:00
long2ice
38a3df9b5a add support m2m 2021-02-03 22:22:22 +08:00
long2ice
0d94b22b3f Remove unused functions 2021-02-03 18:06:43 +08:00
long2ice
f1f0074255 Support rename field 2021-02-03 17:56:30 +08:00
long2ice
e3a14a2f60 Fix postgres index 2021-02-03 16:34:07 +08:00
long2ice
608ff8f071 update conftest.py 2021-02-03 15:49:40 +08:00
long2ice
d3a1342293 update README.md 2021-02-03 15:48:06 +08:00
long2ice
01e3de9522 basically completed 2021-02-03 15:43:04 +08:00
long2ice
c6c398fdf0 update 2021-02-02 22:52:50 +08:00
long2ice
c60bdd290e add fk and drop fk 2021-02-02 20:35:05 +08:00
long2ice
f443dc68db WIP 2021-02-01 16:54:35 +08:00
long2ice
36f84702b7 update 2021-02-01 14:00:12 +08:00
long2ice
b4cc2de0e3 v0.5 refactoring 2021-01-31 23:10:30 +08:00
long2ice
4780b90c1c add close_connections to fix stuck 2021-01-29 22:58:12 +08:00
long2ice
cd176c1fd6 Merge pull request #111 from lqmanh/bugfixes/fix-tortoise-orm-0.16.19
Fix Aerich b/c of a new feature in Tortoise ORM v0.16.19
2021-01-04 14:59:11 +08:00
long2ice
c2819fc8dc update CHANGELOG.md 2020-12-29 19:13:37 +08:00
long2ice
530e7cfce5 Fixed unnecessary import. (#113) 2020-12-29 19:12:36 +08:00
Lương Quang Mạnh
47824a100b Fix Aerich b/c of Tortoise ORM v0.16.19 2020-12-26 10:31:10 +07:00
long2ice
78a15f9f19 Merge pull request #108 from lqmanh/features/make-parent-dirs-as-needed
Make parent directories as needed
2020-12-25 22:10:56 +08:00
long2ice
5ae8b9e85f complete InspectDb 2020-12-25 21:44:26 +08:00
long2ice
55a6d4bbc7 add InspectDb and show_create_tables 2020-12-24 23:32:58 +08:00
long2ice
c5535f16e1 TODO: Add inspectdb command 2020-12-23 23:38:45 +08:00
long2ice
840cd71e44 Replace migrations separator to sql standard comment 2020-12-23 23:30:35 +08:00
Lương Quang Mạnh
e0d52b1210 Fix make style 2020-12-21 15:36:29 +07:00
Lương Quang Mạnh
4dc45f723a Make parent directories as needed 2020-12-21 15:13:26 +07:00
long2ice
d2e0a68351 Fix packaging error. (#92) 2020-12-02 23:03:15 +08:00
long2ice
ee6cc20c7d Fix empty items 2020-11-30 11:14:09 +08:00
long2ice
4e917495a0 Fix upgrade in new db. (#96) 2020-11-30 11:02:48 +08:00
long2ice
bfa66f6dd4 update changelog 2020-11-29 11:15:43 +08:00
long2ice
f00715d4c4 Merge pull request #97 from TrDex/pathlib-for-path-resolving
Use `pathlib` for path resolving
2020-11-29 11:02:44 +08:00
Mykola Solodukha
6e3105690a Use pathlib for path resolving 2020-11-28 19:23:34 +02:00
long2ice
c707f7ecb2 bug fix 2020-11-28 14:31:41 +08:00
long2ice
0bbc471e00 Fix sqlite stuck. (#90) 2020-11-26 23:38:57 +08:00
long2ice
fb6cc62047 update README and CHANGELOG 2020-11-23 16:44:16 +08:00
long2ice
e9ceaf471f Merge pull request #87 from ALexALed/remove-default-detections-for-callable
Remove callable detection for defaults
2020-11-23 16:41:30 +08:00
alexaled
85fc3b2aa2 Remove callable detection for defaults 2020-11-23 10:35:40 +02:00
long2ice
a677d506a9 Fix ci error 2020-11-19 10:41:52 +08:00
long2ice
9879004fee Add rename column support MySQL5 2020-11-19 10:11:52 +08:00
long2ice
5760fe2040 Merge pull request #83 from SakuraSound/fix-migrate-unlink
Catch OSError (if read-only file system)
2020-11-18 15:40:29 +08:00
Joir-dan Gumbs
b229c30558 Catch OSError (if read-only file system) 2020-11-17 23:28:00 -08:00
long2ice
5d2f1604c3 update github action poetry 2020-11-17 10:57:56 +08:00
long2ice
499c4e1c02 Fix black 2020-11-17 10:50:57 +08:00
long2ice
1463ee30bc update deps 2020-11-17 10:43:27 +08:00
long2ice
3b801932f5 Merge remote-tracking branch 'origin/dev' into dev 2020-11-17 10:36:14 +08:00
long2ice
c2eb4dc9e3 update poetry in github actions 2020-11-17 10:35:51 +08:00
long2ice
5927febd0c Delete .DS_Store 2020-11-17 10:10:32 +08:00
long2ice
a1c10ff330 exclude .DS_store 2020-11-17 10:09:37 +08:00
long2ice
f2013c931a Fix test error 2020-11-16 22:32:19 +08:00
long2ice
b21b954d32 Use .sql instead of .json to store version file. (#79) 2020-11-16 22:25:01 +08:00
long2ice
f5588a35c5 update deps 2020-11-12 21:27:58 +08:00
long2ice
f5dff84476 Fix encoding error. (#75) 2020-11-08 23:00:44 +08:00
long2ice
e399821116 update deps 2020-11-05 17:43:41 +08:00
long2ice
648f25a951 Compatible with models file in directory. (#70) 2020-10-30 19:51:46 +08:00
long2ice
fa73e132e2 remove .vscode 2020-10-30 16:45:12 +08:00
long2ice
1bac33cd33 add confirmation_option when downgrade 2020-10-30 16:39:14 +08:00
long2ice
4e76f12ccf update README.md 2020-10-28 17:12:23 +08:00
long2ice
724379700e Support multiple databases. (#68) 2020-10-28 17:02:02 +08:00
long2ice
bb929f2b55 update deps 2020-10-25 17:48:05 +08:00
long2ice
6339dc86a8 Fix migrate to new database error 2020-10-14 20:33:23 +08:00
long2ice
768747140a update changelog 2020-10-12 21:01:53 +08:00
long2ice
1fde3cd04e fix init KeyError (#61) 2020-10-12 20:59:13 +08:00
long2ice
d0ce545ff5 Fix first version error 2020-10-10 15:07:09 +08:00
long2ice
09b89ed7d0 update README 2020-10-09 15:41:37 +08:00
long2ice
86c8382593 update README 2020-10-09 15:34:48 +08:00
long2ice
48e3ff48a3 update README.md 2020-10-09 12:04:36 +08:00
long2ice
1bf6d45bb0 Merge pull request #60 from tortoise/refactoring
Refactoring migrate logic
2020-10-09 11:54:37 +08:00
long2ice
342f4cdd3b complete refactoring 2020-10-09 11:53:50 +08:00
long2ice
8cace21fde refactoring migrate logic 2020-10-09 00:05:22 +08:00
long2ice
9889d9492b add ? when ask rename 2020-09-28 17:22:05 +08:00
long2ice
823368aea8 fix event loop error 2020-09-28 17:16:35 +08:00
long2ice
6b1ad46cf1 update README.md 2020-09-28 12:50:43 +08:00
long2ice
ce8c0b1f06 Support db_constraint in fk 2020-09-28 10:40:04 +08:00
long2ice
43922d3734 update CHANGELOG.md 2020-09-27 14:18:19 +08:00
long2ice
48c5318737 remove asyncclick 2020-09-25 18:27:08 +08:00
long2ice
002221e557 update README.md 2020-09-25 17:51:31 +08:00
long2ice
141d7205bf Add Rename support 2020-09-25 17:48:32 +08:00
long2ice
af4d4be19a Fix Postgres alter table 2020-09-24 15:02:28 +08:00
long2ice
3b4d9b47ce update version 2020-09-23 18:33:31 +08:00
long2ice
4b0c4ae7d0 fix test error 2020-09-21 15:03:16 +08:00
long2ice
dc821d8a02 Merge remote-tracking branch 'origin/dev' into dev 2020-09-21 11:46:21 +08:00
long2ice
d18a6b5be0 add sqlite not support error 2020-09-21 11:44:34 +08:00
long2ice
1e56a70f21 Merge pull request #44 from saintlyzero/dev
append new line in cp_models
2020-09-16 22:21:09 +08:00
saintlyzero
ab1e1aab75 append new line in cp_models 2020-09-16 19:30:03 +05:30
long2ice
00dd04f97d update conftest.py 2020-09-10 18:42:19 +08:00
long2ice
fc914acc80 update README.md 2020-09-09 21:10:10 +08:00
long2ice
ac03ecb002 update README.md 2020-09-02 13:34:53 +08:00
long2ice
235ef3f7ea fix datetime string format 2020-08-19 09:28:36 +08:00
long2ice
e00eb7f3d9 update README.md 2020-08-17 12:40:39 +08:00
long2ice
d6c8941676 update CHANGELOG.rst 2020-08-07 18:12:40 +08:00
long2ice
cf062c9310 update CHANGELOG.rst 2020-08-07 09:29:30 +08:00
long2ice
309adec8c9 Merge pull request #34 from moubctez/postgresql_unique
PostgreSQL add/drop index/unique
2020-08-07 09:27:03 +08:00
Adam Ciarciński
8674142ba8 Fix test_migrate 2020-08-06 17:59:12 +02:00
Adam Ciarciński
cda9bd1c47 Save 1 space in tests 2020-08-06 17:50:28 +02:00
Adam Ciarciński
198e4e0032 Save 1 space 2020-08-06 17:49:26 +02:00
Adam Ciarciński
1b440477a2 PostgreSQL add/drop index/unique 2020-08-06 17:45:56 +02:00
long2ice
1263c6f735 Fix tortoise ssl config 2020-07-30 11:30:02 +08:00
long2ice
6504384879 update README.md 2020-07-29 10:30:25 +08:00
long2ice
17ab0a1421 fix version sort and add test 2020-07-29 10:21:43 +08:00
long2ice
e1ffcb609b fix version sort 2020-07-28 18:31:45 +08:00
long2ice
18cb75f555 Merge pull request #32 from psbleep/close-tortoise-connections
Close database connections
2020-07-26 16:52:08 +08:00
Patrick Schneeweis
dfe13ea250 add type hint 2020-07-25 15:22:05 -04:00
Patrick Schneeweis
b97ce0ff2f Use close_db decorator on history command 2020-07-24 09:26:29 -04:00
Patrick Schneeweis
21001c0eda Make history command into async function
The close_db decorator requires async functions
2020-07-24 09:24:43 -04:00
Patrick Schneeweis
9bd96a9487 Use close_db decorator on command functions 2020-07-24 09:22:57 -04:00
Patrick Schneeweis
5bdaa32a9e Decorator for closing db connection after func run 2020-07-24 09:17:19 -04:00
long2ice
d74e7b5630 Merge pull request #30 from psbleep/longer-migration-version-names
Allow longer migration version names
2020-07-24 10:28:52 +08:00
Patrick Schneeweis
119b15d597 style fixes 2020-07-23 21:29:26 -04:00
Patrick Schneeweis
f93ab6bbff Raise name length error when creating migration.
Currently this error gets raised when trying to apply the migration, but
it seems better to learn about it when trying to create the migration.
2020-07-23 21:22:30 -04:00
Patrick Schneeweis
bd8eb94a6e Increase max length of version column 2020-07-23 21:21:58 -04:00
long2ice
2fcb2626fd Fix postgres drop fk 2020-07-20 10:20:08 +08:00
long2ice
3728db4279 Merge remote-tracking branch 'origin/dev' into dev 2020-07-15 12:58:56 +08:00
long2ice
6ac1fb5332 add ads 2020-07-15 12:58:34 +08:00
long2ice
87a17d443c update py version 3.7 2020-07-13 20:32:14 +08:00
long2ice
55f18a69ed Merge pull request #23 from long2ice/master
Master
2020-07-09 14:34:37 +08:00
long2ice
86a9c4cedd Merge branch 'dev' 2020-07-09 14:28:09 +08:00
long2ice
86e1d3defb fix test error 2020-07-08 21:00:30 +08:00
long2ice
01fa7fbbdb Merge pull request #21 from moubctez/postgresql_fixes
Enhance PostgreSQL support
2020-07-08 20:54:40 +08:00
Adam Ciarciński
90196eb1bf Apply black 2020-07-08 14:38:42 +02:00
Adam Ciarciński
3c111792a9 Enhance PostgreSQL support 2020-07-07 19:54:55 +02:00
long2ice
77e9d7bc91 update deps 2020-07-06 00:10:07 +08:00
long2ice
fe2ddff88b update version 2020-06-24 15:17:52 +08:00
long2ice
0d23297f46 Fix bug in windows. 2020-06-24 15:17:30 +08:00
long2ice
6be6d55e5b Merge remote-tracking branch 'origin/dev' into dev 2020-06-22 12:55:46 +08:00
long2ice
25674bc73a update Makefile 2020-06-22 12:55:26 +08:00
long2ice
1715eda1a3 Merge pull request #11 from FutureSenzhong/dev
updata:File operations use utf8 encoding
2020-06-22 12:27:22 +08:00
邓森中
f5775049dd updata:File operations use utf8 encoding 2020-06-22 11:18:52 +08:00
long2ice
6fd0f8a42f update README.md licence 2020-06-20 12:49:43 +08:00
long2ice
f52dc009af update README.md 2020-06-20 12:48:32 +08:00
long2ice
9248d456f9 update license 2020-06-17 11:39:15 +08:00
long2ice
c24f2f6b09 update pyproject.toml 2020-06-12 18:11:19 +08:00
long2ice
19c9c2c30f Merge branch 'dev' 2020-06-12 17:53:30 +08:00
long2ice
73b75349ee update version 0.2.0 2020-06-12 17:53:17 +08:00
long2ice
7bc553221a raise NotImplementedError 2020-06-12 09:31:01 +08:00
long2ice
7413a05e19 set --safe bool 2020-06-08 18:07:41 +08:00
long2ice
bf194ca8ce Update model file find method 2020-06-03 18:42:35 +08:00
long2ice
b06da0223a add --build 2020-06-03 09:39:52 +08:00
long2ice
83554cdc5d Merge remote-tracking branch 'origin/dev' into dev
# Conflicts:
#	.github/workflows/pypi.yml
#	.github/workflows/test.yml
#	Makefile
2020-06-03 09:38:20 +08:00
long2ice
6c76bfccad Merge remote-tracking branch 'origin/dev' into dev 2020-06-02 22:23:26 +08:00
long2ice
a1746e457c update github actions 2020-06-02 22:23:15 +08:00
long2ice
2a0435dea9 update github actions 2020-06-02 22:14:44 +08:00
long2ice
e87f67f1e1 update github actions 2020-06-02 22:02:46 +08:00
long2ice
7b4b7ac749 update github actions 2020-06-02 18:58:59 +08:00
long2ice
5b9b51db3f update github actions 2020-06-02 18:38:39 +08:00
long2ice
ffeee3c901 update github actions 2020-06-02 18:28:50 +08:00
long2ice
b4366d2427 update github actions 2020-06-02 18:20:55 +08:00
long2ice
ec1c80f3a9 remove requirements 2020-06-01 14:57:29 +08:00
long2ice
d2083632eb add cli -V 2020-05-27 12:44:49 +08:00
long2ice
dc8b4c2263 Merge remote-tracking branch 'origin/master' 2020-05-27 11:22:01 +08:00
long2ice
2fc43cb0d8 Merge pull request #7 from long2ice/dev
v0.1.9
2020-05-26 20:06:56 +08:00
long2ice
cb5dffeeb8 Merge branch 'dev' 2020-05-26 20:05:35 +08:00
long2ice
125389461f check tortoise add aerich.models 2020-05-26 14:44:55 +08:00
long2ice
c09c878eaf add support modify column
diff mysql ddl
2020-05-26 10:22:02 +08:00
long2ice
ef3e0c11d5 update version 2020-05-25 23:46:35 +08:00
long2ice
881f70f748 Fix default_connection when upgrade 2020-05-25 23:44:42 +08:00
long2ice
615d9747dc Merge pull request #5 from long2ice/dev
v0.1.8
2020-05-25 22:43:36 +08:00
long2ice
6ffca1a0c7 add support modify column 2020-05-25 22:39:39 +08:00
long2ice
95e41720cb Fix init db sql error 2020-05-25 18:53:34 +08:00
long2ice
40c0008e6e Fix upgrade error when migrate 2020-05-25 18:02:56 +08:00
long2ice
ce75e55d60 update README.rst 2020-05-25 16:36:18 +08:00
long2ice
4d4f951e09 update README.rst 2020-05-25 16:33:56 +08:00
long2ice
354e861dad add more test 2020-05-24 13:47:10 +08:00
long2ice
3a76486993 migrate raise error 2020-05-24 00:05:45 +08:00
long2ice
4d0a6b4de6 Fix version num str 2020-05-22 15:35:35 +08:00
long2ice
c01d2993e0 Exclude models.Aerich.
Add init record when init-db.
2020-05-22 11:59:03 +08:00
long2ice
bab5ebf2f0 migrate exclude aerich.models 2020-05-22 11:14:16 +08:00
long2ice
7e5cefd7d6 write old models exclude aerich.models 2020-05-22 11:03:52 +08:00
long2ice
0cea28d521 update version 2020-05-21 23:57:13 +08:00
long2ice
b92e6551fd update dependency_links 2020-05-21 23:33:58 +08:00
long2ice
bbabde32a1 update version 2020-05-21 21:24:21 +08:00
44 changed files with 4934 additions and 1692 deletions

1
.github/FUNDING.yml vendored Normal file

@@ -0,0 +1 @@
custom: ["https://sponsor.long2ice.io"]


@@ -1,7 +1,13 @@
name: test
on: [push, pull_request]
name: ci
on:
push:
branches-ignore:
- main
pull_request:
branches-ignore:
- main
jobs:
testall:
ci:
runs-on: ubuntu-latest
services:
postgres:
@@ -19,10 +25,10 @@ jobs:
- uses: actions/setup-python@v2
with:
python-version: '3.x'
- name: Install dependencies
- name: Install and configure Poetry
run: |
python -m pip install --upgrade pip
pip install -r requirements-dev.txt
pip install -U pip poetry
poetry config virtualenvs.create false
- name: CI
env:
MYSQL_PASS: root
@@ -31,4 +37,4 @@ jobs:
POSTGRES_PASS: 123456
POSTGRES_HOST: 127.0.0.1
POSTGRES_PORT: 5432
run: make testall
run: make ci

.github/workflows/pypi.yml

@@ -8,14 +8,17 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v1
- uses: actions/setup-python@v2
with:
python-version: '3.x'
- name: Build dists
- name: Install and configure Poetry
run: |
python3 setup.py sdist
pip install -U pip poetry
poetry config virtualenvs.create false
- name: Build dists
run: make build
- name: Pypi Publish
uses: pypa/gh-action-pypi-publish@master
with:
user: __token__
password: ${{ secrets.pypi_password }}
password: ${{ secrets.pypi_password }}

5
.gitignore vendored

@@ -143,4 +143,7 @@ cython_debug/
.idea
migrations
aerich.ini
src
src
.vscode
.DS_Store
.python-version

248
CHANGELOG.md Normal file

@@ -0,0 +1,248 @@
# ChangeLog
## 0.7
### 0.7.2
- Support virtual fields.
- Fix modify multiple times. (#279)
- Added `-i` and `--in-transaction` options to the `aerich upgrade` command. (#296)
- Fix generating two semicolons in a row. (#301)
### 0.7.1
- Fix syntax error with python3.8.10. (#265)
- Fix sql generate error. (#263)
- Fix initialize an empty database. (#267)
### 0.7.1rc1
- Fix postgres sql error (#263)
### 0.7.0
**Now aerich uses `.py` files to record versions.**
Upgrade Note:
1. Drop `aerich` table
2. Delete `migrations/models` folder
3. Run `aerich init-db`
- Improve `inspectdb`, adding support for the `postgresql::numeric` data type.
- Add support for dynamically loading DDL classes, making it easier to add support for new databases without changing the `Migrate` class logic.
- Fix decimal field change. (#246)
- Support add/remove field with index.
## 0.6
### 0.6.3
- Improve `inspectdb` and support `postgres` & `sqlite`.
### 0.6.2
- Support migration for specified index. (#203)
### 0.6.1
- Fix `pyproject.toml` not existing error. (#217)
### 0.6.0
- Change default config file from `aerich.ini` to `pyproject.toml`. (#197)
**Upgrade note:**
1. Run `aerich init -t config.TORTOISE_ORM`.
2. Remove `aerich.ini`.
- Remove `pydantic` dependency. (#198)
- `inspectdb` supports `DATE`. (#215)
## 0.5
### 0.5.8
- Support `indexes` change. (#193)
### 0.5.7
- Fix no module found error. (#188) (#189)
### 0.5.6
- Add `Command` class. (#148) (#141) (#123) (#106)
- Fix: migrate doesn't use source_field in unique_together. (#181)
### 0.5.5
- Fix KeyError: 'src_folder' after upgrading aerich to 0.5.4. (#176)
- Fix MySQL 5.X rename column.
- Fix `db_constraint` when fk changed. (#179)
### 0.5.4
- Fix incorrect index creation order. (#151)
- Don't catch exceptions when importing config. (#164)
- Support `drop column` for sqlite. (#40)
### 0.5.3
- Fix postgres alter null. (#142)
- Fix default function when migrate. (#147)
### 0.5.2
- Fix rename field on the field add. (#134)
- Fix postgres field type change error. (#135)
- Fix inspectdb for `FloatField`. (#138)
- Support `rename table`. (#139)
### 0.5.1
- Fix tortoise connections not being closed properly. (#120)
- Fix bug for field change. (#119)
- Fix drop model in the downgrade. (#132)
### 0.5.0
- Refactor core code; there are no longer any limitations.
## 0.4
### 0.4.4
- Fix unnecessary import. (#113)
### 0.4.3
- Replace the migrations separator with a standard SQL comment.
- Add `inspectdb` command.
### 0.4.2
- Use `pathlib` for path resolving. (#89)
- Fix upgrade in new db. (#96)
- Fix packaging error. (#92)
### 0.4.1
- Bug fix. (#91 #93)
### 0.4.0
- Use `.sql` instead of `.json` to store version file.
- Add rename column support for MySQL 5.
- Remove callable detection for defaults. (#87)
- Fix `sqlite` stuck. (#90)
## 0.3
### 0.3.3
- Fix encoding error. (#75)
- Support multiple databases. (#68)
- Compatible with models file in directory. (#70)
### 0.3.2
- Fix migrate to new database error. (#62)
### 0.3.1
- Fix first version error.
- Fix init error. (#61)
### 0.3.0
- Refactor migrate logic; this version is not compatible with previous versions.
- `old_models.py` is no longer needed; the model state is stored in the database.
- Upgrade steps:
1. Upgrade the aerich version.
2. Drop the aerich table in the database.
3. Delete the `migrations/{app}` folder and rerun `aerich init-db`.
4. Update models and run `aerich migrate` as normal.
## 0.2
### 0.2.5
- Fix windows support. (#46)
- Support `db_constraint` in fk; m2m should manually define the table with fk. (#52)
### 0.2.4
- Raise an error for unsupported SQLite features.
- Fix Postgres alter table. (#48)
- Add `Rename` support.
### 0.2.3
- Fix tortoise ssl config.
- PostgreSQL add/drop index/unique.
### 0.2.2
- Fix postgres drop fk.
- Fix version sort.
### 0.2.1
- Fix bug in windows.
- Enhance PostgreSQL support.
### 0.2.0
- Update model file find method.
- Set `--safe` bool.
## 0.1
### 0.1.9
- Fix default_connection when upgrading.
- Find default app instead of default.
- Diff MySQL DDL.
- Check tortoise config.
### 0.1.8
- Fix upgrade error when migrate.
- Fix init db sql error.
- Support change column.
### 0.1.7
- Exclude models.Aerich.
- Add init record when init-db.
- Fix version num str.
### 0.1.6
- update dependency_links
### 0.1.5
- Add sqlite and postgres support.
- Fix dependency import.
- Store versions in db.
### 0.1.4
- Fix transaction and fields import.
- Make unique indexes work.
- Add cli --version.
### 0.1.3
- Support indexes and unique_together.
### 0.1.2
- Now aerich supports m2m.
- Add cli cmd init-db.
- Change cli options.
### 0.1.1
- Now aerich basically works.

CHANGELOG.rst

@@ -1,31 +0,0 @@
=========
ChangeLog
=========
0.1
===
0.1.5
-----
- Add sqlite and postgres support.
- Fix dependency import.
- Store versions in db.
0.1.4
-----
- Fix transaction and fields import.
- Make unique index worked.
- Add cli --version.
0.1.3
-----
- Support indexes and unique_together.
0.1.2
-----
- Now aerich support m2m.
- Add cli cmd init-db.
- Change cli options.
0.1.1
-----
- Now aerich is basic worked.

214
LICENSE

@@ -1,21 +1,201 @@
The MIT License (MIT)
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
Copyright (c) 2020 long2ice
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
1. Definitions.
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2020 long2ice
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

MANIFEST.in

@@ -1,3 +0,0 @@
include LICENSE
include README.rst
include requirements.txt

Makefile

@@ -1,40 +1,26 @@
checkfiles = aerich/ tests/
checkfiles = aerich/ tests/ conftest.py
black_opts = -l 100 -t py38
py_warn = PYTHONDEVMODE=1
MYSQL_HOST ?= "127.0.0.1"
MYSQL_PORT ?= 3306
MYSQL_PASS ?= "123456"
POSTGRES_HOST ?= "127.0.0.1"
POSTGRES_PORT ?= 5432
POSTGRES_PASS ?= "123456"
help:
@echo "Aerich development makefile"
@echo
@echo "usage: make <target>"
@echo "Targets:"
@echo " up Updates dev/test dependencies"
@echo " deps Ensure dev/test dependencies are installed"
@echo " check Checks that build is sane"
@echo " lint Reports all linter violations"
@echo " test Runs all tests"
@echo " style Auto-formats the code"
up:
@poetry update
deps:
@which pip-sync > /dev/null || pip install -q pip-tools
@pip install -r requirements-dev.txt
@poetry install -E asyncpg -E asyncmy
style: deps
isort -rc $(checkfiles)
black $(black_opts) $(checkfiles)
@isort -src $(checkfiles)
@black $(black_opts) $(checkfiles)
check: deps
ifneq ($(shell which black),)
black --check $(black_opts) $(checkfiles) || (echo "Please run 'make style' to auto-fix style issues" && false)
endif
flake8 $(checkfiles)
mypy $(checkfiles)
pylint -d C,W,R $(checkfiles)
bandit -r $(checkfiles)
python setup.py check -mrs
@black --check $(black_opts) $(checkfiles) || (echo "Please run 'make style' to auto-fix style issues" && false)
@ruff $(checkfiles)
test: deps
$(py_warn) TEST_DB=sqlite://:memory: py.test
@@ -43,17 +29,14 @@ test_sqlite:
$(py_warn) TEST_DB=sqlite://:memory: py.test
test_mysql:
$(py_warn) TEST_DB="mysql://root:$(MYSQL_PASS)@$(MYSQL_HOST):$(MYSQL_PORT)/test_\{\}" py.test
$(py_warn) TEST_DB="mysql://root:$(MYSQL_PASS)@$(MYSQL_HOST):$(MYSQL_PORT)/test_\{\}" pytest -vv -s
test_postgres:
$(py_warn) TEST_DB="postgres://postgres:$(POSTGRES_PASS)@$(POSTGRES_HOST):$(POSTGRES_PORT)/test_\{\}" py.test
$(py_warn) TEST_DB="postgres://postgres:$(POSTGRES_PASS)@$(POSTGRES_HOST):$(POSTGRES_PORT)/test_\{\}" pytest -vv -s
testall: deps test_sqlite test_postgres test_mysql
publish: deps
rm -fR dist/
python setup.py sdist
twine upload dist/*
build: deps
@poetry build
ci:
@act -P ubuntu-latest=nektos/act-environments-ubuntu:18.04 -b
ci: check testall

276
README.md Normal file

@@ -0,0 +1,276 @@
# Aerich
[![image](https://img.shields.io/pypi/v/aerich.svg?style=flat)](https://pypi.python.org/pypi/aerich)
[![image](https://img.shields.io/github/license/tortoise/aerich)](https://github.com/tortoise/aerich)
[![image](https://github.com/tortoise/aerich/workflows/pypi/badge.svg)](https://github.com/tortoise/aerich/actions?query=workflow:pypi)
[![image](https://github.com/tortoise/aerich/workflows/ci/badge.svg)](https://github.com/tortoise/aerich/actions?query=workflow:ci)
English | [Русский](./README_RU.md)
## Introduction
Aerich is a database migrations tool for TortoiseORM, similar to Alembic for SQLAlchemy or to Django ORM with its own migration solution.
## Install
Just install from pypi:
```shell
pip install aerich
```
## Quick Start
```shell
> aerich -h
Usage: aerich [OPTIONS] COMMAND [ARGS]...
Options:
-V, --version Show the version and exit.
-c, --config TEXT Config file. [default: pyproject.toml]
--app TEXT Tortoise-ORM app name.
-h, --help Show this message and exit.
Commands:
downgrade Downgrade to specified version.
heads Show current available heads in migrate location.
history List all migrate items.
init Init config file and generate root migrate location.
init-db Generate schema and generate app migrate location.
inspectdb Introspects the database tables to standard output as...
migrate Generate migrate changes file.
upgrade Upgrade to specified version.
```
## Usage
You need to add `aerich.models` to your `Tortoise-ORM` config first. Example:
```python
TORTOISE_ORM = {
"connections": {"default": "mysql://root:123456@127.0.0.1:3306/test"},
"apps": {
"models": {
"models": ["tests.models", "aerich.models"],
"default_connection": "default",
},
},
}
```
### Initialization
```shell
> aerich init -h
Usage: aerich init [OPTIONS]
Init config file and generate root migrate location.
Options:
-t, --tortoise-orm TEXT Tortoise-ORM config module dict variable, like
settings.TORTOISE_ORM. [required]
--location TEXT Migrate store location. [default: ./migrations]
-s, --src_folder TEXT Folder of the source, relative to the project root.
-h, --help Show this message and exit.
```
Initialize the config file and migrations location:
```shell
> aerich init -t tests.backends.mysql.TORTOISE_ORM
Success create migrate location ./migrations
Success write config to pyproject.toml
```
### Init db
```shell
> aerich init-db
Success create app migrate location ./migrations/models
Success generate schema for app "models"
```
If your Tortoise-ORM app is not the default `models`, you must specify the correct app via `--app`,
e.g. `aerich --app other_models init-db`.
### Update models and make migrate
```shell
> aerich migrate --name drop_column
Success migrate 1_202029051520102929_drop_column.py
```
The migration filename format is `{version_num}_{datetime}_{name|update}.py`.
If `aerich` guesses you are renaming a column, it will ask `Rename {old_column} to {new_column} [True]`. Choose `True` to rename the column without dropping it, or `False` to drop the column and then recreate it. Note that the latter may lose data.
### Upgrade to latest version
```shell
> aerich upgrade
Success upgrade 1_202029051520102929_drop_column.py
```
Now your database is migrated to the latest version.
### Downgrade to specified version
```shell
> aerich downgrade -h
Usage: aerich downgrade [OPTIONS]
Downgrade to specified version.
Options:
-v, --version INTEGER Specified version, default to last. [default: -1]
-d, --delete Delete version files at the same time. [default:
False]
--yes Confirm the action without prompting.
-h, --help Show this message and exit.
```
```shell
> aerich downgrade
Success downgrade 1_202029051520102929_drop_column.py
```
Now your db is rolled back to the specified version.
### Show history
```shell
> aerich history
1_202029051520102929_drop_column.py
```
### Show heads to be migrated
```shell
> aerich heads
1_202029051520102929_drop_column.py
```
### Inspect db tables to TortoiseORM model
Currently `inspectdb` supports MySQL, Postgres, and SQLite.
```shell
Usage: aerich inspectdb [OPTIONS]
Introspects the database tables to standard output as TortoiseORM model.
Options:
-t, --table TEXT Which tables to inspect.
-h, --help Show this message and exit.
```
Inspect all tables and print to console:
```shell
aerich --app models inspectdb
```
Inspect a specified table in the default app and redirect to `models.py`:
```shell
aerich inspectdb -t user > models.py
```
For example, if your table is:
```sql
CREATE TABLE `test`
(
`id` int NOT NULL AUTO_INCREMENT,
`decimal` decimal(10, 2) NOT NULL,
`date` date DEFAULT NULL,
`datetime` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
`time` time DEFAULT NULL,
`float` float DEFAULT NULL,
`string` varchar(200) COLLATE utf8mb4_general_ci DEFAULT NULL,
`tinyint` tinyint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `asyncmy_string_index` (`string`)
) ENGINE = InnoDB
DEFAULT CHARSET = utf8mb4
COLLATE = utf8mb4_general_ci
```
Now run `aerich inspectdb -t test` to see the generated model:
```python
from tortoise import Model, fields
class Test(Model):
date = fields.DateField(null=True, )
datetime = fields.DatetimeField(auto_now=True, )
decimal = fields.DecimalField(max_digits=10, decimal_places=2, )
float = fields.FloatField(null=True, )
id = fields.IntField(pk=True, )
string = fields.CharField(max_length=200, null=True, )
time = fields.TimeField(null=True, )
tinyint = fields.BooleanField(null=True, )
```
Note that this command is limited and can't infer some fields, such as `IntEnumField`, `ForeignKeyField`, and others.
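For example, here is a hypothetical hand edit after running `inspectdb`, turning a plain integer column into the foreign key it actually represents (the `Order`/`User` names and the `user_id` column are illustrative, not generated output):
```python
from tortoise import Model, fields


class Order(Model):
    id = fields.IntField(pk=True)
    # inspectdb can only emit something like `user_id = fields.IntField()`;
    # rewrite it by hand as the relation you know it represents:
    user = fields.ForeignKeyField("models.User", related_name="orders")
```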
### Multiple databases
```python
tortoise_orm = {
"connections": {
"default": expand_db_url(db_url, True),
"second": expand_db_url(db_url_second, True),
},
"apps": {
"models": {"models": ["tests.models", "aerich.models"], "default_connection": "default"},
"models_second": {"models": ["tests.models_second"], "default_connection": "second", },
},
}
```
You only need to include `aerich.models` in one app, and you must specify `--app` when running `aerich migrate` and other commands, as shown below.
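For example, a hypothetical run against the `models_second` app from the config above (the migration name is illustrative):
```shell
aerich --app models_second migrate --name add_second_models
aerich --app models_second upgrade
```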
## Restore `aerich` workflow
In some cases, such as breaking changes introduced by upgrading `aerich`, you can't run `aerich migrate` or `aerich upgrade`. In that case, take the following steps:
1. drop `aerich` table.
2. delete `migrations/{app}` directory.
3. rerun `aerich init-db`.
Note that these actions are safe; you can also use them to reset your migrations if you have accumulated too many migration files.
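A minimal shell sketch of these steps, assuming a MySQL database named `test` and the default `./migrations` location:
```shell
mysql -u root -p test -e "DROP TABLE aerich"  # step 1: drop the `aerich` table
rm -rf migrations/models                      # step 2: delete the `migrations/{app}` directory
aerich init-db                                # step 3: rerun init-db
```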
## Use `aerich` in application
You can use `aerich` outside of the CLI by using the `Command` class.
```python
from aerich import Command
command = Command(tortoise_config=config, app='models')
await command.init()
await command.migrate('test')
```
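A minimal sketch of applying migrations programmatically outside an existing event loop, assuming `Command.upgrade` applies pending migrations the same way the CLI `upgrade` command does, and that `settings.TORTOISE_ORM` is a hypothetical module path holding the config dict shown in the Usage section:
```python
from tortoise import run_async

from aerich import Command
from settings import TORTOISE_ORM  # hypothetical module holding your Tortoise ORM config


async def apply_migrations() -> None:
    command = Command(tortoise_config=TORTOISE_ORM, app="models")
    await command.init()
    await command.upgrade()  # assumed to apply all pending migrations, like `aerich upgrade`


run_async(apply_migrations())
```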
## License
This project is licensed under the
[Apache-2.0](https://github.com/long2ice/aerich/blob/master/LICENSE) License.

README.rst

@@ -1,160 +0,0 @@
======
Aerich
======
.. image:: https://img.shields.io/pypi/v/aerich.svg?style=flat
:target: https://pypi.python.org/pypi/aerich
.. image:: https://img.shields.io/github/license/long2ice/aerich
:target: https://github.com/long2ice/aerich
.. image:: https://github.com/long2ice/aerich/workflows/pypi/badge.svg
:target: https://github.com/long2ice/aerich/actions?query=workflow:pypi
.. image:: https://github.com/long2ice/aerich/workflows/test/badge.svg
:target: https://github.com/long2ice/aerich/actions?query=workflow:test
Introduction
============
Tortoise-ORM is the best asyncio ORM now, but it lacks a database migrations tool like alembic for SQLAlchemy, or Django ORM with it's own migrations tool.
This project aim to be a best migrations tool for Tortoise-ORM and which written by one of contributors of Tortoise-ORM.
Install
=======
Just install from pypi:
.. code-block:: shell
$ pip install aerich
Quick Start
===========
.. code-block:: shell
$ aerich -h
Usage: aerich [OPTIONS] COMMAND [ARGS]...
Options:
-c, --config TEXT Config file. [default: aerich.ini]
--app TEXT Tortoise-ORM app name. [default: models]
-n, --name TEXT Name of section in .ini file to use for aerich config.
[default: aerich]
-h, --help Show this message and exit.
Commands:
downgrade Downgrade to previous version.
heads Show current available heads in migrate location.
history List all migrate items.
init Init config file and generate root migrate location.
init-db Generate schema and generate app migrate location.
migrate Generate migrate changes file.
upgrade Upgrade to latest version.
Usage
=====
You need add ``aerich.models`` to your ``Tortoise-ORM`` config first, example:
.. code-block:: python
TORTOISE_ORM = {
"connections": {"default": "mysql://root:123456@127.0.0.1:3306/test"},
"apps": {
"models": {
"models": ["tests.models", "aerich.models"],
"default_connection": "default",
},
},
}
Initialization
--------------
.. code-block:: shell
$ aerich init -h
Usage: aerich init [OPTIONS]
Init config file and generate root migrate location.
Options:
-t, --tortoise-orm TEXT Tortoise-ORM config module dict variable, like settings.TORTOISE_ORM.
[required]
--location TEXT Migrate store location. [default: ./migrations]
-h, --help Show this message and exit.
Init config file and location:
.. code-block:: shell
$ aerich init -t tests.backends.mysql.TORTOISE_ORM
Success create migrate location ./migrations
Success generate config file aerich.ini
Init db
-------
.. code-block:: shell
$ aerich init-db
Success create app migrate location ./migrations/models
Success generate schema for app "models"
Update models and make migrate
------------------------------
.. code-block:: shell
$ aerich migrate --name drop_column
Success migrate 1_202029051520102929_drop_column.json
The migration filename format is ``{version_num}_{datetime}_{name|update}.json``.
Upgrade to latest version
-------------------------
.. code-block:: shell
$ aerich upgrade
Success upgrade 1_202029051520102929_drop_column.json
Now your database is migrated to the latest version.
Downgrade to previous version
-----------------------------
.. code-block:: shell
$ aerich downgrade
Success downgrade 1_202029051520102929_drop_column.json
Now your database is rolled back to the previous version.
Show history
------------
.. code-block:: shell
$ aerich history
1_202029051520102929_drop_column.json
Show heads to be migrated
-------------------------
.. code-block:: shell
$ aerich heads
1_202029051520102929_drop_column.json
License
=======
This project is licensed under the `Apache-2.0 <https://github.com/long2ice/aerich/blob/master/LICENSE>`_ License.

README_RU.md Normal file

@@ -0,0 +1,274 @@
# Aerich
[![image](https://img.shields.io/pypi/v/aerich.svg?style=flat)](https://pypi.python.org/pypi/aerich)
[![image](https://img.shields.io/github/license/tortoise/aerich)](https://github.com/tortoise/aerich)
[![image](https://github.com/tortoise/aerich/workflows/pypi/badge.svg)](https://github.com/tortoise/aerich/actions?query=workflow:pypi)
[![image](https://github.com/tortoise/aerich/workflows/ci/badge.svg)](https://github.com/tortoise/aerich/actions?query=workflow:ci)
[English](./README.md) | Русский
## Introduction
Aerich is a database migration tool for TortoiseORM, similar to Alembic for SQLAlchemy or the built-in migration solution of Django ORM.
## Installation
Just install from pypi:
```shell
pip install aerich
```
## Quick Start
```shell
> aerich -h
Usage: aerich [OPTIONS] COMMAND [ARGS]...
Options:
-V, --version Show the version and exit.
-c, --config TEXT Config file. [default: pyproject.toml]
--app TEXT Tortoise-ORM app name.
-h, --help Show this message and exit.
Commands:
downgrade Downgrade to specified version.
heads Show current available heads in migrate location.
history List all migrate items.
init Init config file and generate root migrate location.
init-db Generate schema and generate app migrate location.
inspectdb Introspects the database tables to standard output as...
migrate Generate migrate changes file.
upgrade Upgrade to specified version.
```
## Usage
First you need to add aerich.models to your Tortoise-ORM config. Example:
```python
TORTOISE_ORM = {
"connections": {"default": "mysql://root:123456@127.0.0.1:3306/test"},
"apps": {
"models": {
"models": ["tests.models", "aerich.models"],
"default_connection": "default",
},
},
}
```
### Initialization
```shell
> aerich init -h
Usage: aerich init [OPTIONS]
Init config file and generate root migrate location.
Options:
-t, --tortoise-orm TEXT Tortoise-ORM config module dict variable, like
settings.TORTOISE_ORM. [required]
--location TEXT Migrate store location. [default: ./migrations]
-s, --src_folder TEXT Folder of the source, relative to the project root.
-h, --help Show this message and exit.
```
Initialize the config file and set the migrations location:
```shell
> aerich init -t tests.backends.mysql.TORTOISE_ORM
Success create migrate location ./migrations
Success write config to pyproject.toml
```
### Initialize the database
```shell
> aerich init-db
Success create app migrate location ./migrations/models
Success generate schema for app "models"
```
If your Tortoise-ORM app is not the default app named models, you must specify the correct app name with the --app parameter, for example: aerich --app other_models init-db.
### Update models and make a migration
```shell
> aerich migrate --name drop_column
Success migrate 1_202029051520102929_drop_column.py
```
The migration filename format is `{version_num}_{datetime}_{name|update}.py`.
If aerich guesses that you are renaming a column, it will ask:
`Rename {old_column} to {new_column} [True]`. You can choose `True`
to rename the column without dropping it, or choose `False` to drop the column
and create a new one. Note that the latter option may lead to data loss.
### Upgrade to the latest version
```shell
> aerich upgrade
Success upgrade 1_202029051520102929_drop_column.py
```
Now your database is upgraded to the latest version.
### Downgrade to a specified version
```shell
> aerich downgrade -h
Usage: aerich downgrade [OPTIONS]
Downgrade to specified version.
Options:
-v, --version INTEGER Specified version, default to last. [default: -1]
-d, --delete Delete version files at the same time. [default:
False]
--yes Confirm the action without prompting.
-h, --help Show this message and exit.
```
```shell
> aerich downgrade
Success downgrade 1_202029051520102929_drop_column.py
```
Now your database is rolled back to the specified version.
### Show history
```shell
> aerich history
1_202029051520102929_drop_column.py
```
### To see which migrations still need to be applied, use the following command:
```shell
> aerich heads
1_202029051520102929_drop_column.py
```
### Inspect database tables into TortoiseORM models
Currently, inspectdb supports MySQL, Postgres, and SQLite.
```shell
Usage: aerich inspectdb [OPTIONS]
Introspects the database tables to standard output as TortoiseORM model.
Options:
-t, --table TEXT Which tables to inspect.
-h, --help Show this message and exit.
```
Inspect all tables and print them to the console:
```shell
aerich --app models inspectdb
```
Inspect the specified table in the default app and redirect the output to models.py:
```shell
aerich inspectdb -t user > models.py
```
For example, suppose your table looks like this:
```sql
CREATE TABLE `test`
(
`id` int NOT NULL AUTO_INCREMENT,
`decimal` decimal(10, 2) NOT NULL,
`date` date DEFAULT NULL,
`datetime` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
`time` time DEFAULT NULL,
`float` float DEFAULT NULL,
`string` varchar(200) COLLATE utf8mb4_general_ci DEFAULT NULL,
`tinyint` tinyint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `asyncmy_string_index` (`string`)
) ENGINE = InnoDB
DEFAULT CHARSET = utf8mb4
COLLATE = utf8mb4_general_ci
```
Now run the command aerich inspectdb -t test to see the generated model:
```python
from tortoise import Model, fields
class Test(Model):
date = fields.DateField(null=True, )
datetime = fields.DatetimeField(auto_now=True, )
decimal = fields.DecimalField(max_digits=10, decimal_places=2, )
float = fields.FloatField(null=True, )
id = fields.IntField(pk=True, )
string = fields.CharField(max_length=200, null=True, )
time = fields.TimeField(null=True, )
tinyint = fields.BooleanField(null=True, )
```
Note that this command has limitations and cannot automatically infer some fields, such as `IntEnumField`, `ForeignKeyField`, and others.
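For instance (an illustrative sketch, not actual inspectdb output; the `Book` and `Author` models are hypothetical), a relation that inspectdb emitted as a plain integer column usually has to be rewritten by hand:
```python
from tortoise import Model, fields


class Book(Model):
    # inspectdb would typically emit: author_id = fields.IntField()
    # rewrite it manually as the relation it cannot infer:
    author = fields.ForeignKeyField("models.Author", related_name="books")
    name = fields.CharField(max_length=200)
```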
### Multiple databases
```python
tortoise_orm = {
"connections": {
"default": expand_db_url(db_url, True),
"second": expand_db_url(db_url_second, True),
},
"apps": {
"models": {"models": ["tests.models", "aerich.models"], "default_connection": "default"},
"models_second": {"models": ["tests.models_second"], "default_connection": "second", },
},
}
```
You only need to include `aerich.models` in one app, and you must pass `--app` when running `aerich migrate` and the other commands.
## Restore the `aerich` workflow
In some cases, for example when problems appear after upgrading `aerich`, you cannot run `aerich migrate` or `aerich upgrade`. In that case, follow these steps:
1. Drop the `aerich` table.
2. Delete the `migrations/{app}` directory.
3. Rerun `aerich init-db`.
Note that these actions are safe, and you can also use them to reset your migrations if you have accumulated too many migration files.
## Use `aerich` in your application
You can use `aerich` outside of the CLI by using the `Command` class.
```python
from aerich import Command
command = Command(tortoise_config=config, app='models')
await command.init()
await command.migrate('test')
```
## License
This project is licensed under the
[Apache-2.0](https://github.com/long2ice/aerich/blob/master/LICENSE) License.


@@ -1 +1,150 @@
__version__ = "0.1.4"
import os
from pathlib import Path
from typing import List
from tortoise import Tortoise, generate_schema_for_client
from tortoise.exceptions import OperationalError
from tortoise.transactions import in_transaction
from tortoise.utils import get_schema_sql
from aerich.exceptions import DowngradeError
from aerich.inspectdb.mysql import InspectMySQL
from aerich.inspectdb.postgres import InspectPostgres
from aerich.inspectdb.sqlite import InspectSQLite
from aerich.migrate import MIGRATE_TEMPLATE, Migrate
from aerich.models import Aerich
from aerich.utils import (
get_app_connection,
get_app_connection_name,
get_models_describe,
import_py_file,
)
class Command:
def __init__(
self,
tortoise_config: dict,
app: str = "models",
location: str = "./migrations",
):
self.tortoise_config = tortoise_config
self.app = app
self.location = location
Migrate.app = app
async def init(self):
await Migrate.init(self.tortoise_config, self.app, self.location)
async def _upgrade(self, conn, version_file):
file_path = Path(Migrate.migrate_location, version_file)
m = import_py_file(file_path)
upgrade = getattr(m, "upgrade")
await conn.execute_script(await upgrade(conn))
await Aerich.create(
version=version_file,
app=self.app,
content=get_models_describe(self.app),
)
async def upgrade(self, run_in_transaction: bool = True):
migrated = []
for version_file in Migrate.get_all_version_files():
try:
exists = await Aerich.exists(version=version_file, app=self.app)
except OperationalError:
exists = False
if not exists:
app_conn_name = get_app_connection_name(self.tortoise_config, self.app)
if run_in_transaction:
async with in_transaction(app_conn_name) as conn:
await self._upgrade(conn, version_file)
else:
app_conn = get_app_connection(self.tortoise_config, self.app)
await self._upgrade(app_conn, version_file)
migrated.append(version_file)
return migrated
async def downgrade(self, version: int, delete: bool):
ret = []
if version == -1:
specified_version = await Migrate.get_last_version()
else:
specified_version = await Aerich.filter(
app=self.app, version__startswith=f"{version}_"
).first()
if not specified_version:
raise DowngradeError("No specified version found")
if version == -1:
versions = [specified_version]
else:
versions = await Aerich.filter(app=self.app, pk__gte=specified_version.pk)
for version in versions:
file = version.version
async with in_transaction(
get_app_connection_name(self.tortoise_config, self.app)
) as conn:
file_path = Path(Migrate.migrate_location, file)
m = import_py_file(file_path)
downgrade = getattr(m, "downgrade")
downgrade_sql = await downgrade(conn)
if not downgrade_sql.strip():
raise DowngradeError("No downgrade items found")
await conn.execute_script(downgrade_sql)
await version.delete()
if delete:
os.unlink(file_path)
ret.append(file)
return ret
async def heads(self):
ret = []
versions = Migrate.get_all_version_files()
for version in versions:
if not await Aerich.exists(version=version, app=self.app):
ret.append(version)
return ret
async def history(self):
versions = Migrate.get_all_version_files()
return [version for version in versions]
async def inspectdb(self, tables: List[str] = None) -> str:
connection = get_app_connection(self.tortoise_config, self.app)
dialect = connection.schema_generator.DIALECT
if dialect == "mysql":
cls = InspectMySQL
elif dialect == "postgres":
cls = InspectPostgres
elif dialect == "sqlite":
cls = InspectSQLite
else:
raise NotImplementedError(f"{dialect} is not supported")
inspect = cls(connection, tables)
return await inspect.inspect()
async def migrate(self, name: str = "update"):
return await Migrate.migrate(name)
async def init_db(self, safe: bool):
location = self.location
app = self.app
dirname = Path(location, app)
dirname.mkdir(parents=True)
await Tortoise.init(config=self.tortoise_config)
connection = get_app_connection(self.tortoise_config, app)
await generate_schema_for_client(connection, safe)
schema = get_schema_sql(connection, safe)
version = await Migrate.generate_version()
await Aerich.create(
version=version,
app=app,
content=get_models_describe(app),
)
version_file = Path(dirname, version)
content = MIGRATE_TEMPLATE.format(upgrade_sql=schema, downgrade_sql="")
with open(version_file, "w", encoding="utf-8") as f:
f.write(content)


@@ -1,150 +1,168 @@
import json
import asyncio
import os
import sys
from configparser import ConfigParser
from enum import Enum
from functools import wraps
from pathlib import Path
from typing import List
import asyncclick as click
from asyncclick import Context, UsageError
from tortoise import ConfigurationError, Tortoise, generate_schema_for_client
from tortoise.transactions import in_transaction
import click
import tomlkit
from click import Context, UsageError
from tomlkit.exceptions import NonExistentKey
from tortoise import Tortoise
from aerich.migrate import Migrate
from aerich.utils import get_app_connection, get_app_connection_name, get_tortoise_config
from aerich import Command
from aerich.enums import Color
from aerich.exceptions import DowngradeError
from aerich.utils import add_src_path, get_tortoise_config
from aerich.version import __version__
from . import __version__
from .models import Aerich
CONFIG_DEFAULT_VALUES = {
"src_folder": ".",
}
class Color(str, Enum):
green = "green"
red = "red"
yellow = "yellow"
def coro(f):
@wraps(f)
def wrapper(*args, **kwargs):
loop = asyncio.get_event_loop()
# Close db connections at the end of all but the cli group function
try:
loop.run_until_complete(f(*args, **kwargs))
finally:
if f.__name__ not in ["cli", "init"]:
loop.run_until_complete(Tortoise.close_connections())
parser = ConfigParser()
return wrapper
@click.group(context_settings={"help_option_names": ["-h", "--help"]})
@click.version_option(__version__)
@click.version_option(__version__, "-V", "--version")
@click.option(
"-c", "--config", default="aerich.ini", show_default=True, help="Config file.",
)
@click.option("--app", default="models", show_default=True, help="Tortoise-ORM app name.")
@click.option(
"-n",
"--name",
default="aerich",
"-c",
"--config",
default="pyproject.toml",
show_default=True,
help="Name of section in .ini file to use for aerich config.",
help="Config file.",
)
@click.option("--app", required=False, help="Tortoise-ORM app name.")
@click.pass_context
async def cli(ctx: Context, config, app, name):
@coro
async def cli(ctx: Context, config, app):
ctx.ensure_object(dict)
ctx.obj["config_file"] = config
ctx.obj["name"] = name
ctx.obj["app"] = app
invoked_subcommand = ctx.invoked_subcommand
if invoked_subcommand != "init":
if not os.path.exists(config):
config_path = Path(config)
if not config_path.exists():
raise UsageError("You must exec init first", ctx=ctx)
parser.read(config)
location = parser[name]["location"]
tortoise_orm = parser[name]["tortoise_orm"]
content = config_path.read_text()
doc = tomlkit.parse(content)
try:
tool = doc["tool"]["aerich"]
location = tool["location"]
tortoise_orm = tool["tortoise_orm"]
src_folder = tool.get("src_folder", CONFIG_DEFAULT_VALUES["src_folder"])
except NonExistentKey:
raise UsageError("You need run aerich init again when upgrade to 0.6.0+")
add_src_path(src_folder)
tortoise_config = get_tortoise_config(ctx, tortoise_orm)
ctx.obj["config"] = tortoise_config
ctx.obj["location"] = location
app = app or list(tortoise_config.get("apps").keys())[0]
command = Command(tortoise_config=tortoise_config, app=app, location=location)
ctx.obj["command"] = command
if invoked_subcommand != "init-db":
try:
await Migrate.init_with_old_models(tortoise_config, app, location)
except ConfigurationError:
raise UsageError(ctx=ctx, message="You must exec init-db first")
if not Path(location, app).exists():
raise UsageError("You must exec init-db first", ctx=ctx)
await command.init()
@cli.command(help="Generate migrate changes file.")
@click.option("--name", default="update", show_default=True, help="Migrate name.")
@click.pass_context
@coro
async def migrate(ctx: Context, name):
config = ctx.obj["config"]
location = ctx.obj["location"]
app = ctx.obj["app"]
ret = await Migrate.migrate(name)
command = ctx.obj["command"]
ret = await command.migrate(name)
if not ret:
return click.secho("No changes detected", fg=Color.yellow)
Migrate.write_old_models(config, app, location)
click.secho(f"Success migrate {ret}", fg=Color.green)
@cli.command(help="Upgrade to latest version.")
@cli.command(help="Upgrade to specified version.")
@click.option(
"--in-transaction",
"-i",
default=True,
type=bool,
help="Make migrations in transaction or not. Can be helpful for large migrations or creating concurrent indexes.",
)
@click.pass_context
async def upgrade(ctx: Context):
config = ctx.obj["config"]
app = ctx.obj["app"]
migrated = False
for version in Migrate.get_all_version_files():
if not await Aerich.exists(version=version, app=app):
async with in_transaction(get_app_connection_name(config, app)) as conn:
file_path = os.path.join(Migrate.migrate_location, version)
with open(file_path, "r") as f:
content = json.load(f)
upgrade_query_list = content.get("upgrade")
for upgrade_query in upgrade_query_list:
await conn.execute_query(upgrade_query)
await Aerich.create(version=version, app=app)
click.secho(f"Success upgrade {version}", fg=Color.green)
migrated = True
@coro
async def upgrade(ctx: Context, in_transaction: bool):
command = ctx.obj["command"]
migrated = await command.upgrade(run_in_transaction=in_transaction)
if not migrated:
click.secho("No migrate items", fg=Color.yellow)
click.secho("No upgrade items found", fg=Color.yellow)
else:
for version_file in migrated:
click.secho(f"Success upgrade {version_file}", fg=Color.green)
@cli.command(help="Downgrade to previous version.")
@cli.command(help="Downgrade to specified version.")
@click.option(
"-v",
"--version",
default=-1,
type=int,
show_default=True,
help="Specified version, default to last.",
)
@click.option(
"-d",
"--delete",
is_flag=True,
default=False,
show_default=True,
help="Delete version files at the same time.",
)
@click.pass_context
async def downgrade(ctx: Context):
app = ctx.obj["app"]
config = ctx.obj["config"]
last_version = await Migrate.get_last_version()
if not last_version:
return click.secho("No last version found", fg=Color.yellow)
file = last_version.version
async with in_transaction(get_app_connection_name(config, app)) as conn:
file_path = os.path.join(Migrate.migrate_location, file)
with open(file_path, "r") as f:
content = json.load(f)
downgrade_query_list = content.get("downgrade")
for downgrade_query in downgrade_query_list:
await conn.execute_query(downgrade_query)
await last_version.delete()
return click.secho(f"Success downgrade {file}", fg=Color.green)
@click.confirmation_option(
prompt="Downgrade is dangerous, which maybe lose your data, are you sure?",
)
@coro
async def downgrade(ctx: Context, version: int, delete: bool):
command = ctx.obj["command"]
try:
files = await command.downgrade(version, delete)
except DowngradeError as e:
return click.secho(str(e), fg=Color.yellow)
for file in files:
click.secho(f"Success downgrade {file}", fg=Color.green)
@cli.command(help="Show current available heads in migrate location.")
@click.pass_context
@coro
async def heads(ctx: Context):
app = ctx.obj["app"]
versions = Migrate.get_all_version_files()
is_heads = False
for version in versions:
if not await Aerich.exists(version=version, app=app):
click.secho(version, fg=Color.green)
is_heads = True
if not is_heads:
click.secho("No available heads,try migrate", fg=Color.green)
command = ctx.obj["command"]
head_list = await command.heads()
if not head_list:
return click.secho("No available heads, try migrate first", fg=Color.green)
for version in head_list:
click.secho(version, fg=Color.green)
@cli.command(help="List all migrate items.")
@click.pass_context
def history(ctx):
versions = Migrate.get_all_version_files()
@coro
async def history(ctx: Context):
command = ctx.obj["command"]
versions = await command.history()
if not versions:
return click.secho("No history, try migrate", fg=Color.green)
for version in versions:
click.secho(version, fg=Color.green)
if not versions:
click.secho("No history,try migrate", fg=Color.green)
@cli.command(help="Init config file and generate root migrate location.")
@@ -155,59 +173,97 @@ def history(ctx):
help="Tortoise-ORM config module dict variable, like settings.TORTOISE_ORM.",
)
@click.option(
"--location", default="./migrations", show_default=True, help="Migrate store location."
"--location",
default="./migrations",
show_default=True,
help="Migrate store location.",
)
@click.option(
"-s",
"--src_folder",
default=CONFIG_DEFAULT_VALUES["src_folder"],
show_default=False,
help="Folder of the source, relative to the project root.",
)
@click.pass_context
async def init(
ctx: Context, tortoise_orm, location,
):
@coro
async def init(ctx: Context, tortoise_orm, location, src_folder):
config_file = ctx.obj["config_file"]
name = ctx.obj["name"]
if os.path.exists(config_file):
return click.secho("You have inited", fg=Color.yellow)
parser.add_section(name)
parser.set(name, "tortoise_orm", tortoise_orm)
parser.set(name, "location", location)
if os.path.isabs(src_folder):
src_folder = os.path.relpath(os.getcwd(), src_folder)
# Add ./ so it's clear that this is relative path
if not src_folder.startswith("./"):
src_folder = "./" + src_folder
with open(config_file, "w") as f:
parser.write(f)
# check that we can find the configuration, if not we can fail before the config file gets created
add_src_path(src_folder)
get_tortoise_config(ctx, tortoise_orm)
config_path = Path(config_file)
if config_path.exists():
content = config_path.read_text()
doc = tomlkit.parse(content)
else:
doc = tomlkit.parse("[tool.aerich]")
table = tomlkit.table()
table["tortoise_orm"] = tortoise_orm
table["location"] = location
table["src_folder"] = src_folder
doc["tool"]["aerich"] = table
if not os.path.isdir(location):
os.mkdir(location)
config_path.write_text(tomlkit.dumps(doc))
Path(location).mkdir(parents=True, exist_ok=True)
click.secho(f"Success create migrate location {location}", fg=Color.green)
click.secho(f"Success generate config file {config_file}", fg=Color.green)
click.secho(f"Success write config to {config_file}", fg=Color.green)
@cli.command(help="Generate schema and generate app migrate location.")
@click.option(
"-s",
"--safe",
type=bool,
is_flag=True,
default=True,
help="When set to true, creates the table only when it does not already exist.",
show_default=True,
)
@click.pass_context
async def init_db(ctx: Context, safe):
config = ctx.obj["config"]
location = ctx.obj["location"]
app = ctx.obj["app"]
dirname = os.path.join(location, app)
if not os.path.isdir(dirname):
os.mkdir(dirname)
@coro
async def init_db(ctx: Context, safe: bool):
command = ctx.obj["command"]
app = command.app
dirname = Path(command.location, app)
try:
await command.init_db(safe)
click.secho(f"Success create app migrate location {dirname}", fg=Color.green)
click.secho(f'Success generate schema for app "{app}"', fg=Color.green)
except FileExistsError:
return click.secho(
f"Inited {app} already, or delete {dirname} and try again.", fg=Color.yellow
)
Migrate.write_old_models(config, app, location)
await Tortoise.init(config=config)
connection = get_app_connection(config, app)
await generate_schema_for_client(connection, safe)
return click.secho(f'Success generate schema for app "{app}"', fg=Color.green)
@cli.command(help="Introspects the database tables to standard output as TortoiseORM model.")
@click.option(
"-t",
"--table",
help="Which tables to inspect.",
multiple=True,
required=False,
)
@click.pass_context
@coro
async def inspectdb(ctx: Context, table: List[str]):
command = ctx.obj["command"]
ret = await command.inspectdb(table)
click.secho(ret)
def main():
sys.path.insert(0, ".")
cli(_anyio_backend="asyncio")
cli()
if __name__ == "__main__":
main()

aerich/coder.py Normal file

@@ -0,0 +1,31 @@
import base64
import json
import pickle # nosec: B301,B403
from tortoise.indexes import Index
class JsonEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, Index):
return {
"type": "index",
"val": base64.b64encode(pickle.dumps(obj)).decode(), # nosec: B301
}
else:
return super().default(obj)
def object_hook(obj):
_type = obj.get("type")
if not _type:
return obj
return pickle.loads(base64.b64decode(obj["val"])) # nosec: B301
def encoder(obj: dict):
return json.dumps(obj, cls=JsonEncoder)
def decoder(obj: str):
return json.loads(obj, object_hook=object_hook)
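A quick illustrative round trip with these helpers (a hedged sketch; the `name` field in the index is hypothetical):
```python
from tortoise.indexes import Index

from aerich.coder import decoder, encoder

payload = encoder({"indexes": [Index(fields=("name",))]})  # Index pickled and base64-encoded inside JSON
restored = decoder(payload)                                # the Index instance comes back intact
assert isinstance(restored["indexes"][0], Index)
```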


@@ -1,67 +1,97 @@
from enum import Enum
from typing import List, Type
from tortoise import BaseDBAsyncClient, ForeignKeyFieldInstance, ManyToManyFieldInstance, Model
from tortoise import BaseDBAsyncClient, Model
from tortoise.backends.base.schema_generator import BaseSchemaGenerator
from tortoise.fields import Field, JSONField, TextField, UUIDField
from aerich.utils import is_default_function
class BaseDDL:
schema_generator_cls: Type[BaseSchemaGenerator] = BaseSchemaGenerator
DIALECT = "sql"
_DROP_TABLE_TEMPLATE = "DROP TABLE IF EXISTS {table_name}"
_ADD_COLUMN_TEMPLATE = "ALTER TABLE {table_name} ADD {column}"
_DROP_COLUMN_TEMPLATE = "ALTER TABLE {table_name} DROP COLUMN {column_name}"
_ADD_INDEX_TEMPLATE = (
"ALTER TABLE {table_name} ADD {unique} INDEX {index_name} ({column_names})"
_DROP_TABLE_TEMPLATE = 'DROP TABLE IF EXISTS "{table_name}"'
_ADD_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" ADD {column}'
_DROP_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" DROP COLUMN "{column_name}"'
_ALTER_DEFAULT_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" {default}'
_RENAME_COLUMN_TEMPLATE = (
'ALTER TABLE "{table_name}" RENAME COLUMN "{old_column_name}" TO "{new_column_name}"'
)
_DROP_INDEX_TEMPLATE = "ALTER TABLE {table_name} DROP INDEX {index_name}"
_ADD_FK_TEMPLATE = "ALTER TABLE {table_name} ADD CONSTRAINT `{fk_name}` FOREIGN KEY (`{db_column}`) REFERENCES `{table}` (`{field}`) ON DELETE {on_delete}"
_DROP_FK_TEMPLATE = "ALTER TABLE {table_name} DROP FOREIGN KEY {fk_name}"
_M2M_TABLE_TEMPLATE = "CREATE TABLE {table_name} ({backward_key} {backward_type} NOT NULL REFERENCES {backward_table} ({backward_field}) ON DELETE CASCADE,{forward_key} {forward_type} NOT NULL REFERENCES {forward_table} ({forward_field}) ON DELETE CASCADE){extra}{comment};"
_ADD_INDEX_TEMPLATE = (
'ALTER TABLE "{table_name}" ADD {unique}INDEX "{index_name}" ({column_names})'
)
_DROP_INDEX_TEMPLATE = 'ALTER TABLE "{table_name}" DROP INDEX "{index_name}"'
_ADD_FK_TEMPLATE = 'ALTER TABLE "{table_name}" ADD CONSTRAINT "{fk_name}" FOREIGN KEY ("{db_column}") REFERENCES "{table}" ("{field}") ON DELETE {on_delete}'
_DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP FOREIGN KEY "{fk_name}"'
_M2M_TABLE_TEMPLATE = (
'CREATE TABLE "{table_name}" (\n'
' "{backward_key}" {backward_type} NOT NULL REFERENCES "{backward_table}" ("{backward_field}") ON DELETE CASCADE,\n'
' "{forward_key}" {forward_type} NOT NULL REFERENCES "{forward_table}" ("{forward_field}") ON DELETE {on_delete}\n'
"){extra}{comment}"
)
_MODIFY_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" MODIFY COLUMN {column}'
_CHANGE_COLUMN_TEMPLATE = (
'ALTER TABLE "{table_name}" CHANGE {old_column_name} {new_column_name} {new_column_type}'
)
_RENAME_TABLE_TEMPLATE = 'ALTER TABLE "{old_table_name}" RENAME TO "{new_table_name}"'
def __init__(self, client: "BaseDBAsyncClient"):
self.client = client
self.schema_generator = self.schema_generator_cls(client)
def create_table(self, model: "Type[Model]"):
return self.schema_generator._get_table_sql(model, True)["table_creation_string"]
return self.schema_generator._get_table_sql(model, True)["table_creation_string"].rstrip(
";"
)
def drop_table(self, model: "Type[Model]"):
return self._DROP_TABLE_TEMPLATE.format(table_name=model._meta.db_table)
def drop_table(self, table_name: str):
return self._DROP_TABLE_TEMPLATE.format(table_name=table_name)
def create_m2m_table(self, model: "Type[Model]", field: ManyToManyFieldInstance):
def create_m2m(
self, model: "Type[Model]", field_describe: dict, reference_table_describe: dict
):
through = field_describe.get("through")
description = field_describe.get("description")
reference_id = reference_table_describe.get("pk_field").get("db_column")
db_field_types = reference_table_describe.get("pk_field").get("db_field_types")
return self._M2M_TABLE_TEMPLATE.format(
table_name=field.through,
table_name=through,
backward_table=model._meta.db_table,
forward_table=field.related_model._meta.db_table,
forward_table=reference_table_describe.get("table"),
backward_field=model._meta.db_pk_column,
forward_field=field.related_model._meta.db_pk_column,
backward_key=field.backward_key,
forward_field=reference_id,
backward_key=field_describe.get("backward_key"),
backward_type=model._meta.pk.get_for_dialect(self.DIALECT, "SQL_TYPE"),
forward_key=field.forward_key,
forward_type=field.related_model._meta.pk.get_for_dialect(self.DIALECT, "SQL_TYPE"),
extra=self.schema_generator._table_generate_extra(table=field.through),
forward_key=field_describe.get("forward_key"),
forward_type=db_field_types.get(self.DIALECT) or db_field_types.get(""),
on_delete=field_describe.get("on_delete"),
extra=self.schema_generator._table_generate_extra(table=through),
comment=self.schema_generator._table_comment_generator(
table=field.through, comment=field.description
table=through, comment=description
)
if field.description
if description
else "",
)
def drop_m2m(self, field: ManyToManyFieldInstance):
return self._DROP_TABLE_TEMPLATE.format(table_name=field.through)
def drop_m2m(self, table_name: str):
return self._DROP_TABLE_TEMPLATE.format(table_name=table_name)
def add_column(self, model: "Type[Model]", field_object: Field):
def _get_default(self, model: "Type[Model]", field_describe: dict):
db_table = model._meta.db_table
default = field_object.default
db_column = field_object.model_field_name
auto_now_add = getattr(field_object, "auto_now_add", False)
auto_now = getattr(field_object, "auto_now", False)
default = field_describe.get("default")
if isinstance(default, Enum):
default = default.value
db_column = field_describe.get("db_column")
auto_now_add = field_describe.get("auto_now_add", False)
auto_now = field_describe.get("auto_now", False)
if default is not None or auto_now_add:
if callable(default) or isinstance(field_object, (UUIDField, TextField, JSONField)):
if field_describe.get("field_type") in [
"UUIDField",
"TextField",
"JSONField",
] or is_default_function(default):
default = ""
else:
default = field_object.to_db_value(default, model)
try:
default = self.schema_generator._column_default_generator(
db_table,
@@ -73,22 +103,32 @@ class BaseDDL:
except NotImplementedError:
default = ""
else:
default = None
return default
def add_column(self, model: "Type[Model]", field_describe: dict, is_pk: bool = False):
db_table = model._meta.db_table
description = field_describe.get("description")
db_column = field_describe.get("db_column")
db_field_types = field_describe.get("db_field_types")
default = self._get_default(model, field_describe)
if default is None:
default = ""
return self._ADD_COLUMN_TEMPLATE.format(
table_name=db_table,
column=self.schema_generator._create_string(
db_column=field_object.model_field_name,
field_type=field_object.get_for_dialect(self.DIALECT, "SQL_TYPE"),
nullable="NOT NULL" if not field_object.null else "",
unique="UNIQUE" if field_object.unique else "",
db_column=db_column,
field_type=db_field_types.get(self.DIALECT, db_field_types.get("")),
nullable="NOT NULL" if not field_describe.get("nullable") else "",
unique="UNIQUE" if field_describe.get("unique") else "",
comment=self.schema_generator._column_comment_generator(
table=db_table,
column=field_object.model_field_name,
comment=field_object.description,
column=db_column,
comment=field_describe.get("description"),
)
if field_object.description
if description
else "",
is_primary_key=field_object.pk,
is_primary_key=is_pk,
default=default,
),
)
@@ -98,14 +138,56 @@ class BaseDDL:
table_name=model._meta.db_table, column_name=column_name
)
def modify_column(self, model: "Type[Model]", field_describe: dict, is_pk: bool = False):
db_table = model._meta.db_table
db_field_types = field_describe.get("db_field_types")
default = self._get_default(model, field_describe)
if default is None:
default = ""
return self._MODIFY_COLUMN_TEMPLATE.format(
table_name=db_table,
column=self.schema_generator._create_string(
db_column=field_describe.get("db_column"),
field_type=db_field_types.get(self.DIALECT) or db_field_types.get(""),
nullable="NOT NULL" if not field_describe.get("nullable") else "",
unique="",
comment=self.schema_generator._column_comment_generator(
table=db_table,
column=field_describe.get("db_column"),
comment=field_describe.get("description"),
)
if field_describe.get("description")
else "",
is_primary_key=is_pk,
default=default,
),
)
def rename_column(self, model: "Type[Model]", old_column_name: str, new_column_name: str):
return self._RENAME_COLUMN_TEMPLATE.format(
table_name=model._meta.db_table,
old_column_name=old_column_name,
new_column_name=new_column_name,
)
def change_column(
self, model: "Type[Model]", old_column_name: str, new_column_name: str, new_column_type: str
):
return self._CHANGE_COLUMN_TEMPLATE.format(
table_name=model._meta.db_table,
old_column_name=old_column_name,
new_column_name=new_column_name,
new_column_type=new_column_type,
)
def add_index(self, model: "Type[Model]", field_names: List[str], unique=False):
return self._ADD_INDEX_TEMPLATE.format(
unique="UNIQUE" if unique else "",
unique="UNIQUE " if unique else "",
index_name=self.schema_generator._generate_index_name(
"idx" if not unique else "uid", model, field_names
),
table_name=model._meta.db_table,
column_names=", ".join([self.schema_generator.quote(f) for f in field_names]),
column_names=", ".join(self.schema_generator.quote(f) for f in field_names),
)
def drop_index(self, model: "Type[Model]", field_names: List[str], unique=False):
@@ -116,39 +198,61 @@ class BaseDDL:
table_name=model._meta.db_table,
)
def add_fk(self, model: "Type[Model]", field: ForeignKeyFieldInstance):
db_table = model._meta.db_table
to_field_name = field.to_field_instance.source_field
if not to_field_name:
to_field_name = field.to_field_instance.model_field_name
def drop_index_by_name(self, model: "Type[Model]", index_name: str):
return self._DROP_INDEX_TEMPLATE.format(
index_name=index_name,
table_name=model._meta.db_table,
)
db_column = field.source_field or field.model_field_name + "_id"
def add_fk(self, model: "Type[Model]", field_describe: dict, reference_table_describe: dict):
db_table = model._meta.db_table
db_column = field_describe.get("raw_field")
reference_id = reference_table_describe.get("pk_field").get("db_column")
fk_name = self.schema_generator._generate_fk_name(
from_table=db_table,
from_field=db_column,
to_table=field.related_model._meta.db_table,
to_field=to_field_name,
to_table=reference_table_describe.get("table"),
to_field=reference_table_describe.get("pk_field").get("db_column"),
)
return self._ADD_FK_TEMPLATE.format(
table_name=db_table,
fk_name=fk_name,
db_column=db_column,
table=field.related_model._meta.db_table,
field=to_field_name,
on_delete=field.on_delete,
table=reference_table_describe.get("table"),
field=reference_id,
on_delete=field_describe.get("on_delete"),
)
def drop_fk(self, model: "Type[Model]", field: ForeignKeyFieldInstance):
to_field_name = field.to_field_instance.source_field
if not to_field_name:
to_field_name = field.to_field_instance.model_field_name
def drop_fk(self, model: "Type[Model]", field_describe: dict, reference_table_describe: dict):
db_table = model._meta.db_table
return self._DROP_FK_TEMPLATE.format(
table_name=db_table,
fk_name=self.schema_generator._generate_fk_name(
from_table=db_table,
from_field=field.source_field or field.model_field_name + "_id",
to_table=field.related_model._meta.db_table,
to_field=to_field_name,
from_field=field_describe.get("raw_field"),
to_table=reference_table_describe.get("table"),
to_field=reference_table_describe.get("pk_field").get("db_column"),
),
)
def alter_column_default(self, model: "Type[Model]", field_describe: dict):
db_table = model._meta.db_table
default = self._get_default(model, field_describe)
return self._ALTER_DEFAULT_TEMPLATE.format(
table_name=db_table,
column=field_describe.get("db_column"),
default="SET" + default if default is not None else "DROP DEFAULT",
)
def alter_column_null(self, model: "Type[Model]", field_describe: dict):
return self.modify_column(model, field_describe)
def set_comment(self, model: "Type[Model]", field_describe: dict):
return self.modify_column(model, field_describe)
def rename_table(self, model: "Type[Model]", old_table_name: str, new_table_name: str):
db_table = model._meta.db_table
return self._RENAME_TABLE_TEMPLATE.format(
table_name=db_table, old_table_name=old_table_name, new_table_name=new_table_name
)


@@ -6,3 +6,27 @@ from aerich.ddl import BaseDDL
class MysqlDDL(BaseDDL):
schema_generator_cls = MySQLSchemaGenerator
DIALECT = MySQLSchemaGenerator.DIALECT
_DROP_TABLE_TEMPLATE = "DROP TABLE IF EXISTS `{table_name}`"
_ADD_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` ADD {column}"
_ALTER_DEFAULT_TEMPLATE = "ALTER TABLE `{table_name}` ALTER COLUMN `{column}` {default}"
_CHANGE_COLUMN_TEMPLATE = (
"ALTER TABLE `{table_name}` CHANGE {old_column_name} {new_column_name} {new_column_type}"
)
_DROP_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` DROP COLUMN `{column_name}`"
_RENAME_COLUMN_TEMPLATE = (
"ALTER TABLE `{table_name}` RENAME COLUMN `{old_column_name}` TO `{new_column_name}`"
)
_ADD_INDEX_TEMPLATE = (
"ALTER TABLE `{table_name}` ADD {unique}INDEX `{index_name}` ({column_names})"
)
_DROP_INDEX_TEMPLATE = "ALTER TABLE `{table_name}` DROP INDEX `{index_name}`"
_ADD_FK_TEMPLATE = "ALTER TABLE `{table_name}` ADD CONSTRAINT `{fk_name}` FOREIGN KEY (`{db_column}`) REFERENCES `{table}` (`{field}`) ON DELETE {on_delete}"
_DROP_FK_TEMPLATE = "ALTER TABLE `{table_name}` DROP FOREIGN KEY `{fk_name}`"
_M2M_TABLE_TEMPLATE = (
"CREATE TABLE `{table_name}` (\n"
" `{backward_key}` {backward_type} NOT NULL REFERENCES `{backward_table}` (`{backward_field}`) ON DELETE CASCADE,\n"
" `{forward_key}` {forward_type} NOT NULL REFERENCES `{forward_table}` (`{forward_field}`) ON DELETE CASCADE\n"
"){extra}{comment}"
)
_MODIFY_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` MODIFY COLUMN {column}"
_RENAME_TABLE_TEMPLATE = "ALTER TABLE `{old_table_name}` RENAME TO `{new_table_name}`"


@@ -1,3 +1,6 @@
from typing import Type
from tortoise import Model
from tortoise.backends.asyncpg.schema_generator import AsyncpgSchemaGenerator
from aerich.ddl import BaseDDL
@@ -6,3 +9,41 @@ from aerich.ddl import BaseDDL
class PostgresDDL(BaseDDL):
schema_generator_cls = AsyncpgSchemaGenerator
DIALECT = AsyncpgSchemaGenerator.DIALECT
_ADD_INDEX_TEMPLATE = 'CREATE {unique}INDEX "{index_name}" ON "{table_name}" ({column_names})'
_DROP_INDEX_TEMPLATE = 'DROP INDEX "{index_name}"'
_ALTER_NULL_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" {set_drop} NOT NULL'
_MODIFY_COLUMN_TEMPLATE = (
'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" TYPE {datatype}{using}'
)
_SET_COMMENT_TEMPLATE = 'COMMENT ON COLUMN "{table_name}"."{column}" IS {comment}'
_DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP CONSTRAINT "{fk_name}"'
def alter_column_null(self, model: "Type[Model]", field_describe: dict):
db_table = model._meta.db_table
return self._ALTER_NULL_TEMPLATE.format(
table_name=db_table,
column=field_describe.get("db_column"),
set_drop="DROP" if field_describe.get("nullable") else "SET",
)
def modify_column(self, model: "Type[Model]", field_describe: dict, is_pk: bool = False):
db_table = model._meta.db_table
db_field_types = field_describe.get("db_field_types")
db_column = field_describe.get("db_column")
datatype = db_field_types.get(self.DIALECT) or db_field_types.get("")
return self._MODIFY_COLUMN_TEMPLATE.format(
table_name=db_table,
column=db_column,
datatype=datatype,
using=f' USING "{db_column}"::{datatype}',
)
def set_comment(self, model: "Type[Model]", field_describe: dict):
db_table = model._meta.db_table
return self._SET_COMMENT_TEMPLATE.format(
table_name=db_table,
column=field_describe.get("db_column") or field_describe.get("raw_field"),
comment="'{}'".format(field_describe.get("description"))
if field_describe.get("description")
else "NULL",
)


@@ -1,8 +1,24 @@
from typing import Type
from tortoise import Model
from tortoise.backends.sqlite.schema_generator import SqliteSchemaGenerator
from aerich.ddl import BaseDDL
from aerich.exceptions import NotSupportError
class SqliteDDL(BaseDDL):
schema_generator_cls = SqliteSchemaGenerator
DIALECT = SqliteSchemaGenerator.DIALECT
def modify_column(self, model: "Type[Model]", field_object: dict, is_pk: bool = True):
raise NotSupportError("Modify column is unsupported in SQLite.")
def alter_column_default(self, model: "Type[Model]", field_describe: dict):
raise NotSupportError("Alter column default is unsupported in SQLite.")
def alter_column_null(self, model: "Type[Model]", field_describe: dict):
raise NotSupportError("Alter column null is unsupported in SQLite.")
def set_comment(self, model: "Type[Model]", field_describe: dict):
raise NotSupportError("Alter column comment is unsupported in SQLite.")

aerich/enums.py Normal file

@@ -0,0 +1,7 @@
from enum import Enum
class Color(str, Enum):
green = "green"
red = "red"
yellow = "yellow"


@@ -1,6 +1,10 @@
class ConfigurationError(Exception):
class NotSupportError(Exception):
"""
config error
raise when features not support
"""
pass
class DowngradeError(Exception):
"""
raise when downgrade error
"""


@@ -0,0 +1,168 @@
from typing import Any, List, Optional
from pydantic import BaseModel
from tortoise import BaseDBAsyncClient
class Column(BaseModel):
name: str
data_type: str
null: bool
default: Any
comment: Optional[str]
pk: bool
unique: bool
index: bool
length: Optional[int]
extra: Optional[str]
decimal_places: Optional[int]
max_digits: Optional[int]
def translate(self) -> dict:
comment = default = length = index = null = pk = ""
if self.pk:
pk = "pk=True, "
else:
if self.unique:
index = "unique=True, "
else:
if self.index:
index = "index=True, "
if self.data_type in ["varchar", "VARCHAR"]:
length = f"max_length={self.length}, "
if self.data_type in ["decimal", "numeric"]:
length_parts = []
if self.max_digits:
length_parts.append(f"max_digits={self.max_digits}")
if self.decimal_places:
length_parts.append(f"decimal_places={self.decimal_places}")
length = ", ".join(length_parts)
if self.null:
null = "null=True, "
if self.default is not None:
if self.data_type in ["tinyint", "INT"]:
default = f"default={'True' if self.default == '1' else 'False'}, "
elif self.data_type == "bool":
default = f"default={'True' if self.default == 'true' else 'False'}, "
elif self.data_type in ["datetime", "timestamptz", "TIMESTAMP"]:
if "CURRENT_TIMESTAMP" == self.default:
if "DEFAULT_GENERATED on update CURRENT_TIMESTAMP" == self.extra:
default = "auto_now=True, "
else:
default = "auto_now_add=True, "
else:
if "::" in self.default:
default = f"default={self.default.split('::')[0]}, "
elif self.default.endswith("()"):
default = ""
else:
default = f"default={self.default}, "
if self.comment:
comment = f"description='{self.comment}', "
return {
"name": self.name,
"pk": pk,
"index": index,
"null": null,
"default": default,
"length": length,
"comment": comment,
}
class Inspect:
_table_template = "class {table}(Model):\n"
def __init__(self, conn: BaseDBAsyncClient, tables: Optional[List[str]] = None):
self.conn = conn
try:
self.database = conn.database
except AttributeError:
pass
self.tables = tables
@property
def field_map(self) -> dict:
raise NotImplementedError
async def inspect(self) -> str:
if not self.tables:
self.tables = await self.get_all_tables()
result = "from tortoise import Model, fields\n\n\n"
tables = []
for table in self.tables:
columns = await self.get_columns(table)
fields = []
model = self._table_template.format(table=table.title().replace("_", ""))
for column in columns:
field = self.field_map[column.data_type](**column.translate())
fields.append(" " + field)
tables.append(model + "\n".join(fields))
return result + "\n\n\n".join(tables)
async def get_columns(self, table: str) -> List[Column]:
raise NotImplementedError
async def get_all_tables(self) -> List[str]:
raise NotImplementedError
@classmethod
def decimal_field(cls, **kwargs) -> str:
return "{name} = fields.DecimalField({pk}{index}{length}{null}{default}{comment})".format(
**kwargs
)
@classmethod
def time_field(cls, **kwargs) -> str:
return "{name} = fields.TimeField({null}{default}{comment})".format(**kwargs)
@classmethod
def date_field(cls, **kwargs) -> str:
return "{name} = fields.DateField({null}{default}{comment})".format(**kwargs)
@classmethod
def float_field(cls, **kwargs) -> str:
return "{name} = fields.FloatField({null}{default}{comment})".format(**kwargs)
@classmethod
def datetime_field(cls, **kwargs) -> str:
return "{name} = fields.DatetimeField({null}{default}{comment})".format(**kwargs)
@classmethod
def text_field(cls, **kwargs) -> str:
return "{name} = fields.TextField({null}{default}{comment})".format(**kwargs)
@classmethod
def char_field(cls, **kwargs) -> str:
return "{name} = fields.CharField({pk}{index}{length}{null}{default}{comment})".format(
**kwargs
)
@classmethod
def int_field(cls, **kwargs) -> str:
return "{name} = fields.IntField({pk}{index}{comment})".format(**kwargs)
@classmethod
def smallint_field(cls, **kwargs) -> str:
return "{name} = fields.SmallIntField({pk}{index}{comment})".format(**kwargs)
@classmethod
def bigint_field(cls, **kwargs) -> str:
return "{name} = fields.BigIntField({pk}{index}{default}{comment})".format(**kwargs)
@classmethod
def bool_field(cls, **kwargs) -> str:
return "{name} = fields.BooleanField({null}{default}{comment})".format(**kwargs)
@classmethod
def uuid_field(cls, **kwargs) -> str:
return "{name} = fields.UUIDField({pk}{index}{default}{comment})".format(**kwargs)
@classmethod
def json_field(cls, **kwargs) -> str:
return "{name} = fields.JSONField({null}{default}{comment})".format(**kwargs)
@classmethod
def binary_field(cls, **kwargs) -> str:
return "{name} = fields.BinaryField({null}{default}{comment})".format(**kwargs)

aerich/inspectdb/mysql.py Normal file

@@ -0,0 +1,69 @@
from typing import List
from aerich.inspectdb import Column, Inspect
class InspectMySQL(Inspect):
@property
def field_map(self) -> dict:
return {
"int": self.int_field,
"smallint": self.smallint_field,
"tinyint": self.bool_field,
"bigint": self.bigint_field,
"varchar": self.char_field,
"longtext": self.text_field,
"text": self.text_field,
"datetime": self.datetime_field,
"float": self.float_field,
"date": self.date_field,
"time": self.time_field,
"decimal": self.decimal_field,
"json": self.json_field,
"longblob": self.binary_field,
}
async def get_all_tables(self) -> List[str]:
sql = "select TABLE_NAME from information_schema.TABLES where TABLE_SCHEMA=%s"
ret = await self.conn.execute_query_dict(sql, [self.database])
return list(map(lambda x: x["TABLE_NAME"], ret))
async def get_columns(self, table: str) -> List[Column]:
columns = []
sql = """select c.*, s.NON_UNIQUE, s.INDEX_NAME
from information_schema.COLUMNS c
left join information_schema.STATISTICS s on c.TABLE_NAME = s.TABLE_NAME
and c.TABLE_SCHEMA = s.TABLE_SCHEMA
and c.COLUMN_NAME = s.COLUMN_NAME
where c.TABLE_SCHEMA = %s
and c.TABLE_NAME = %s"""
ret = await self.conn.execute_query_dict(sql, [self.database, table])
for row in ret:
non_unique = row["NON_UNIQUE"]
if non_unique is None:
unique = False
else:
unique = not non_unique
index_name = row["INDEX_NAME"]
if index_name is None:
index = False
else:
index = row["INDEX_NAME"] != "PRIMARY"
columns.append(
Column(
name=row["COLUMN_NAME"],
data_type=row["DATA_TYPE"],
null=row["IS_NULLABLE"] == "YES",
default=row["COLUMN_DEFAULT"],
pk=row["COLUMN_KEY"] == "PRI",
comment=row["COLUMN_COMMENT"],
unique=row["COLUMN_KEY"] == "UNI",
extra=row["EXTRA"],
unque=unique,
index=index,
length=row["CHARACTER_MAXIMUM_LENGTH"],
max_digits=row["NUMERIC_PRECISION"],
decimal_places=row["NUMERIC_SCALE"],
)
)
return columns


@@ -0,0 +1,76 @@
from typing import List, Optional
from tortoise import BaseDBAsyncClient
from aerich.inspectdb import Column, Inspect
class InspectPostgres(Inspect):
def __init__(self, conn: BaseDBAsyncClient, tables: Optional[List[str]] = None):
super().__init__(conn, tables)
self.schema = self.conn.server_settings.get("schema") or "public"
@property
def field_map(self) -> dict:
return {
"int4": self.int_field,
"int8": self.int_field,
"smallint": self.smallint_field,
"varchar": self.char_field,
"text": self.text_field,
"bigint": self.bigint_field,
"timestamptz": self.datetime_field,
"float4": self.float_field,
"float8": self.float_field,
"date": self.date_field,
"time": self.time_field,
"decimal": self.decimal_field,
"numeric": self.decimal_field,
"uuid": self.uuid_field,
"jsonb": self.json_field,
"bytea": self.binary_field,
"bool": self.bool_field,
"timestamp": self.datetime_field,
}
async def get_all_tables(self) -> List[str]:
sql = "select TABLE_NAME from information_schema.TABLES where table_catalog=$1 and table_schema=$2"
ret = await self.conn.execute_query_dict(sql, [self.database, self.schema])
return list(map(lambda x: x["table_name"], ret))
async def get_columns(self, table: str) -> List[Column]:
columns = []
sql = f"""select c.column_name,
col_description('public.{table}'::regclass, ordinal_position) as column_comment,
t.constraint_type as column_key,
udt_name as data_type,
is_nullable,
column_default,
character_maximum_length,
numeric_precision,
numeric_scale
from information_schema.constraint_column_usage const
join information_schema.table_constraints t
using (table_catalog, table_schema, table_name, constraint_catalog, constraint_schema, constraint_name)
right join information_schema.columns c using (column_name, table_catalog, table_schema, table_name)
where c.table_catalog = $1
and c.table_name = $2
and c.table_schema = $3"""
ret = await self.conn.execute_query_dict(sql, [self.database, table, self.schema])
for row in ret:
columns.append(
Column(
name=row["column_name"],
data_type=row["data_type"],
null=row["is_nullable"] == "YES",
default=row["column_default"],
length=row["character_maximum_length"],
max_digits=row["numeric_precision"],
decimal_places=row["numeric_scale"],
comment=row["column_comment"],
pk=row["column_key"] == "PRIMARY KEY",
unique=False, # can't get this simply
index=False, # can't get this simply
)
)
return columns


@@ -0,0 +1,61 @@
from typing import List
from aerich.inspectdb import Column, Inspect
class InspectSQLite(Inspect):
@property
def field_map(self) -> dict:
return {
"INTEGER": self.int_field,
"INT": self.bool_field,
"SMALLINT": self.smallint_field,
"VARCHAR": self.char_field,
"TEXT": self.text_field,
"TIMESTAMP": self.datetime_field,
"REAL": self.float_field,
"BIGINT": self.bigint_field,
"DATE": self.date_field,
"TIME": self.time_field,
"JSON": self.json_field,
"BLOB": self.binary_field,
}
async def get_columns(self, table: str) -> List[Column]:
columns = []
sql = f"PRAGMA table_info({table})"
ret = await self.conn.execute_query_dict(sql)
columns_index = await self._get_columns_index(table)
for row in ret:
try:
length = row["type"].split("(")[1].split(")")[0]
except IndexError:
length = None
columns.append(
Column(
name=row["name"],
data_type=row["type"].split("(")[0],
null=row["notnull"] == 0,
default=row["dflt_value"],
length=length,
pk=row["pk"] == 1,
unique=columns_index.get(row["name"]) == "unique",
index=columns_index.get(row["name"]) == "index",
)
)
return columns
async def _get_columns_index(self, table: str):
sql = f"PRAGMA index_list ({table})"
indexes = await self.conn.execute_query_dict(sql)
ret = {}
for index in indexes:
sql = f"PRAGMA index_info({index['name']})"
index_info = (await self.conn.execute_query_dict(sql))[0]
ret[index_info["name"]] = "unique" if index["unique"] else "index"
return ret
async def get_all_tables(self) -> List[str]:
sql = "select tbl_name from sqlite_master where type='table' and name!='sqlite_sequence'"
ret = await self.conn.execute_query_dict(sql)
return list(map(lambda x: x["tbl_name"], ret))


@@ -1,23 +1,32 @@
import json
import importlib
import os
import re
from copy import deepcopy
from datetime import datetime
from typing import Dict, List, Tuple, Type
from hashlib import md5
from pathlib import Path
from typing import Dict, List, Optional, Tuple, Type, Union
from tortoise import (
BackwardFKRelation,
BackwardOneToOneRelation,
ForeignKeyFieldInstance,
ManyToManyFieldInstance,
Model,
Tortoise,
)
from tortoise.fields import Field
import click
from dictdiffer import diff
from tortoise import BaseDBAsyncClient, Model, Tortoise
from tortoise.exceptions import OperationalError
from tortoise.indexes import Index
from aerich.ddl import BaseDDL
from aerich.models import Aerich
from aerich.utils import get_app_connection
from aerich.models import MAX_VERSION_LENGTH, Aerich
from aerich.utils import get_app_connection, get_models_describe, is_default_function
MIGRATE_TEMPLATE = """from tortoise import BaseDBAsyncClient
async def upgrade(db: BaseDBAsyncClient) -> str:
return \"\"\"
{upgrade_sql}\"\"\"
async def downgrade(db: BaseDBAsyncClient) -> str:
return \"\"\"
{downgrade_sql}\"\"\"
"""
class Migrate:
@@ -27,71 +36,97 @@ class Migrate:
_downgrade_fk_m2m_index_operators: List[str] = []
_upgrade_m2m: List[str] = []
_downgrade_m2m: List[str] = []
_aerich = Aerich.__name__
_rename_old = []
_rename_new = []
ddl: BaseDDL
migrate_config: dict
old_models = "old_models"
diff_app = "diff_models"
_last_version_content: Optional[dict] = None
app: str
migrate_location: str
@classmethod
def get_old_model_file(cls):
return cls.old_models + ".py"
migrate_location: Path
dialect: str
_db_version: Optional[str] = None
@classmethod
def get_all_version_files(cls) -> List[str]:
return sorted(filter(lambda x: x.endswith("json"), os.listdir(cls.migrate_location)))
return sorted(
filter(lambda x: x.endswith("py"), os.listdir(cls.migrate_location)),
key=lambda x: int(x.split("_")[0]),
)
@classmethod
async def get_last_version(cls) -> Aerich:
return await Aerich.filter(app=cls.app).first()
def _get_model(cls, model: str) -> Type[Model]:
return Tortoise.apps.get(cls.app).get(model)
@classmethod
async def init_with_old_models(cls, config: dict, app: str, location: str):
migrate_config = cls._get_migrate_config(config, app, location)
async def get_last_version(cls) -> Optional[Aerich]:
try:
return await Aerich.filter(app=cls.app).first()
except OperationalError:
pass
@classmethod
async def _get_db_version(cls, connection: BaseDBAsyncClient):
if cls.dialect == "mysql":
sql = "select version() as version"
ret = await connection.execute_query(sql)
cls._db_version = ret[1][0].get("version")
@classmethod
async def load_ddl_class(cls):
ddl_dialect_module = importlib.import_module(f"aerich.ddl.{cls.dialect}")
return getattr(ddl_dialect_module, f"{cls.dialect.capitalize()}DDL")
@classmethod
async def init(cls, config: dict, app: str, location: str):
await Tortoise.init(config=config)
last_version = await cls.get_last_version()
cls.app = app
cls.migrate_config = migrate_config
cls.migrate_location = os.path.join(location, app)
await Tortoise.init(config=migrate_config)
cls.migrate_location = Path(location, app)
if last_version:
cls._last_version_content = last_version.content
connection = get_app_connection(config, app)
if connection.schema_generator.DIALECT == "mysql":
from aerich.ddl.mysql import MysqlDDL
cls.ddl = MysqlDDL(connection)
elif connection.schema_generator.DIALECT == "sqlite":
from aerich.ddl.sqlite import SqliteDDL
cls.ddl = SqliteDDL(connection)
elif connection.schema_generator.DIALECT == "postgres":
from aerich.ddl.postgres import PostgresDDL
cls.ddl = PostgresDDL(connection)
else:
raise NotImplementedError("Current only support MySQL")
cls.dialect = connection.schema_generator.DIALECT
cls.ddl_class = await cls.load_ddl_class()
cls.ddl = cls.ddl_class(connection)
await cls._get_db_version(connection)
@classmethod
async def _get_last_version_num(cls):
last_version = await cls.get_last_version()
if not last_version:
return 0
return None
version = last_version.version
return version.split("_")[0]
return int(version.split("_", 1)[0])
@classmethod
async def _generate_diff_sql(cls, name):
now = datetime.now().strftime("%Y%M%D%H%M%S").replace("/", "")
async def generate_version(cls, name=None):
now = datetime.now().strftime("%Y%m%d%H%M%S").replace("/", "")
last_version_num = await cls._get_last_version_num()
version = f"{last_version_num + 1}_{now}_{name}.json"
content = {
"upgrade": cls.upgrade_operators,
"downgrade": cls.downgrade_operators,
}
with open(os.path.join(cls.migrate_location, version), "w") as f:
json.dump(content, f, indent=2, ensure_ascii=False)
if last_version_num is None:
return f"0_{now}_init.py"
version = f"{last_version_num + 1}_{now}_{name}.py"
if len(version) > MAX_VERSION_LENGTH:
raise ValueError(f"Version name exceeds maximum length ({MAX_VERSION_LENGTH})")
return version
@classmethod
async def _generate_diff_py(cls, name):
version = await cls.generate_version(name)
# delete if same version exists
for version_file in cls.get_all_version_files():
if version_file.startswith(version.split("_")[0]):
os.unlink(Path(cls.migrate_location, version_file))
version_file = Path(cls.migrate_location, version)
content = MIGRATE_TEMPLATE.format(
upgrade_sql=";\n ".join(cls.upgrade_operators) + ";",
downgrade_sql=";\n ".join(cls.downgrade_operators) + ";",
)
with open(version_file, "w", encoding="utf-8") as f:
f.write(content)
return version
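As a concrete illustration of the naming scheme used by generate_version (the timestamp and migration name are hypothetical):

from datetime import datetime

last_version_num = 1
now = datetime.now().strftime("%Y%m%d%H%M%S")        # e.g. "20230804103546"
version = f"{last_version_num + 1}_{now}_update.py"   # -> "2_20230804103546_update.py"
# with no previous version the file is named "0_<timestamp>_init.py"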
@classmethod
@@ -101,95 +136,52 @@ class Migrate:
:param name:
:return:
"""
apps = Tortoise.apps
diff_models = apps.get(cls.diff_app)
app_models = apps.get(cls.app)
cls._diff_models(diff_models, app_models)
cls._diff_models(app_models, diff_models, False)
new_version_content = get_models_describe(cls.app)
cls.diff_models(cls._last_version_content, new_version_content)
cls.diff_models(new_version_content, cls._last_version_content, False)
cls._merge_operators()
if not cls.upgrade_operators:
return ""
return await cls._generate_diff_sql(name)
return await cls._generate_diff_py(name)
@classmethod
def _add_operator(cls, operator: str, upgrade=True, fk=False):
def _add_operator(cls, operator: str, upgrade=True, fk_m2m_index=False):
"""
add an operator; fk/m2m/index operators are collected separately because their execution order is constrained
:param operator:
:param upgrade:
:param fk_m2m:
:param fk_m2m_index:
:return:
"""
operator = operator.rstrip(";")
if upgrade:
if fk:
if fk_m2m_index:
cls._upgrade_fk_m2m_index_operators.append(operator)
else:
cls.upgrade_operators.append(operator)
else:
if fk:
if fk_m2m_index:
cls._downgrade_fk_m2m_index_operators.append(operator)
else:
cls.downgrade_operators.append(operator)
@classmethod
def cp_models(
cls, app: str, model_files: List[str], old_model_file,
):
"""
copy current models into old_model_file
:param app:
:param model_files:
:param old_model_file:
:return:
"""
pattern = rf"(\n)?('|\")({app})(.\w+)('|\")"
for i, model_file in enumerate(model_files):
with open(model_file, "r") as f:
content = f.read()
ret = re.sub(pattern, rf"\2{cls.diff_app}\4\5", content)
mode = "w" if i == 0 else "a"
with open(old_model_file, mode) as f:
f.write(ret)
def _handle_indexes(cls, model: Type[Model], indexes: List[Union[Tuple[str], Index]]):
ret = []
for index in indexes:
if isinstance(index, Index):
index.__hash__ = lambda self: md5( # nosec: B303
self.index_name(cls.ddl.schema_generator, model).encode()
+ self.__class__.__name__.encode()
).hexdigest()
ret.append(index)
return ret
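The lambda above is assigned so that the same logical index coming from the old and the new model description hashes identically when the index sets are compared below. A rough equivalent of the digest it computes (index name taken from the DDL tests):

from hashlib import md5

digest = md5(b"idx_category_name_8b0cb9" + b"Index").hexdigest()  # nosec: B303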
@classmethod
def _get_migrate_config(cls, config: dict, app: str, location: str):
"""
generate tmp config with old models
:param config:
:param app:
:param location:
:return:
"""
temp_config = deepcopy(config)
path = os.path.join(location, app, cls.old_models)
path = path.replace("/", ".").lstrip(".")
temp_config["apps"][cls.diff_app] = {"models": [path]}
return temp_config
@classmethod
def write_old_models(cls, config: dict, app: str, location: str):
"""
write new models to old models
:param config:
:param app:
:param location:
:return:
"""
old_model_files = []
models = config.get("apps").get(app).get("models")
for model in models:
old_model_files.append(model.replace(".", "/") + ".py")
cls.cp_models(app, old_model_files, os.path.join(location, app, cls.get_old_model_file()))
@classmethod
def _diff_models(
cls, old_models: Dict[str, Type[Model]], new_models: Dict[str, Type[Model]], upgrade=True
):
def diff_models(cls, old_models: Dict[str, dict], new_models: Dict[str, dict], upgrade=True):
"""
diff models and add operators
:param old_models:
@@ -197,177 +189,418 @@ class Migrate:
:param upgrade:
:return:
"""
for new_model_str, new_model in new_models.items():
_aerich = f"{cls.app}.{cls._aerich}"
old_models.pop(_aerich, None)
new_models.pop(_aerich, None)
for new_model_str, new_model_describe in new_models.items():
model = cls._get_model(new_model_describe.get("name").split(".")[1])
if new_model_str not in old_models.keys():
cls._add_operator(cls.add_model(new_model), upgrade)
if upgrade:
cls._add_operator(cls.add_model(model), upgrade)
else:
# we can't find origin model when downgrade, so skip
pass
else:
cls.diff_model(old_models.get(new_model_str), new_model, upgrade)
old_model_describe = old_models.get(new_model_str)
# rename table
new_table = new_model_describe.get("table")
old_table = old_model_describe.get("table")
if new_table != old_table:
cls._add_operator(cls.rename_table(model, old_table, new_table), upgrade)
old_unique_together = set(
map(lambda x: tuple(x), old_model_describe.get("unique_together"))
)
new_unique_together = set(
map(lambda x: tuple(x), new_model_describe.get("unique_together"))
)
old_indexes = set(
map(
lambda x: x if isinstance(x, Index) else tuple(x),
cls._handle_indexes(model, old_model_describe.get("indexes", [])),
)
)
new_indexes = set(
map(
lambda x: x if isinstance(x, Index) else tuple(x),
cls._handle_indexes(model, new_model_describe.get("indexes", [])),
)
)
old_pk_field = old_model_describe.get("pk_field")
new_pk_field = new_model_describe.get("pk_field")
# pk field
changes = diff(old_pk_field, new_pk_field)
for action, option, change in changes:
# currently only renaming the pk is supported
if action == "change" and option == "name":
cls._add_operator(cls._rename_field(model, *change), upgrade)
# m2m fields
old_m2m_fields = old_model_describe.get("m2m_fields")
new_m2m_fields = new_model_describe.get("m2m_fields")
for action, option, change in diff(old_m2m_fields, new_m2m_fields):
if change[0][0] == "db_constraint":
continue
table = change[0][1].get("through")
if action == "add":
add = False
if upgrade and table not in cls._upgrade_m2m:
cls._upgrade_m2m.append(table)
add = True
elif not upgrade and table not in cls._downgrade_m2m:
cls._downgrade_m2m.append(table)
add = True
if add:
cls._add_operator(
cls.create_m2m(
model,
change[0][1],
new_models.get(change[0][1].get("model_name")),
),
upgrade,
fk_m2m_index=True,
)
elif action == "remove":
add = False
if upgrade and table not in cls._upgrade_m2m:
cls._upgrade_m2m.append(table)
add = True
elif not upgrade and table not in cls._downgrade_m2m:
cls._downgrade_m2m.append(table)
add = True
if add:
cls._add_operator(cls.drop_m2m(table), upgrade, True)
# add unique_together
for index in new_unique_together.difference(old_unique_together):
cls._add_operator(cls._add_index(model, index, True), upgrade, True)
# remove unique_together
for index in old_unique_together.difference(new_unique_together):
cls._add_operator(cls._drop_index(model, index, True), upgrade, True)
# add indexes
for index in new_indexes.difference(old_indexes):
cls._add_operator(cls._add_index(model, index, False), upgrade, True)
# remove indexes
for index in old_indexes.difference(new_indexes):
cls._add_operator(cls._drop_index(model, index, False), upgrade, True)
old_data_fields = list(
filter(
lambda x: x.get("db_field_types") is not None,
old_model_describe.get("data_fields"),
)
)
new_data_fields = list(
filter(
lambda x: x.get("db_field_types") is not None,
new_model_describe.get("data_fields"),
)
)
old_data_fields_name = list(map(lambda x: x.get("name"), old_data_fields))
new_data_fields_name = list(map(lambda x: x.get("name"), new_data_fields))
# add fields or rename fields
for new_data_field_name in set(new_data_fields_name).difference(
set(old_data_fields_name)
):
new_data_field = next(
filter(lambda x: x.get("name") == new_data_field_name, new_data_fields)
)
is_rename = False
for old_data_field in old_data_fields:
changes = list(diff(old_data_field, new_data_field))
old_data_field_name = old_data_field.get("name")
if len(changes) == 2:
# rename field
if (
changes[0]
== (
"change",
"name",
(old_data_field_name, new_data_field_name),
)
and changes[1]
== (
"change",
"db_column",
(
old_data_field.get("db_column"),
new_data_field.get("db_column"),
),
)
and old_data_field_name not in new_data_fields_name
):
if upgrade:
is_rename = click.prompt(
f"Rename {old_data_field_name} to {new_data_field_name}?",
default=True,
type=bool,
show_choices=True,
)
else:
is_rename = old_data_field_name in cls._rename_new
if is_rename:
cls._rename_new.append(new_data_field_name)
cls._rename_old.append(old_data_field_name)
# only MySQL 8+ has RENAME COLUMN syntax; fall back to CHANGE COLUMN on 5.x
if (
cls.dialect == "mysql"
and cls._db_version
and cls._db_version.startswith("5.")
):
cls._add_operator(
cls._change_field(
model, old_data_field, new_data_field
),
upgrade,
)
else:
cls._add_operator(
cls._rename_field(model, *changes[1][2]),
upgrade,
)
if not is_rename:
cls._add_operator(
cls._add_field(
model,
new_data_field,
),
upgrade,
)
if new_data_field["indexed"]:
cls._add_operator(
cls._add_index(
model, {new_data_field["db_column"]}, new_data_field["unique"]
),
upgrade,
True,
)
# remove fields
for old_data_field_name in set(old_data_fields_name).difference(
set(new_data_fields_name)
):
# don't remove the field if it was renamed
if (upgrade and old_data_field_name in cls._rename_old) or (
not upgrade and old_data_field_name in cls._rename_new
):
continue
old_data_field = next(
filter(lambda x: x.get("name") == old_data_field_name, old_data_fields)
)
db_column = old_data_field["db_column"]
cls._add_operator(
cls._remove_field(
model,
db_column,
),
upgrade,
)
if old_data_field["indexed"]:
cls._add_operator(
cls._drop_index(
model,
{db_column},
),
upgrade,
True,
)
old_fk_fields = old_model_describe.get("fk_fields")
new_fk_fields = new_model_describe.get("fk_fields")
old_fk_fields_name = list(map(lambda x: x.get("name"), old_fk_fields))
new_fk_fields_name = list(map(lambda x: x.get("name"), new_fk_fields))
# add fk
for new_fk_field_name in set(new_fk_fields_name).difference(
set(old_fk_fields_name)
):
fk_field = next(
filter(lambda x: x.get("name") == new_fk_field_name, new_fk_fields)
)
if fk_field.get("db_constraint"):
cls._add_operator(
cls._add_fk(
model, fk_field, new_models.get(fk_field.get("python_type"))
),
upgrade,
fk_m2m_index=True,
)
# drop fk
for old_fk_field_name in set(old_fk_fields_name).difference(
set(new_fk_fields_name)
):
old_fk_field = next(
filter(lambda x: x.get("name") == old_fk_field_name, old_fk_fields)
)
if old_fk_field.get("db_constraint"):
cls._add_operator(
cls._drop_fk(
model, old_fk_field, old_models.get(old_fk_field.get("python_type"))
),
upgrade,
fk_m2m_index=True,
)
# change fields
for field_name in set(new_data_fields_name).intersection(set(old_data_fields_name)):
old_data_field = next(
filter(lambda x: x.get("name") == field_name, old_data_fields)
)
new_data_field = next(
filter(lambda x: x.get("name") == field_name, new_data_fields)
)
changes = diff(old_data_field, new_data_field)
modified = False
for change in changes:
_, option, old_new = change
if option == "indexed":
# change index
unique = new_data_field.get("unique")
if old_new[0] is False and old_new[1] is True:
cls._add_operator(
cls._add_index(model, (field_name,), unique), upgrade, True
)
else:
cls._add_operator(
cls._drop_index(model, (field_name,), unique), upgrade, True
)
elif option == "db_field_types.":
if new_data_field.get("field_type") == "DecimalField":
# modify column
cls._add_operator(
cls._modify_field(model, new_data_field),
upgrade,
)
else:
continue
elif option == "default":
if not (
is_default_function(old_new[0]) or is_default_function(old_new[1])
):
# change column default
cls._add_operator(
cls._alter_default(model, new_data_field), upgrade
)
elif option == "unique":
# skip: the indexed change already covers unique
continue
elif option == "nullable":
# change nullable
cls._add_operator(cls._alter_null(model, new_data_field), upgrade)
else:
if modified:
continue
# modify column
cls._add_operator(
cls._modify_field(model, new_data_field),
upgrade,
)
modified = True
for old_model in old_models:
if old_model not in new_models.keys():
cls._add_operator(cls.remove_model(old_models.get(old_model)), upgrade)
cls._add_operator(cls.drop_model(old_models.get(old_model).get("table")), upgrade)
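The field and model comparisons above are driven by dictdiffer.diff, which yields (action, path, change) tuples. A minimal sketch of the shape handled by the rename detection (field names are hypothetical):

from dictdiffer import diff

old_field = {"name": "slug", "db_column": "slug", "nullable": False}
new_field = {"name": "slug_name", "db_column": "slug_name", "nullable": False}
changes = list(diff(old_field, new_field))
# expected (order follows the dict keys):
# [('change', 'name', ('slug', 'slug_name')),
#  ('change', 'db_column', ('slug', 'slug_name'))]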
@classmethod
def rename_table(cls, model: Type[Model], old_table_name: str, new_table_name: str):
return cls.ddl.rename_table(model, old_table_name, new_table_name)
@classmethod
def add_model(cls, model: Type[Model]):
return cls.ddl.create_table(model)
@classmethod
def remove_model(cls, model: Type[Model]):
return cls.ddl.drop_table(model)
def drop_model(cls, table_name: str):
return cls.ddl.drop_table(table_name)
@classmethod
def diff_model(cls, old_model: Type[Model], new_model: Type[Model], upgrade=True):
"""
diff single model
:param old_model:
:param new_model:
:param upgrade:
:return:
"""
old_indexes = old_model._meta.indexes
new_indexes = new_model._meta.indexes
def create_m2m(cls, model: Type[Model], field_describe: dict, reference_table_describe: dict):
return cls.ddl.create_m2m(model, field_describe, reference_table_describe)
old_unique_together = old_model._meta.unique_together
new_unique_together = new_model._meta.unique_together
old_fields_map = old_model._meta.fields_map
new_fields_map = new_model._meta.fields_map
old_keys = old_fields_map.keys()
new_keys = new_fields_map.keys()
for new_key in new_keys:
new_field = new_fields_map.get(new_key)
if cls._exclude_field(new_field, upgrade):
continue
if new_key not in old_keys:
cls._add_operator(
cls._add_field(new_model, new_field),
upgrade,
isinstance(new_field, (ForeignKeyFieldInstance, ManyToManyFieldInstance)),
)
else:
old_field = old_fields_map.get(new_key)
if (old_field.index and not new_field.index) or (
old_field.unique and not new_field.unique
):
cls._add_operator(
cls._remove_index(
old_model, (old_field.model_field_name,), old_field.unique
),
upgrade,
isinstance(old_field, (ForeignKeyFieldInstance, ManyToManyFieldInstance)),
)
elif (new_field.index and not old_field.index) or (
new_field.unique and not old_field.unique
):
cls._add_operator(
cls._add_index(new_model, (new_field.model_field_name,), new_field.unique),
upgrade,
isinstance(new_field, (ForeignKeyFieldInstance, ManyToManyFieldInstance)),
)
for old_key in old_keys:
field = old_fields_map.get(old_key)
if old_key not in new_keys and not cls._exclude_field(field, upgrade):
cls._add_operator(
cls._remove_field(old_model, field),
upgrade,
isinstance(field, (ForeignKeyFieldInstance, ManyToManyFieldInstance)),
)
for new_index in new_indexes:
if new_index not in old_indexes:
cls._add_operator(cls._add_index(new_model, new_index,), upgrade)
for old_index in old_indexes:
if old_index not in new_indexes:
cls._add_operator(cls._remove_index(old_model, old_index), upgrade)
for new_unique in new_unique_together:
if new_unique not in old_unique_together:
cls._add_operator(cls._add_index(new_model, new_unique, unique=True), upgrade)
for old_unique in old_unique_together:
if old_unique not in new_unique_together:
cls._add_operator(cls._remove_index(old_model, old_unique, unique=True), upgrade)
@classmethod
def drop_m2m(cls, table_name: str):
return cls.ddl.drop_m2m(table_name)
@classmethod
def _resolve_fk_fields_name(cls, model: Type[Model], fields_name: Tuple[str]):
ret = []
for field_name in fields_name:
if field_name in model._meta.fk_fields:
field = model._meta.fields_map[field_name]
if field.source_field:
ret.append(field.source_field)
elif field_name in model._meta.fk_fields:
ret.append(field_name + "_id")
else:
ret.append(field_name)
return ret
@classmethod
def _remove_index(cls, model: Type[Model], fields_name: Tuple[str], unique=False):
def _drop_index(cls, model: Type[Model], fields_name: Union[Tuple[str], Index], unique=False):
if isinstance(fields_name, Index):
return cls.ddl.drop_index_by_name(
model, fields_name.index_name(cls.ddl.schema_generator, model)
)
fields_name = cls._resolve_fk_fields_name(model, fields_name)
return cls.ddl.drop_index(model, fields_name, unique)
@classmethod
def _add_index(cls, model: Type[Model], fields_name: Tuple[str], unique=False):
def _add_index(cls, model: Type[Model], fields_name: Union[Tuple[str], Index], unique=False):
if isinstance(fields_name, Index):
return fields_name.get_sql(cls.ddl.schema_generator, model, False)
fields_name = cls._resolve_fk_fields_name(model, fields_name)
return cls.ddl.add_index(model, fields_name, unique)
@classmethod
def _exclude_field(cls, field: Field, upgrade=False):
"""
exclude BackwardFKRelation and repeated m2m fields
:param field:
:return:
"""
if isinstance(field, ManyToManyFieldInstance):
through = field.through
if upgrade:
if through in cls._upgrade_m2m:
return True
else:
cls._upgrade_m2m.append(through)
return False
else:
if through in cls._downgrade_m2m:
return True
else:
cls._downgrade_m2m.append(through)
return False
return isinstance(field, (BackwardFKRelation, BackwardOneToOneRelation))
def _add_field(cls, model: Type[Model], field_describe: dict, is_pk: bool = False):
return cls.ddl.add_column(model, field_describe, is_pk)
@classmethod
def _add_field(cls, model: Type[Model], field: Field):
if isinstance(field, ForeignKeyFieldInstance):
return cls.ddl.add_fk(model, field)
if isinstance(field, ManyToManyFieldInstance):
return cls.ddl.create_m2m_table(model, field)
return cls.ddl.add_column(model, field)
def _alter_default(cls, model: Type[Model], field_describe: dict):
return cls.ddl.alter_column_default(model, field_describe)
@classmethod
def _remove_field(cls, model: Type[Model], field: Field):
if isinstance(field, ForeignKeyFieldInstance):
return cls.ddl.drop_fk(model, field)
if isinstance(field, ManyToManyFieldInstance):
return cls.ddl.drop_m2m(field)
return cls.ddl.drop_column(model, field.model_field_name)
def _alter_null(cls, model: Type[Model], field_describe: dict):
return cls.ddl.alter_column_null(model, field_describe)
@classmethod
def _add_fk(cls, model: Type[Model], field: ForeignKeyFieldInstance):
def _set_comment(cls, model: Type[Model], field_describe: dict):
return cls.ddl.set_comment(model, field_describe)
@classmethod
def _modify_field(cls, model: Type[Model], field_describe: dict):
return cls.ddl.modify_column(model, field_describe)
@classmethod
def _drop_fk(cls, model: Type[Model], field_describe: dict, reference_table_describe: dict):
return cls.ddl.drop_fk(model, field_describe, reference_table_describe)
@classmethod
def _remove_field(cls, model: Type[Model], column_name: str):
return cls.ddl.drop_column(model, column_name)
@classmethod
def _rename_field(cls, model: Type[Model], old_field_name: str, new_field_name: str):
return cls.ddl.rename_column(model, old_field_name, new_field_name)
@classmethod
def _change_field(cls, model: Type[Model], old_field_describe: dict, new_field_describe: dict):
db_field_types = new_field_describe.get("db_field_types")
return cls.ddl.change_column(
model,
old_field_describe.get("db_column"),
new_field_describe.get("db_column"),
db_field_types.get(cls.dialect) or db_field_types.get(""),
)
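The db_field_types lookup above picks the dialect-specific column type and falls back to the generic one; using a describe() fragment of the kind shown in the test fixtures later in this diff:

db_field_types = {"": "TIMESTAMP", "mysql": "DATETIME(6)", "postgres": "TIMESTAMPTZ"}
column_type = db_field_types.get("mysql") or db_field_types.get("")  # -> "DATETIME(6)"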
@classmethod
def _add_fk(cls, model: Type[Model], field_describe: dict, reference_table_describe: dict):
"""
add fk
:param model:
:param field:
:param field_describe:
:param reference_table_describe:
:return:
"""
return cls.ddl.add_fk(model, field)
@classmethod
def _remove_fk(cls, model: Type[Model], field: ForeignKeyFieldInstance):
"""
drop fk
:param model:
:param field:
:return:
"""
return cls.ddl.drop_fk(model, field)
return cls.ddl.add_fk(model, field_describe, reference_table_describe)
@classmethod
def _merge_operators(cls):

@@ -1,9 +1,15 @@
from tortoise import Model, fields
from aerich.coder import decoder, encoder
MAX_VERSION_LENGTH = 255
MAX_APP_LENGTH = 100
class Aerich(Model):
version = fields.CharField(max_length=50)
app = fields.CharField(max_length=20)
version = fields.CharField(max_length=MAX_VERSION_LENGTH)
app = fields.CharField(max_length=MAX_APP_LENGTH)
content = fields.JSONField(encoder=encoder, decoder=decoder)
class Meta:
ordering = ["-id"]

@@ -1,17 +1,44 @@
import importlib
import importlib.util
import os
import re
import sys
from pathlib import Path
from typing import Dict
from asyncclick import BadOptionUsage, Context
from click import BadOptionUsage, ClickException, Context
from tortoise import BaseDBAsyncClient, Tortoise
def get_app_connection_name(config, app) -> str:
def add_src_path(path: str) -> str:
"""
add a folder to sys.path so we can import from there
:param path: path to add
:return: absolute path
"""
if not os.path.isabs(path):
# use the absolute path, otherwise some other things (e.g. __file__) won't work properly
path = os.path.abspath(path)
if not os.path.isdir(path):
raise ClickException(f"Specified source folder does not exist: {path}")
if path not in sys.path:
sys.path.insert(0, path)
return path
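A hypothetical usage of add_src_path, making the dotted path given in [tool.aerich] tortoise_orm importable (folder and module names are illustrative):

import importlib

add_src_path("./src")                            # prepends the absolute path to sys.path
settings = importlib.import_module("settings")   # now resolvable from ./src/settings.py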
def get_app_connection_name(config, app_name: str) -> str:
"""
get connection name
:param config:
:param app:
:param app_name:
:return:
"""
return config.get("apps").get(app).get("default_connection")
app = config.get("apps").get(app_name)
if app:
return app.get("default_connection", "default")
raise BadOptionUsage(
option_name="--app",
message=f'Can\'t get app named "{app_name}"',
)
def get_app_connection(config, app) -> BaseDBAsyncClient:
@@ -34,12 +61,11 @@ def get_tortoise_config(ctx: Context, tortoise_orm: str) -> dict:
splits = tortoise_orm.split(".")
config_path = ".".join(splits[:-1])
tortoise_config = splits[-1]
try:
config_module = importlib.import_module(config_path)
except (ModuleNotFoundError, AttributeError):
raise BadOptionUsage(
ctx=ctx, message=f'No config named "{config_path}"', option_name="--config"
)
except ModuleNotFoundError as e:
raise ClickException(f"Error while importing configuration module: {e}") from None
config = getattr(config_module, tortoise_config, None)
if not config:
@@ -49,3 +75,28 @@ def get_tortoise_config(ctx: Context, tortoise_orm: str) -> dict:
ctx=ctx,
)
return config
def get_models_describe(app: str) -> Dict:
"""
get the describe() output of all models in the app
:param app:
:return:
"""
ret = {}
for model in Tortoise.apps.get(app).values():
describe = model.describe()
ret[describe.get("name")] = describe
return ret
def is_default_function(string: str):
return re.match(r"^<function.+>$", str(string or ""))
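is_default_function detects defaults that were serialized as callables (e.g. the "<function None.now>" value seen in the describe fixtures below); such defaults are skipped when diffing column defaults:

is_default_function("<function None.now>")   # truthy: matches the serialized callable default
is_default_function("CURRENT_TIMESTAMP")     # falsy (None): ordinary defaults are still diffed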
def import_py_file(file: Path):
module_name, file_ext = os.path.splitext(os.path.split(file)[-1])
spec = importlib.util.spec_from_file_location(module_name, file)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
return module
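A minimal sketch of how a generated version module could be loaded with import_py_file (the path and connection are hypothetical):

from pathlib import Path

module = import_py_file(Path("migrations/models/1_20230804103546_update.py"))
# module.upgrade / module.downgrade are coroutines returning the SQL to run, e.g.:
# sql = await module.upgrade(connection)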

aerich/version.py Normal file
@@ -0,0 +1 @@
__version__ = "0.7.2"

@@ -1,11 +1,65 @@
import asyncio
import os
import pytest
from tortoise.contrib.test import finalizer, initializer
from tortoise import Tortoise, expand_db_url, generate_schema_for_client
from tortoise.backends.asyncpg.schema_generator import AsyncpgSchemaGenerator
from tortoise.backends.mysql.schema_generator import MySQLSchemaGenerator
from tortoise.backends.sqlite.schema_generator import SqliteSchemaGenerator
from aerich.ddl.mysql import MysqlDDL
from aerich.ddl.postgres import PostgresDDL
from aerich.ddl.sqlite import SqliteDDL
from aerich.migrate import Migrate
db_url = os.getenv("TEST_DB", "sqlite://:memory:")
db_url_second = os.getenv("TEST_DB_SECOND", "sqlite://:memory:")
tortoise_orm = {
"connections": {
"default": expand_db_url(db_url, True),
"second": expand_db_url(db_url_second, True),
},
"apps": {
"models": {"models": ["tests.models", "aerich.models"], "default_connection": "default"},
"models_second": {"models": ["tests.models_second"], "default_connection": "second"},
},
}
@pytest.fixture(scope="module", autouse=True)
def initialize_tests(request):
db_url = os.getenv("TEST_DB", "sqlite://:memory:")
initializer(["tests.models"], db_url=db_url)
request.addfinalizer(finalizer)
@pytest.fixture(scope="function", autouse=True)
def reset_migrate():
Migrate.upgrade_operators = []
Migrate.downgrade_operators = []
Migrate._upgrade_fk_m2m_index_operators = []
Migrate._downgrade_fk_m2m_index_operators = []
Migrate._upgrade_m2m = []
Migrate._downgrade_m2m = []
@pytest.fixture(scope="session")
def event_loop():
policy = asyncio.get_event_loop_policy()
res = policy.new_event_loop()
asyncio.set_event_loop(res)
res._close = res.close
res.close = lambda: None
yield res
res._close()
@pytest.fixture(scope="session", autouse=True)
async def initialize_tests(event_loop, request):
await Tortoise.init(config=tortoise_orm, _create_db=True)
await generate_schema_for_client(Tortoise.get_connection("default"), safe=True)
client = Tortoise.get_connection("default")
if client.schema_generator is MySQLSchemaGenerator:
Migrate.ddl = MysqlDDL(client)
elif client.schema_generator is SqliteSchemaGenerator:
Migrate.ddl = SqliteDDL(client)
elif client.schema_generator is AsyncpgSchemaGenerator:
Migrate.ddl = PostgresDDL(client)
Migrate.dialect = Migrate.ddl.DIALECT
request.addfinalizer(lambda: event_loop.run_until_complete(Tortoise._drop_databases()))

poetry.lock generated
File diff suppressed because it is too large

@@ -1,32 +1,66 @@
[tool.poetry]
name = "aerich"
version = "0.1.1"
version = "0.7.2"
description = "A database migrations tool for Tortoise ORM."
authors = ["long2ice <long2ice@gmail.com>"]
license = "Apache-2.0"
readme = "README.md"
homepage = "https://github.com/tortoise/aerich"
repository = "https://github.com/tortoise/aerich.git"
documentation = "https://github.com/tortoise/aerich"
keywords = ["migrate", "Tortoise-ORM", "mysql"]
packages = [
{ include = "aerich" }
]
include = ["CHANGELOG.md", "LICENSE", "README.md"]
[tool.poetry.dependencies]
python = "^3.8"
tortoise-orm = {git = "https://github.com/long2ice/tortoise-orm.git", branch = "develop"}
asyncclick = "*"
python = "^3.7"
tortoise-orm = "*"
click = "*"
asyncpg = { version = "*", optional = true }
asyncmy = { version = "^0.2.8rc1", optional = true, allow-prereleases = true }
pydantic = "*"
dictdiffer = "*"
tomlkit = "*"
[tool.poetry.dev-dependencies]
taskipy = "*"
asynctest = "*"
flake8 = "*"
ruff = "*"
isort = "*"
black = "^19.10b0"
black = "*"
pytest = "*"
aiomysql = "*"
asyncpg = "*"
pytest-xdist = "*"
mypy = "*"
pytest-asyncio = "*"
bandit = "*"
pytest-mock = "*"
cryptography = "*"
[tool.taskipy.tasks]
export = "poetry export -f requirements.txt --without-hashes > requirements.txt"
export-dev = "poetry export -f requirements.txt --dev --without-hashes > requirements-dev.txt"
[tool.poetry.extras]
asyncmy = ["asyncmy"]
asyncpg = ["asyncpg"]
[tool.aerich]
tortoise_orm = "conftest.tortoise_orm"
location = "./migrations"
src_folder = "./."
[build-system]
requires = ["poetry>=0.12"]
build-backend = "poetry.masonry.api"
[tool.poetry.scripts]
aerich = "aerich.cli:main"
[tool.black]
line-length = 100
target-version = ['py36', 'py37', 'py38', 'py39']
[tool.pytest.ini_options]
asyncio_mode = 'auto'
[tool.mypy]
pretty = true
ignore_missing_imports = true
[tool.ruff]
ignore = ['E501']

@@ -1,2 +0,0 @@
[pytest]
addopts = -p no:warnings --ignore=src

@@ -1,48 +0,0 @@
aiomysql==0.0.20
aiosqlite==0.13.0
anyio==1.3.0
apipkg==1.5
appdirs==1.4.4
async-generator==1.10
asyncclick==7.0.9
asyncpg==0.20.1
asynctest==0.13.0
atomicwrites==1.4.0; sys_platform == "win32"
attrs==19.3.0
black==19.10b0
cffi==1.14.0
ciso8601==2.1.3; sys_platform != "win32" and implementation_name == "cpython"
click==7.1.2
colorama==0.4.3; sys_platform == "win32"
cryptography==2.9.2
execnet==1.7.1
flake8==3.8.1
iso8601==0.1.12; sys_platform == "win32" or implementation_name != "cpython"
isort==4.3.21
mccabe==0.6.1
more-itertools==8.3.0
mypy==0.770
mypy-extensions==0.4.3
packaging==20.4
pathspec==0.8.0
pluggy==0.13.1
py==1.8.1
pycodestyle==2.6.0
pycparser==2.20
pydantic==1.5.1
pyflakes==2.2.0
pymysql==0.9.2
pyparsing==2.4.7
pypika==0.37.6
pytest==5.4.2
pytest-forked==1.1.3
pytest-xdist==1.32.0
regex==2020.5.14
six==1.14.0
sniffio==1.1.0
taskipy==1.2.1
toml==0.10.1
-e git+https://github.com/long2ice/tortoise-orm.git@1f67b7a0ca1384365d6ff89d9e245e733166d1a6#egg=tortoise-orm
typed-ast==1.4.1
typing-extensions==3.7.4.2
wcwidth==0.1.9

@@ -1,10 +0,0 @@
aiosqlite==0.13.0
anyio==1.3.0
async-generator==1.10
asyncclick==7.0.9
ciso8601==2.1.3; sys_platform != "win32" and implementation_name == "cpython"
iso8601==0.1.12; sys_platform == "win32" or implementation_name != "cpython"
pydantic==1.5.1
pypika==0.37.6
sniffio==1.1.0
typing-extensions==3.7.4.2

@@ -1,47 +0,0 @@
[flake8]
max-line-length = 100
exclude =
ignore = E501,W503,DAR101,DAR201,DAR402
[darglint]
docstring_style=sphinx
[isort]
not_skip=__init__.py
multi_line_output=3
include_trailing_comma=True
force_grid_wrap=0
use_parentheses=True
line_length=100
[tool:pytest]
addopts = -n auto --tb=native -q
[mypy]
pretty = True
ignore_missing_imports = True
check_untyped_defs = True
disallow_subclassing_any = True
disallow_untyped_calls = True
disallow_untyped_defs = False
disallow_incomplete_defs = False
disallow_untyped_decorators = True
no_implicit_optional = True
warn_redundant_casts = True
warn_unused_ignores = True
warn_no_return = True
warn_return_any = False
warn_unused_configs = True
warn_unreachable = True
allow_redefinition = True
strict_equality = True
show_error_context = True
[mypy-tests.*]
check_untyped_defs = False
disallow_untyped_defs = False
disallow_incomplete_defs = False
warn_unreachable = False
[mypy-conftest]
disallow_untyped_defs = False

@@ -1,44 +0,0 @@
import os
import re
from setuptools import find_packages, setup
def version():
ver_str_line = open('aerich/__init__.py', 'rt').read()
mob = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", ver_str_line, re.M)
if not mob:
raise RuntimeError("Unable to find version string")
return mob.group(1)
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as f:
long_description = f.read()
def requirements():
return open('requirements.txt', 'rt').read().splitlines()
setup(
name='aerich',
version=version(),
description='A database migrations tool for Tortoise-ORM.',
author='long2ice',
long_description_content_type='text/x-rst',
long_description=long_description,
author_email='long2ice@gmail.com',
url='https://github.com/long2ice/aerich',
license='MIT License',
packages=find_packages(include=['aerich*']),
include_package_data=True,
zip_safe=True,
entry_points={
'console_scripts': ['aerich = aerich.cli:main'],
},
platforms='any',
keywords=(
'migrate Tortoise-ORM mysql'
),
dependency_links=['https://github.com/long2ice/tortoise-orm.git@develop#egg=tortoise-orm'],
install_requires=requirements(),
)

@@ -1,6 +0,0 @@
TORTOISE_ORM = {
"connections": {"default": "mysql://root:123456@127.0.0.1:3306/test"},
"apps": {
"models": {"models": ["tests.models", "aerich.models"], "default_connection": "default"}
},
}

@@ -1,4 +1,5 @@
import datetime
import uuid
from enum import IntEnum
from tortoise import Model, fields
@@ -23,17 +24,29 @@ class Status(IntEnum):
class User(Model):
username = fields.CharField(max_length=20, unique=True)
password = fields.CharField(max_length=200)
password = fields.CharField(max_length=100)
last_login = fields.DatetimeField(description="Last Login", default=datetime.datetime.now)
is_active = fields.BooleanField(default=True, description="Is Active")
is_superuser = fields.BooleanField(default=False, description="Is SuperUser")
avatar = fields.CharField(max_length=200, default="")
intro = fields.TextField(default="")
longitude = fields.DecimalField(max_digits=10, decimal_places=8)
class Email(Model):
email_id = fields.IntField(pk=True)
email = fields.CharField(max_length=200, index=True)
is_primary = fields.BooleanField(default=False)
address = fields.CharField(max_length=200)
users = fields.ManyToManyField("models.User")
def default_name():
return uuid.uuid4()
class Category(Model):
slug = fields.CharField(max_length=200)
name = fields.CharField(max_length=200)
slug = fields.CharField(max_length=100)
name = fields.CharField(max_length=200, null=True, default=default_name)
user = fields.ForeignKeyField("models.User", description="User")
created_at = fields.DatetimeField(auto_now_add=True)
@@ -41,17 +54,28 @@ class Category(Model):
class Product(Model):
categories = fields.ManyToManyField("models.Category")
name = fields.CharField(max_length=50)
view_num = fields.IntField(description="View Num")
view_num = fields.IntField(description="View Num", default=0)
sort = fields.IntField()
is_reviewed = fields.BooleanField(description="Is Reviewed")
type = fields.IntEnumField(ProductType, description="Product Type")
image = fields.CharField(max_length=200)
type = fields.IntEnumField(
ProductType, description="Product Type", source_field="type_db_alias"
)
pic = fields.CharField(max_length=200)
body = fields.TextField()
created_at = fields.DatetimeField(auto_now_add=True)
class Meta:
unique_together = (("name", "type"),)
indexes = (("name", "type"),)
class Config(Model):
label = fields.CharField(max_length=200)
key = fields.CharField(max_length=20)
value = fields.JSONField()
status: Status = fields.IntEnumField(Status, default=Status.on)
status: Status = fields.IntEnumField(Status)
user = fields.ForeignKeyField("models.User", description="User")
class NewModel(Model):
name = fields.CharField(max_length=50)

tests/models_second.py Normal file
@@ -0,0 +1,65 @@
import datetime
from enum import IntEnum
from tortoise import Model, fields
class ProductType(IntEnum):
article = 1
page = 2
class PermissionAction(IntEnum):
create = 1
delete = 2
update = 3
read = 4
class Status(IntEnum):
on = 1
off = 0
class User(Model):
username = fields.CharField(max_length=20, unique=True)
password = fields.CharField(max_length=200)
last_login = fields.DatetimeField(description="Last Login", default=datetime.datetime.now)
is_active = fields.BooleanField(default=True, description="Is Active")
is_superuser = fields.BooleanField(default=False, description="Is SuperUser")
avatar = fields.CharField(max_length=200, default="")
intro = fields.TextField(default="")
class Email(Model):
email = fields.CharField(max_length=200)
is_primary = fields.BooleanField(default=False)
user = fields.ForeignKeyField("models_second.User", db_constraint=False)
class Category(Model):
slug = fields.CharField(max_length=200)
name = fields.CharField(max_length=200)
user = fields.ForeignKeyField("models_second.User", description="User")
created_at = fields.DatetimeField(auto_now_add=True)
class Product(Model):
categories = fields.ManyToManyField("models_second.Category")
name = fields.CharField(max_length=50)
view_num = fields.IntField(description="View Num")
sort = fields.IntField()
is_reviewed = fields.BooleanField(description="Is Reviewed")
type = fields.IntEnumField(
ProductType, description="Product Type", source_field="type_db_alias"
)
image = fields.CharField(max_length=200)
body = fields.TextField()
created_at = fields.DatetimeField(auto_now_add=True)
class Config(Model):
label = fields.CharField(max_length=200)
key = fields.CharField(max_length=20)
value = fields.JSONField()
status: Status = fields.IntEnumField(Status, default=Status.on)

tests/old_models.py Normal file
@@ -0,0 +1,69 @@
import datetime
from enum import IntEnum
from tortoise import Model, fields
class ProductType(IntEnum):
article = 1
page = 2
class PermissionAction(IntEnum):
create = 1
delete = 2
update = 3
read = 4
class Status(IntEnum):
on = 1
off = 0
class User(Model):
username = fields.CharField(max_length=20)
password = fields.CharField(max_length=200)
last_login = fields.DatetimeField(description="Last Login", default=datetime.datetime.now)
is_active = fields.BooleanField(default=True, description="Is Active")
is_superuser = fields.BooleanField(default=False, description="Is SuperUser")
avatar = fields.CharField(max_length=200, default="")
intro = fields.TextField(default="")
longitude = fields.DecimalField(max_digits=12, decimal_places=9)
class Email(Model):
email = fields.CharField(max_length=200)
is_primary = fields.BooleanField(default=False)
user = fields.ForeignKeyField("models.User", db_constraint=False)
class Category(Model):
slug = fields.CharField(max_length=200)
name = fields.CharField(max_length=200)
user = fields.ForeignKeyField("models.User", description="User")
created_at = fields.DatetimeField(auto_now_add=True)
class Product(Model):
categories = fields.ManyToManyField("models.Category")
name = fields.CharField(max_length=50)
view_num = fields.IntField(description="View Num")
sort = fields.IntField()
is_reviewed = fields.BooleanField(description="Is Reviewed")
type = fields.IntEnumField(
ProductType, description="Product Type", source_field="type_db_alias"
)
image = fields.CharField(max_length=200)
body = fields.TextField()
created_at = fields.DatetimeField(auto_now_add=True)
class Config(Model):
label = fields.CharField(max_length=200)
key = fields.CharField(max_length=20)
value = fields.JSONField()
status: Status = fields.IntEnumField(Status, default=Status.on)
class Meta:
table = "configs"

@@ -1,114 +1,206 @@
from tortoise import Tortoise
from tortoise.backends.asyncpg.schema_generator import AsyncpgSchemaGenerator
from tortoise.backends.mysql.schema_generator import MySQLSchemaGenerator
from tortoise.backends.sqlite.schema_generator import SqliteSchemaGenerator
from tortoise.contrib import test
from aerich.ddl.mysql import MysqlDDL
from aerich.ddl.postgres import PostgresDDL
from aerich.ddl.sqlite import SqliteDDL
from tests.models import Category
from aerich.migrate import Migrate
from tests.models import Category, Product, User
class TestDDL(test.TruncationTestCase):
maxDiff = None
def setUp(self) -> None:
client = Tortoise.get_connection("models")
if client.schema_generator is MySQLSchemaGenerator:
self.ddl = MysqlDDL(client)
elif client.schema_generator is SqliteSchemaGenerator:
self.ddl = SqliteDDL(client)
elif client.schema_generator is AsyncpgSchemaGenerator:
self.ddl = PostgresDDL(client)
def test_create_table(self):
ret = self.ddl.create_table(Category)
if isinstance(self.ddl, MysqlDDL):
self.assertEqual(
ret,
"""CREATE TABLE IF NOT EXISTS `category` (
def test_create_table():
ret = Migrate.ddl.create_table(Category)
if isinstance(Migrate.ddl, MysqlDDL):
assert (
ret
== """CREATE TABLE IF NOT EXISTS `category` (
`id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
`slug` VARCHAR(200) NOT NULL,
`name` VARCHAR(200) NOT NULL,
`slug` VARCHAR(100) NOT NULL,
`name` VARCHAR(200),
`created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6),
`user_id` INT NOT NULL COMMENT 'User',
CONSTRAINT `fk_category_user_e2e3874c` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE
) CHARACTER SET utf8mb4;""",
)
elif isinstance(self.ddl, SqliteDDL):
self.assertEqual(
ret,
"""CREATE TABLE IF NOT EXISTS "category" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
"slug" VARCHAR(200) NOT NULL,
"name" VARCHAR(200) NOT NULL,
"created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
"user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE /* User */
);""",
)
elif isinstance(self.ddl, PostgresDDL):
self.assertEqual(
ret,
"""CREATE TABLE IF NOT EXISTS "category" (
"id" SERIAL NOT NULL PRIMARY KEY,
"slug" VARCHAR(200) NOT NULL,
"name" VARCHAR(200) NOT NULL,
"created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
"user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE
);
COMMENT ON COLUMN "category"."user_id" IS 'User';""",
)
def test_drop_table(self):
ret = self.ddl.drop_table(Category)
self.assertEqual(ret, "DROP TABLE IF EXISTS category")
def test_add_column(self):
ret = self.ddl.add_column(Category, Category._meta.fields_map.get("name"))
if isinstance(self.ddl, MysqlDDL):
self.assertEqual(ret, "ALTER TABLE category ADD `name` VARCHAR(200) NOT NULL")
elif isinstance(self.ddl, PostgresDDL):
self.assertEqual(ret, 'ALTER TABLE category ADD "name" VARCHAR(200) NOT NULL')
elif isinstance(self.ddl, SqliteDDL):
self.assertEqual(ret, 'ALTER TABLE category ADD "name" VARCHAR(200) NOT NULL')
def test_drop_column(self):
ret = self.ddl.drop_column(Category, "name")
self.assertEqual(ret, "ALTER TABLE category DROP COLUMN name")
self.assertEqual(ret, "ALTER TABLE category DROP COLUMN name")
def test_add_index(self):
index = self.ddl.add_index(Category, ["name"])
index_u = self.ddl.add_index(Category, ["name"], True)
if isinstance(self.ddl, MysqlDDL):
self.assertEqual(
index, "ALTER TABLE category ADD INDEX idx_category_name_8b0cb9 (`name`)"
)
self.assertEqual(
index_u, "ALTER TABLE category ADD UNIQUE INDEX uid_category_name_8b0cb9 (`name`)"
)
elif isinstance(self.ddl, SqliteDDL):
self.assertEqual(
index_u, 'ALTER TABLE category ADD UNIQUE INDEX uid_category_name_8b0cb9 ("name")'
)
self.assertEqual(
index_u, 'ALTER TABLE category ADD UNIQUE INDEX uid_category_name_8b0cb9 ("name")'
)
def test_drop_index(self):
ret = self.ddl.drop_index(Category, ["name"])
self.assertEqual(ret, "ALTER TABLE category DROP INDEX idx_category_name_8b0cb9")
ret = self.ddl.drop_index(Category, ["name"], True)
self.assertEqual(ret, "ALTER TABLE category DROP INDEX uid_category_name_8b0cb9")
def test_add_fk(self):
ret = self.ddl.add_fk(Category, Category._meta.fields_map.get("user"))
self.assertEqual(
ret,
"ALTER TABLE category ADD CONSTRAINT `fk_category_user_e2e3874c` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE",
) CHARACTER SET utf8mb4"""
)
def test_drop_fk(self):
ret = self.ddl.drop_fk(Category, Category._meta.fields_map.get("user"))
self.assertEqual(ret, "ALTER TABLE category DROP FOREIGN KEY fk_category_user_e2e3874c")
elif isinstance(Migrate.ddl, SqliteDDL):
assert (
ret
== """CREATE TABLE IF NOT EXISTS "category" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
"slug" VARCHAR(100) NOT NULL,
"name" VARCHAR(200),
"created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
"user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE /* User */
)"""
)
elif isinstance(Migrate.ddl, PostgresDDL):
assert (
ret
== """CREATE TABLE IF NOT EXISTS "category" (
"id" SERIAL NOT NULL PRIMARY KEY,
"slug" VARCHAR(100) NOT NULL,
"name" VARCHAR(200),
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
"user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE
);
COMMENT ON COLUMN "category"."user_id" IS 'User'"""
)
def test_drop_table():
ret = Migrate.ddl.drop_table(Category._meta.db_table)
if isinstance(Migrate.ddl, MysqlDDL):
assert ret == "DROP TABLE IF EXISTS `category`"
else:
assert ret == 'DROP TABLE IF EXISTS "category"'
def test_add_column():
ret = Migrate.ddl.add_column(Category, Category._meta.fields_map.get("name").describe(False))
if isinstance(Migrate.ddl, MysqlDDL):
assert ret == "ALTER TABLE `category` ADD `name` VARCHAR(200)"
else:
assert ret == 'ALTER TABLE "category" ADD "name" VARCHAR(200)'
def test_modify_column():
if isinstance(Migrate.ddl, SqliteDDL):
return
ret0 = Migrate.ddl.modify_column(
Category, Category._meta.fields_map.get("name").describe(False)
)
ret1 = Migrate.ddl.modify_column(User, User._meta.fields_map.get("is_active").describe(False))
if isinstance(Migrate.ddl, MysqlDDL):
assert ret0 == "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200)"
assert (
ret1
== "ALTER TABLE `user` MODIFY COLUMN `is_active` BOOL NOT NULL COMMENT 'Is Active' DEFAULT 1"
)
elif isinstance(Migrate.ddl, PostgresDDL):
assert (
ret0
== 'ALTER TABLE "category" ALTER COLUMN "name" TYPE VARCHAR(200) USING "name"::VARCHAR(200)'
)
assert (
ret1 == 'ALTER TABLE "user" ALTER COLUMN "is_active" TYPE BOOL USING "is_active"::BOOL'
)
def test_alter_column_default():
if isinstance(Migrate.ddl, SqliteDDL):
return
ret = Migrate.ddl.alter_column_default(User, User._meta.fields_map.get("intro").describe(False))
if isinstance(Migrate.ddl, PostgresDDL):
assert ret == 'ALTER TABLE "user" ALTER COLUMN "intro" SET DEFAULT \'\''
elif isinstance(Migrate.ddl, MysqlDDL):
assert ret == "ALTER TABLE `user` ALTER COLUMN `intro` SET DEFAULT ''"
ret = Migrate.ddl.alter_column_default(
Category, Category._meta.fields_map.get("created_at").describe(False)
)
if isinstance(Migrate.ddl, PostgresDDL):
assert (
ret == 'ALTER TABLE "category" ALTER COLUMN "created_at" SET DEFAULT CURRENT_TIMESTAMP'
)
elif isinstance(Migrate.ddl, MysqlDDL):
assert (
ret
== "ALTER TABLE `category` ALTER COLUMN `created_at` SET DEFAULT CURRENT_TIMESTAMP(6)"
)
ret = Migrate.ddl.alter_column_default(
Product, Product._meta.fields_map.get("view_num").describe(False)
)
if isinstance(Migrate.ddl, PostgresDDL):
assert ret == 'ALTER TABLE "product" ALTER COLUMN "view_num" SET DEFAULT 0'
elif isinstance(Migrate.ddl, MysqlDDL):
assert ret == "ALTER TABLE `product` ALTER COLUMN `view_num` SET DEFAULT 0"
def test_alter_column_null():
if isinstance(Migrate.ddl, (SqliteDDL, MysqlDDL)):
return
ret = Migrate.ddl.alter_column_null(
Category, Category._meta.fields_map.get("name").describe(False)
)
if isinstance(Migrate.ddl, PostgresDDL):
assert ret == 'ALTER TABLE "category" ALTER COLUMN "name" DROP NOT NULL'
def test_set_comment():
if isinstance(Migrate.ddl, (SqliteDDL, MysqlDDL)):
return
ret = Migrate.ddl.set_comment(Category, Category._meta.fields_map.get("name").describe(False))
assert ret == 'COMMENT ON COLUMN "category"."name" IS NULL'
ret = Migrate.ddl.set_comment(Category, Category._meta.fields_map.get("user").describe(False))
assert ret == 'COMMENT ON COLUMN "category"."user_id" IS \'User\''
def test_drop_column():
ret = Migrate.ddl.drop_column(Category, "name")
if isinstance(Migrate.ddl, MysqlDDL):
assert ret == "ALTER TABLE `category` DROP COLUMN `name`"
elif isinstance(Migrate.ddl, PostgresDDL):
assert ret == 'ALTER TABLE "category" DROP COLUMN "name"'
def test_add_index():
index = Migrate.ddl.add_index(Category, ["name"])
index_u = Migrate.ddl.add_index(Category, ["name"], True)
if isinstance(Migrate.ddl, MysqlDDL):
assert index == "ALTER TABLE `category` ADD INDEX `idx_category_name_8b0cb9` (`name`)"
assert (
index_u == "ALTER TABLE `category` ADD UNIQUE INDEX `uid_category_name_8b0cb9` (`name`)"
)
elif isinstance(Migrate.ddl, PostgresDDL):
assert index == 'CREATE INDEX "idx_category_name_8b0cb9" ON "category" ("name")'
assert index_u == 'CREATE UNIQUE INDEX "uid_category_name_8b0cb9" ON "category" ("name")'
else:
assert index == 'ALTER TABLE "category" ADD INDEX "idx_category_name_8b0cb9" ("name")'
assert (
index_u == 'ALTER TABLE "category" ADD UNIQUE INDEX "uid_category_name_8b0cb9" ("name")'
)
def test_drop_index():
ret = Migrate.ddl.drop_index(Category, ["name"])
ret_u = Migrate.ddl.drop_index(Category, ["name"], True)
if isinstance(Migrate.ddl, MysqlDDL):
assert ret == "ALTER TABLE `category` DROP INDEX `idx_category_name_8b0cb9`"
assert ret_u == "ALTER TABLE `category` DROP INDEX `uid_category_name_8b0cb9`"
elif isinstance(Migrate.ddl, PostgresDDL):
assert ret == 'DROP INDEX "idx_category_name_8b0cb9"'
assert ret_u == 'DROP INDEX "uid_category_name_8b0cb9"'
else:
assert ret == 'ALTER TABLE "category" DROP INDEX "idx_category_name_8b0cb9"'
assert ret_u == 'ALTER TABLE "category" DROP INDEX "uid_category_name_8b0cb9"'
def test_add_fk():
ret = Migrate.ddl.add_fk(
Category, Category._meta.fields_map.get("user").describe(False), User.describe(False)
)
if isinstance(Migrate.ddl, MysqlDDL):
assert (
ret
== "ALTER TABLE `category` ADD CONSTRAINT `fk_category_user_e2e3874c` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE"
)
else:
assert (
ret
== 'ALTER TABLE "category" ADD CONSTRAINT "fk_category_user_e2e3874c" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE'
)
def test_drop_fk():
ret = Migrate.ddl.drop_fk(
Category, Category._meta.fields_map.get("user").describe(False), User.describe(False)
)
if isinstance(Migrate.ddl, MysqlDDL):
assert ret == "ALTER TABLE `category` DROP FOREIGN KEY `fk_category_user_e2e3874c`"
elif isinstance(Migrate.ddl, PostgresDDL):
assert ret == 'ALTER TABLE "category" DROP CONSTRAINT "fk_category_user_e2e3874c"'
else:
assert ret == 'ALTER TABLE "category" DROP FOREIGN KEY "fk_category_user_e2e3874c"'

tests/test_migrate.py Normal file
@@ -0,0 +1,968 @@
import pytest
from pytest_mock import MockerFixture
from aerich.ddl.mysql import MysqlDDL
from aerich.ddl.postgres import PostgresDDL
from aerich.ddl.sqlite import SqliteDDL
from aerich.exceptions import NotSupportError
from aerich.migrate import Migrate
from aerich.utils import get_models_describe
old_models_describe = {
"models.Category": {
"name": "models.Category",
"app": "models",
"table": "category",
"abstract": False,
"description": None,
"docstring": None,
"unique_together": [],
"indexes": [],
"pk_field": {
"name": "id",
"field_type": "IntField",
"db_column": "id",
"python_type": "int",
"generated": True,
"nullable": False,
"unique": True,
"indexed": True,
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
"db_field_types": {"": "INT"},
},
"data_fields": [
{
"name": "slug",
"field_type": "CharField",
"db_column": "slug",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"max_length": 200},
"db_field_types": {"": "VARCHAR(200)"},
},
{
"name": "name",
"field_type": "CharField",
"db_column": "name",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"max_length": 200},
"db_field_types": {"": "VARCHAR(200)"},
},
{
"name": "created_at",
"field_type": "DatetimeField",
"db_column": "created_at",
"python_type": "datetime.datetime",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"readOnly": True},
"db_field_types": {
"": "TIMESTAMP",
"mysql": "DATETIME(6)",
"postgres": "TIMESTAMPTZ",
},
"auto_now_add": True,
"auto_now": False,
},
{
"name": "user_id",
"field_type": "IntField",
"db_column": "user_id",
"python_type": "int",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": "User",
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
"db_field_types": {"": "INT"},
},
],
"fk_fields": [
{
"name": "user",
"field_type": "ForeignKeyFieldInstance",
"python_type": "models.User",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": "User",
"docstring": None,
"constraints": {},
"raw_field": "user_id",
"on_delete": "CASCADE",
}
],
"backward_fk_fields": [],
"o2o_fields": [],
"backward_o2o_fields": [],
"m2m_fields": [
{
"name": "products",
"field_type": "ManyToManyFieldInstance",
"python_type": "models.Product",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {},
"model_name": "models.Product",
"related_name": "categories",
"forward_key": "product_id",
"backward_key": "category_id",
"through": "product_category",
"on_delete": "CASCADE",
"_generated": True,
}
],
},
"models.Config": {
"name": "models.Config",
"app": "models",
"table": "configs",
"abstract": False,
"description": None,
"docstring": None,
"unique_together": [],
"indexes": [],
"pk_field": {
"name": "id",
"field_type": "IntField",
"db_column": "id",
"python_type": "int",
"generated": True,
"nullable": False,
"unique": True,
"indexed": True,
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
"db_field_types": {"": "INT"},
},
"data_fields": [
{
"name": "label",
"field_type": "CharField",
"db_column": "label",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"max_length": 200},
"db_field_types": {"": "VARCHAR(200)"},
},
{
"name": "key",
"field_type": "CharField",
"db_column": "key",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"max_length": 20},
"db_field_types": {"": "VARCHAR(20)"},
},
{
"name": "value",
"field_type": "JSONField",
"db_column": "value",
"python_type": "Union[dict, list]",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {},
"db_field_types": {"": "TEXT", "postgres": "JSONB"},
},
{
"name": "status",
"field_type": "IntEnumFieldInstance",
"db_column": "status",
"python_type": "int",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": 1,
"description": "on: 1\noff: 0",
"docstring": None,
"constraints": {"ge": -32768, "le": 32767},
"db_field_types": {"": "SMALLINT"},
},
],
"fk_fields": [],
"backward_fk_fields": [],
"o2o_fields": [],
"backward_o2o_fields": [],
"m2m_fields": [],
},
"models.Email": {
"name": "models.Email",
"app": "models",
"table": "email",
"abstract": False,
"description": None,
"docstring": None,
"unique_together": [],
"indexes": [],
"pk_field": {
"name": "id",
"field_type": "IntField",
"db_column": "id",
"python_type": "int",
"generated": True,
"nullable": False,
"unique": True,
"indexed": True,
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
"db_field_types": {"": "INT"},
},
"data_fields": [
{
"name": "email",
"field_type": "CharField",
"db_column": "email",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"max_length": 200},
"db_field_types": {"": "VARCHAR(200)"},
},
{
"name": "is_primary",
"field_type": "BooleanField",
"db_column": "is_primary",
"python_type": "bool",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": False,
"description": None,
"docstring": None,
"constraints": {},
"db_field_types": {"": "BOOL", "sqlite": "INT"},
},
{
"name": "user_id",
"field_type": "IntField",
"db_column": "user_id",
"python_type": "int",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
"db_field_types": {"": "INT"},
},
],
"fk_fields": [
{
"name": "user",
"field_type": "ForeignKeyFieldInstance",
"python_type": "models.User",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {},
"raw_field": "user_id",
"on_delete": "CASCADE",
}
],
"backward_fk_fields": [],
"o2o_fields": [],
"backward_o2o_fields": [],
"m2m_fields": [],
},
"models.Product": {
"name": "models.Product",
"app": "models",
"table": "product",
"abstract": False,
"description": None,
"docstring": None,
"unique_together": [],
"indexes": [],
"pk_field": {
"name": "id",
"field_type": "IntField",
"db_column": "id",
"python_type": "int",
"generated": True,
"nullable": False,
"unique": True,
"indexed": True,
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
"db_field_types": {"": "INT"},
},
"data_fields": [
{
"name": "name",
"field_type": "CharField",
"db_column": "name",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"max_length": 50},
"db_field_types": {"": "VARCHAR(50)"},
},
{
"name": "view_num",
"field_type": "IntField",
"db_column": "view_num",
"python_type": "int",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": "View Num",
"docstring": None,
"constraints": {"ge": -2147483648, "le": 2147483647},
"db_field_types": {"": "INT"},
},
{
"name": "sort",
"field_type": "IntField",
"db_column": "sort",
"python_type": "int",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": -2147483648, "le": 2147483647},
"db_field_types": {"": "INT"},
},
{
"name": "is_reviewed",
"field_type": "BooleanField",
"db_column": "is_reviewed",
"python_type": "bool",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": "Is Reviewed",
"docstring": None,
"constraints": {},
"db_field_types": {"": "BOOL", "sqlite": "INT"},
},
{
"name": "type",
"field_type": "IntEnumFieldInstance",
"db_column": "type_db_alias",
"python_type": "int",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": "Product Type",
"docstring": None,
"constraints": {"ge": -32768, "le": 32767},
"db_field_types": {"": "SMALLINT"},
},
{
"name": "image",
"field_type": "CharField",
"db_column": "image",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"max_length": 200},
"db_field_types": {"": "VARCHAR(200)"},
},
{
"name": "body",
"field_type": "TextField",
"db_column": "body",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {},
"db_field_types": {"": "TEXT", "mysql": "LONGTEXT"},
},
{
"name": "created_at",
"field_type": "DatetimeField",
"db_column": "created_at",
"python_type": "datetime.datetime",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"readOnly": True},
"db_field_types": {
"": "TIMESTAMP",
"mysql": "DATETIME(6)",
"postgres": "TIMESTAMPTZ",
},
"auto_now_add": True,
"auto_now": False,
},
],
"fk_fields": [],
"backward_fk_fields": [],
"o2o_fields": [],
"backward_o2o_fields": [],
"m2m_fields": [
{
"name": "categories",
"field_type": "ManyToManyFieldInstance",
"python_type": "models.Category",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {},
"model_name": "models.Category",
"related_name": "products",
"forward_key": "category_id",
"backward_key": "product_id",
"through": "product_category",
"on_delete": "CASCADE",
"_generated": False,
}
],
},
"models.User": {
"name": "models.User",
"app": "models",
"table": "user",
"abstract": False,
"description": None,
"docstring": None,
"unique_together": [],
"indexes": [],
"pk_field": {
"name": "id",
"field_type": "IntField",
"db_column": "id",
"python_type": "int",
"generated": True,
"nullable": False,
"unique": True,
"indexed": True,
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
"db_field_types": {"": "INT"},
},
"data_fields": [
{
"name": "username",
"field_type": "CharField",
"db_column": "username",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"max_length": 20},
"db_field_types": {"": "VARCHAR(20)"},
},
{
"name": "password",
"field_type": "CharField",
"db_column": "password",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"max_length": 200},
"db_field_types": {"": "VARCHAR(200)"},
},
{
"name": "last_login",
"field_type": "DatetimeField",
"db_column": "last_login",
"python_type": "datetime.datetime",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": "<function None.now>",
"description": "Last Login",
"docstring": None,
"constraints": {},
"db_field_types": {
"": "TIMESTAMP",
"mysql": "DATETIME(6)",
"postgres": "TIMESTAMPTZ",
},
"auto_now_add": False,
"auto_now": False,
},
{
"name": "is_active",
"field_type": "BooleanField",
"db_column": "is_active",
"python_type": "bool",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": True,
"description": "Is Active",
"docstring": None,
"constraints": {},
"db_field_types": {"": "BOOL", "sqlite": "INT"},
},
{
"name": "is_superuser",
"field_type": "BooleanField",
"db_column": "is_superuser",
"python_type": "bool",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": False,
"description": "Is SuperUser",
"docstring": None,
"constraints": {},
"db_field_types": {"": "BOOL", "sqlite": "INT"},
},
{
"name": "avatar",
"field_type": "CharField",
"db_column": "avatar",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": "",
"description": None,
"docstring": None,
"constraints": {"max_length": 200},
"db_field_types": {"": "VARCHAR(200)"},
},
{
"name": "intro",
"field_type": "TextField",
"db_column": "intro",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": "",
"description": None,
"docstring": None,
"constraints": {},
"db_field_types": {"": "TEXT", "mysql": "LONGTEXT"},
},
{
"name": "longitude",
"unique": False,
"default": None,
"indexed": False,
"nullable": False,
"db_column": "longitude",
"docstring": None,
"generated": False,
"field_type": "DecimalField",
"constraints": {},
"description": None,
"python_type": "decimal.Decimal",
"db_field_types": {"": "DECIMAL(12,9)", "sqlite": "VARCHAR(40)"},
},
],
"fk_fields": [],
"backward_fk_fields": [
{
"name": "categorys",
"field_type": "BackwardFKRelation",
"python_type": "models.Category",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": "User",
"docstring": None,
"constraints": {},
},
{
"name": "emails",
"field_type": "BackwardFKRelation",
"python_type": "models.Email",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {},
},
],
"o2o_fields": [],
"backward_o2o_fields": [],
"m2m_fields": [],
},
"models.Aerich": {
"name": "models.Aerich",
"app": "models",
"table": "aerich",
"abstract": False,
"description": None,
"docstring": None,
"unique_together": [],
"indexes": [],
"pk_field": {
"name": "id",
"field_type": "IntField",
"db_column": "id",
"python_type": "int",
"generated": True,
"nullable": False,
"unique": True,
"indexed": True,
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
"db_field_types": {"": "INT"},
},
"data_fields": [
{
"name": "version",
"field_type": "CharField",
"db_column": "version",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"max_length": 255},
"db_field_types": {"": "VARCHAR(255)"},
},
{
"name": "app",
"field_type": "CharField",
"db_column": "app",
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"max_length": 20},
"db_field_types": {"": "VARCHAR(20)"},
},
{
"name": "content",
"field_type": "JSONField",
"db_column": "content",
"python_type": "Union[dict, list]",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {},
"db_field_types": {"": "TEXT", "postgres": "JSONB"},
},
],
"fk_fields": [],
"backward_fk_fields": [],
"o2o_fields": [],
"backward_o2o_fields": [],
"m2m_fields": [],
},
}
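

# Illustrative note (an assumption about the surrounding fixtures, not verified here):
# each entry in `old_models_describe` mirrors the dict Tortoise ORM returns from
# `Model.describe()`, and `get_models_describe("models")` used below is assumed to
# collect the same shape for the *current* models, roughly
#
#     {m.describe()["name"]: m.describe() for m in Tortoise.apps["models"].values()}
#
# so the migration diff is computed purely between two such snapshots.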
def test_migrate(mocker: MockerFixture):
"""
models.py diff with old_models.py
- change email pk: id -> email_id
- add field: Email.address
- add fk: Config.user
- drop fk: Email.user
- drop field: User.avatar
- add index: Email.email
- add many to many: Email.users
- remove unique: User.username
- change column: length User.password
- add unique_together: (name,type) of Product
- alter default: Config.status
- rename column: Product.image -> Product.pic
"""
mocker.patch("click.prompt", side_effect=(True,))
models_describe = get_models_describe("models")
Migrate.app = "models"
if isinstance(Migrate.ddl, SqliteDDL):
with pytest.raises(NotSupportError):
Migrate.diff_models(old_models_describe, models_describe)
Migrate.diff_models(models_describe, old_models_describe, False)
else:
Migrate.diff_models(old_models_describe, models_describe)
Migrate.diff_models(models_describe, old_models_describe, False)
Migrate._merge_operators()
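    # A hedged reading of the calls above: diff_models(old, new) is expected to
    # populate Migrate.upgrade_operators, diff_models(new, old, False) to populate
    # Migrate.downgrade_operators, and _merge_operators() to consolidate the
    # collected statements before they are asserted per dialect.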
if isinstance(Migrate.ddl, MysqlDDL):
expected_upgrade_operators = {
"ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200)",
"ALTER TABLE `category` MODIFY COLUMN `slug` VARCHAR(100) NOT NULL",
"ALTER TABLE `config` ADD `user_id` INT NOT NULL COMMENT 'User'",
"ALTER TABLE `config` ADD CONSTRAINT `fk_config_user_17daa970` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE",
"ALTER TABLE `config` ALTER COLUMN `status` DROP DEFAULT",
"ALTER TABLE `config` MODIFY COLUMN `value` JSON NOT NULL",
"ALTER TABLE `email` ADD `address` VARCHAR(200) NOT NULL",
"ALTER TABLE `email` DROP COLUMN `user_id`",
"ALTER TABLE `configs` RENAME TO `config`",
"ALTER TABLE `product` RENAME COLUMN `image` TO `pic`",
"ALTER TABLE `email` RENAME COLUMN `id` TO `email_id`",
"ALTER TABLE `product` ADD INDEX `idx_product_name_869427` (`name`, `type_db_alias`)",
"ALTER TABLE `email` ADD INDEX `idx_email_email_4a1a33` (`email`)",
"ALTER TABLE `product` ADD UNIQUE INDEX `uid_product_name_869427` (`name`, `type_db_alias`)",
"ALTER TABLE `product` ALTER COLUMN `view_num` SET DEFAULT 0",
"ALTER TABLE `product` MODIFY COLUMN `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6)",
"ALTER TABLE `product` MODIFY COLUMN `is_reviewed` BOOL NOT NULL COMMENT 'Is Reviewed'",
"ALTER TABLE `user` DROP COLUMN `avatar`",
"ALTER TABLE `user` MODIFY COLUMN `password` VARCHAR(100) NOT NULL",
"ALTER TABLE `user` MODIFY COLUMN `intro` LONGTEXT NOT NULL",
"ALTER TABLE `user` MODIFY COLUMN `last_login` DATETIME(6) NOT NULL COMMENT 'Last Login'",
"ALTER TABLE `user` MODIFY COLUMN `is_active` BOOL NOT NULL COMMENT 'Is Active' DEFAULT 1",
"ALTER TABLE `user` MODIFY COLUMN `is_superuser` BOOL NOT NULL COMMENT 'Is SuperUser' DEFAULT 0",
"ALTER TABLE `user` MODIFY COLUMN `longitude` DECIMAL(10,8) NOT NULL",
"ALTER TABLE `user` ADD UNIQUE INDEX `uid_user_usernam_9987ab` (`username`)",
"CREATE TABLE `email_user` (\n `email_id` INT NOT NULL REFERENCES `email` (`email_id`) ON DELETE CASCADE,\n `user_id` INT NOT NULL REFERENCES `user` (`id`) ON DELETE CASCADE\n) CHARACTER SET utf8mb4",
"CREATE TABLE IF NOT EXISTS `newmodel` (\n `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT,\n `name` VARCHAR(50) NOT NULL\n) CHARACTER SET utf8mb4",
"ALTER TABLE `category` MODIFY COLUMN `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6)",
"ALTER TABLE `product` MODIFY COLUMN `body` LONGTEXT NOT NULL",
"ALTER TABLE `email` MODIFY COLUMN `is_primary` BOOL NOT NULL DEFAULT 0",
}
expected_downgrade_operators = {
"ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200) NOT NULL",
"ALTER TABLE `category` MODIFY COLUMN `slug` VARCHAR(200) NOT NULL",
"ALTER TABLE `config` DROP COLUMN `user_id`",
"ALTER TABLE `config` DROP FOREIGN KEY `fk_config_user_17daa970`",
"ALTER TABLE `config` ALTER COLUMN `status` SET DEFAULT 1",
"ALTER TABLE `email` ADD `user_id` INT NOT NULL",
"ALTER TABLE `email` DROP COLUMN `address`",
"ALTER TABLE `config` RENAME TO `configs`",
"ALTER TABLE `product` RENAME COLUMN `pic` TO `image`",
"ALTER TABLE `email` RENAME COLUMN `email_id` TO `id`",
"ALTER TABLE `product` DROP INDEX `idx_product_name_869427`",
"ALTER TABLE `email` DROP INDEX `idx_email_email_4a1a33`",
"ALTER TABLE `product` DROP INDEX `uid_product_name_869427`",
"ALTER TABLE `product` ALTER COLUMN `view_num` DROP DEFAULT",
"ALTER TABLE `user` ADD `avatar` VARCHAR(200) NOT NULL DEFAULT ''",
"ALTER TABLE `user` DROP INDEX `idx_user_usernam_9987ab`",
"ALTER TABLE `user` MODIFY COLUMN `password` VARCHAR(200) NOT NULL",
"DROP TABLE IF EXISTS `email_user`",
"DROP TABLE IF EXISTS `newmodel`",
"ALTER TABLE `user` MODIFY COLUMN `intro` LONGTEXT NOT NULL",
"ALTER TABLE `config` MODIFY COLUMN `value` TEXT NOT NULL",
"ALTER TABLE `category` MODIFY COLUMN `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6)",
"ALTER TABLE `product` MODIFY COLUMN `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6)",
"ALTER TABLE `product` MODIFY COLUMN `is_reviewed` BOOL NOT NULL COMMENT 'Is Reviewed'",
"ALTER TABLE `user` MODIFY COLUMN `last_login` DATETIME(6) NOT NULL COMMENT 'Last Login'",
"ALTER TABLE `user` MODIFY COLUMN `is_active` BOOL NOT NULL COMMENT 'Is Active' DEFAULT 1",
"ALTER TABLE `user` MODIFY COLUMN `is_superuser` BOOL NOT NULL COMMENT 'Is SuperUser' DEFAULT 0",
"ALTER TABLE `user` MODIFY COLUMN `longitude` DECIMAL(12,9) NOT NULL",
"ALTER TABLE `product` MODIFY COLUMN `body` LONGTEXT NOT NULL",
"ALTER TABLE `email` MODIFY COLUMN `is_primary` BOOL NOT NULL DEFAULT 0",
}
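        # The comparisons below are order-insensitive: an empty symmetric_difference
        # between the collected and expected operator sets is equivalent to asserting
        # set equality.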
assert not set(Migrate.upgrade_operators).symmetric_difference(expected_upgrade_operators)
assert not set(Migrate.downgrade_operators).symmetric_difference(
expected_downgrade_operators
)
elif isinstance(Migrate.ddl, PostgresDDL):
expected_upgrade_operators = {
'ALTER TABLE "category" ALTER COLUMN "name" DROP NOT NULL',
'ALTER TABLE "category" ALTER COLUMN "slug" TYPE VARCHAR(100) USING "slug"::VARCHAR(100)',
'ALTER TABLE "category" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ',
'ALTER TABLE "config" ADD "user_id" INT NOT NULL',
'ALTER TABLE "config" ADD CONSTRAINT "fk_config_user_17daa970" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE',
'ALTER TABLE "config" ALTER COLUMN "status" DROP DEFAULT',
'ALTER TABLE "config" ALTER COLUMN "value" TYPE JSONB USING "value"::JSONB',
'ALTER TABLE "configs" RENAME TO "config"',
'ALTER TABLE "email" ADD "address" VARCHAR(200) NOT NULL',
'ALTER TABLE "email" DROP COLUMN "user_id"',
'ALTER TABLE "email" RENAME COLUMN "id" TO "email_id"',
'ALTER TABLE "email" ALTER COLUMN "is_primary" TYPE BOOL USING "is_primary"::BOOL',
'ALTER TABLE "product" ALTER COLUMN "view_num" SET DEFAULT 0',
'ALTER TABLE "product" RENAME COLUMN "image" TO "pic"',
'ALTER TABLE "product" ALTER COLUMN "is_reviewed" TYPE BOOL USING "is_reviewed"::BOOL',
'ALTER TABLE "product" ALTER COLUMN "body" TYPE TEXT USING "body"::TEXT',
'ALTER TABLE "product" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ',
'ALTER TABLE "user" ALTER COLUMN "password" TYPE VARCHAR(100) USING "password"::VARCHAR(100)',
'ALTER TABLE "user" DROP COLUMN "avatar"',
'ALTER TABLE "user" ALTER COLUMN "is_superuser" TYPE BOOL USING "is_superuser"::BOOL',
'ALTER TABLE "user" ALTER COLUMN "last_login" TYPE TIMESTAMPTZ USING "last_login"::TIMESTAMPTZ',
'ALTER TABLE "user" ALTER COLUMN "intro" TYPE TEXT USING "intro"::TEXT',
'ALTER TABLE "user" ALTER COLUMN "is_active" TYPE BOOL USING "is_active"::BOOL',
'ALTER TABLE "user" ALTER COLUMN "longitude" TYPE DECIMAL(10,8) USING "longitude"::DECIMAL(10,8)',
'CREATE INDEX "idx_product_name_869427" ON "product" ("name", "type_db_alias")',
'CREATE INDEX "idx_email_email_4a1a33" ON "email" ("email")',
'CREATE TABLE "email_user" (\n "email_id" INT NOT NULL REFERENCES "email" ("email_id") ON DELETE CASCADE,\n "user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE\n)',
'CREATE TABLE IF NOT EXISTS "newmodel" (\n "id" SERIAL NOT NULL PRIMARY KEY,\n "name" VARCHAR(50) NOT NULL\n);\nCOMMENT ON COLUMN "config"."user_id" IS \'User\'',
'CREATE UNIQUE INDEX "uid_product_name_869427" ON "product" ("name", "type_db_alias")',
'CREATE UNIQUE INDEX "uid_user_usernam_9987ab" ON "user" ("username")',
}
expected_downgrade_operators = {
'ALTER TABLE "category" ALTER COLUMN "name" SET NOT NULL',
'ALTER TABLE "category" ALTER COLUMN "slug" TYPE VARCHAR(200) USING "slug"::VARCHAR(200)',
'ALTER TABLE "category" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ',
'ALTER TABLE "config" ALTER COLUMN "status" SET DEFAULT 1',
'ALTER TABLE "config" DROP COLUMN "user_id"',
'ALTER TABLE "config" DROP CONSTRAINT "fk_config_user_17daa970"',
'ALTER TABLE "config" RENAME TO "configs"',
'ALTER TABLE "config" ALTER COLUMN "value" TYPE JSONB USING "value"::JSONB',
'ALTER TABLE "email" ADD "user_id" INT NOT NULL',
'ALTER TABLE "email" DROP COLUMN "address"',
'ALTER TABLE "email" RENAME COLUMN "email_id" TO "id"',
'ALTER TABLE "email" ALTER COLUMN "is_primary" TYPE BOOL USING "is_primary"::BOOL',
'ALTER TABLE "product" ALTER COLUMN "view_num" DROP DEFAULT',
'ALTER TABLE "product" RENAME COLUMN "pic" TO "image"',
'ALTER TABLE "user" ADD "avatar" VARCHAR(200) NOT NULL DEFAULT \'\'',
'ALTER TABLE "user" ALTER COLUMN "password" TYPE VARCHAR(200) USING "password"::VARCHAR(200)',
'ALTER TABLE "user" ALTER COLUMN "last_login" TYPE TIMESTAMPTZ USING "last_login"::TIMESTAMPTZ',
'ALTER TABLE "user" ALTER COLUMN "is_superuser" TYPE BOOL USING "is_superuser"::BOOL',
'ALTER TABLE "user" ALTER COLUMN "is_active" TYPE BOOL USING "is_active"::BOOL',
'ALTER TABLE "user" ALTER COLUMN "intro" TYPE TEXT USING "intro"::TEXT',
'ALTER TABLE "user" ALTER COLUMN "longitude" TYPE DECIMAL(12,9) USING "longitude"::DECIMAL(12,9)',
'ALTER TABLE "product" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ',
'ALTER TABLE "product" ALTER COLUMN "is_reviewed" TYPE BOOL USING "is_reviewed"::BOOL',
'ALTER TABLE "product" ALTER COLUMN "body" TYPE TEXT USING "body"::TEXT',
'DROP INDEX "idx_product_name_869427"',
'DROP INDEX "idx_email_email_4a1a33"',
'DROP INDEX "idx_user_usernam_9987ab"',
'DROP INDEX "uid_product_name_869427"',
'DROP TABLE IF EXISTS "email_user"',
'DROP TABLE IF EXISTS "newmodel"',
}
assert not set(Migrate.upgrade_operators).symmetric_difference(expected_upgrade_operators)
assert not set(Migrate.downgrade_operators).symmetric_difference(
expected_downgrade_operators
)
elif isinstance(Migrate.ddl, SqliteDDL):
assert Migrate.upgrade_operators == []
assert Migrate.downgrade_operators == []


def test_sort_all_version_files(mocker):
mocker.patch(
"os.listdir",
return_value=[
"1_datetime_update.py",
"11_datetime_update.py",
"10_datetime_update.py",
"2_datetime_update.py",
],
)
Migrate.migrate_location = "."
assert Migrate.get_all_version_files() == [
"1_datetime_update.py",
"2_datetime_update.py",
"10_datetime_update.py",
"11_datetime_update.py",
]
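

# A minimal sketch of the ordering this test expects (an assumption, not
# necessarily aerich's exact implementation of Migrate.get_all_version_files):
# sort version files numerically by the integer prefix before the first
# underscore, e.g.
#     sorted(files, key=lambda name: int(name.split("_")[0]))
# so that "2_..." sorts before "10_...", unlike plain lexicographic order.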

tests/test_utils.py
@@ -0,0 +1,6 @@
from aerich.utils import import_py_file


def test_import_py_file():
    m = import_py_file("aerich/utils.py")
    assert getattr(m, "import_py_file")
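
# Hedged sketch of what a helper like import_py_file typically does (an
# assumption, not necessarily aerich's exact implementation): load a module
# object from an arbitrary file path with importlib, e.g.
#     import importlib.util
#     spec = importlib.util.spec_from_file_location("loaded_module", path)
#     module = importlib.util.module_from_spec(spec)
#     spec.loader.exec_module(module)
# which is why importing "aerich/utils.py" above yields a module that exposes
# import_py_file itself.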