Compare commits
836 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 04c26acdd6 | |
| | d0a15a8924 | |
| | 2215e2746b | |
| | 0e6bcbc030 | |
| | 232071f8f4 | |
| | fc0fb31d43 | |
| | 1bd8cd803e | |
| | ef57a58155 | |
| | 9680259904 | |
| | 49a4d23371 | |
| | b9d370c885 | |
| | e5a2714baf | |
| | ff596fcb7e | |
| | f0fad6df19 | |
| | da173cf0e2 | |
| | 1669f0c5a4 | |
| | b045925efe | |
| | b3ce65453a | |
| | 50d891cb7b | |
| | e31f9150d2 | |
| | 74ceb9703b | |
| | 58a3c6de03 | |
| | 86ad8d119d | |
| | 34d273015c | |
| | 7147043d63 | |
| | b9b536133d | |
| | 8fd969aba9 | |
| | f244207168 | |
| | 0620ac5641 | |
| | 3b9a167022 | |
| | b479bb7c6b | |
| | 8ef771912d | |
| | 2d1c9afbb7 | |
| | 9ff5d8426c | |
| | 467e9c3ddf | |
| | 0d5e028c55 | |
| | 5858ea1bf0 | |
| | 1f220b4eaf | |
| | 97c99ca40d | |
| | 80a3b1c88c | |
| | 68447af127 | |
| | d033e3b133 | |
| | 4428842e77 | |
| | f38cc6edd3 | |
| | aeb4f8f4da | |
| | 1b7c2085c9 | |
| | 48b979599f | |
| | af3d3b7ee6 | |
| | 56fe126f3a | |
| | 04905d4b37 | |
| | 460df112f4 | |
| | 772096ec55 | |
| | 98d64f41c6 | |
| | 9a3bca8ab6 | |
| | 5781753cc8 | |
| | fd3699a519 | |
| | 4f6a24411d | |
| | de3888a48b | |
| | 700fe80a00 | |
| | 49e33b978d | |
| | 81197d6061 | |
| | aa368be4d3 | |
| | 0f1fce4a7b | |
| | cc591a634a | |
| | 6e332e782b | |
| | 0e9920b190 | |
| | fd35df07c4 | |
| | 8f3d21c312 | |
| | 7b772e3a4a | |
| | 59438a4768 | |
| | fe9f7f1f80 | |
| | 6b5231265c | |
| | 0014346de1 | |
| | bc0e04d833 | |
| | dd11911ed7 | |
| | 0b3feedf94 | |
| | 65b8cfc96a | |
| | 2e284b93b8 | |
| | c28bb517cb | |
| | aa3ff39ef8 | |
| | 149fb953e7 | |
| | bf12621ce9 | |
| | 8a1a68ea7c | |
| | eabb8f60f5 | |
| | c00a378776 | |
| | ee6ef1ff4b | |
| | fb8f02d0c0 | |
| | a025199294 | |
| | 87babaaa30 | |
| | a4fff15491 | |
| | a190dfe2c4 | |
| | 3926473917 | |
| | 9ffe0bcdee | |
| | 4fa3134294 | |
| | 92f6fce77d | |
| | b1a2cf061d | |
| | 0a05c1f590 | |
| | 7dbc217768 | |
| | bf411ab2ca | |
| | 277b827d4d | |
| | e0bec881bc | |
| | cc5e2ba054 | |
| | 904fcd1a0a | |
| | 2ec454447f | |
| | ecd297e227 | |
| | 079ee3c191 | |
| | f2638ecd02 | |
| | ad6ff819fe | |
| | 48357640c6 | |
| | e6c2169f76 | |
| | 1d17dc4663 | |
| | eeac3bd2e6 | |
| | 3f5a15d236 | |
| | 91493a1e79 | |
| | 0c274908ec | |
| | 338c40b5d5 | |
| | fc3ccf9606 | |
| | 746faceb5c | |
| | 8c3058d99b | |
| | eb56fb9bda | |
| | 161493c0d2 | |
| | cb9f329d11 | |
| | 03af784ebe | |
| | e5f6e4584a | |
| | 79f9f223d0 | |
| | 0bc18cd6e1 | |
| | 30a3c6a5b7 | |
| | 90c5d83f84 | |
| | d8b8ff6851 | |
| | ee664f0c90 | |
| | f8d371229e | |
| | 94a7e813b1 | |
| | 8ef7213426 | |
| | 2f4464ead5 | |
| | 89b93461ac | |
| | 9e40f3ae83 | |
| | f4962fbc40 | |
| | c9d53ca5d5 | |
| | 65f50fd713 | |
| | bf1d04e399 | |
| | 5a8e5e5a40 | |
| | f3919dd839 | |
| | 9f82a02ddf | |
| | 015a36c85f | |
| | fbd3388a59 | |
| | d8a52d68c5 | |
| | 4286708e2e | |
| | e362d089e1 | |
| | 6b657886a5 | |
| | eb16945147 | |
| | 38047ca992 | |
| | c801e79d4b | |
| | 3fca3739de | |
| | c218c8bb6c | |
| | 0bbc05995a | |
| | 3adb67901b | |
| | d4350e7da4 | |
| | 4665658145 | |
| | 0d289fd5a1 | |
| | aabc18755c | |
| | 1f2a5db016 | |
| | ff40f66291 | |
| | 7f77084e0e | |
| | aca4de728e | |
| | 9e7ca43cad | |
| | 7116dec74a | |
| | a5302b870b | |
| | 604e9974b6 | |
| | 3e1c83f8fa | |
| | e431e27cb2 | |
| | 4f188655d0 | |
| | 194b0cac88 | |
| | 7b4175fc5c | |
| | adb5f74ddb | |
| | 107a1c34c8 | |
| | dc7da5204f | |
| | 0301bca176 | |
| | 49f9bca23b | |
| | 31498bd7dd | |
| | 1698f398eb | |
| | 4275c2d7b7 | |
| | 22bff8566d | |
| | d8657be320 | |
| | 3db9d58dac | |
| | 3fbe9c3cdd | |
| | 130e9c519c | |
| | 78c9e9745d | |
| | 38ebb5abf4 | |
| | 9b73be26ab | |
| | fd0095b73f | |
| | 226049f66a | |
| | dc1cf88ca6 | |
| | f5f8b730b5 | |
| | e8f6b42316 | |
| | 49b0d73654 | |
| | 394da67cf1 | |
| | ef7da36ac6 | |
| | 1312100bc7 | |
| | 4085bc2152 | |
| | f4d7e72426 | |
| | ece63ad071 | |
| | a9550b8243 | |
| | 43724e40b2 | |
| | 1bfa40e926 | |
| | d493f71c4e | |
| | 87f4d1a323 | |
| | 0a0e6114f5 | |
| | 41d36fa3bf | |
| | 707923e3f5 | |
| | d9b9581df2 | |
| | 463e7c66af | |
| | 2be28a22a7 | |
| | d73f0bb1af | |
| | ce74978b1e | |
| | 2b0157aecd | |
| | f49baf5d90 | |
| | 7cc964c7d8 | |
| | bc77322c2f | |
| | 8913a74a86 | |
| | af35b25d15 | |
| | 476b07af6e | |
| | e2b9a02531 | |
| | 6cc6229066 | |
| | 4c62a060f0 | |
| | 3d80637fa4 | |
| | 68be9fe979 | |
| | 547cd4a3ae | |
| | ee2d50b2d1 | |
| | 15c3ddece8 | |
| | beaa9744b7 | |
| | 8eb51790b5 | |
| | aadc6262ed | |
| | 00ae6298d4 | |
| | ad0669a326 | |
| | 85df76c623 | |
| | 87512246cb | |
| | a3f9016ae9 | |
| | 4e58e9f8d1 | |
| | 7c533394fd | |
| | 333e014f13 | |
| | c0c0efce18 | |
| | beabaee345 | |
| | c937af3919 | |
| | aa4a6ae023 | |
| | b57946ec98 | |
| | 1e110a2c41 | |
| | b234aa48e4 | |
| | 8086576677 | |
| | 03e34299f0 | |
| | 421e3f324f | |
| | a0b803959c | |
| | ff4d57032a | |
| | ba34589065 | |
| | a4d11eef46 | |
| | fda2e2b47a | |
| | d287f480e5 | |
| | d85f0e6226 | |
| | cfb4943986 | |
| | b453a96211 | |
| | 81f9b351b3 | |
| | 4bca3de42f | |
| | 235b1a3679 | |
| | 450658d7ac | |
| | 8e17e42e26 | |
| | 2d6a4c4b90 | |
| | 38703acc29 | |
| | 095217e797 | |
| | 86e965f854 | |
| | 57db68dc04 | |
| | 72de6d67c7 | |
| | b2c3acd025 | |
| | 605de59bd0 | |
| | e0565ddac5 | |
| | 18b68f1b80 | |
| | ea88806630 | |
| | 412bed0f6d | |
| | 53cf26b9af | |
| | d738462139 | |
| | 2fa48cd9e5 | |
| | e64a7a9448 | |
| | 9490ad2bf7 | |
| | 84f3dce492 | |
| | 60c42dddd5 | |
| | f93f9406ee | |
| | 705c55ce24 | |
| | 928770c43a | |
| | 59fbd505a0 | |
| | 1cc20c9770 | |
| | f8f267a880 | |
| | 80ea1f6883 | |
| | 75ee282a3d | |
| | 4edad4601c | |
| | 152b51fd33 | |
| | 66a0fca4ad | |
| | e7c7a66cd1 | |
| | b3dbb87c3c | |
| | 3d45538998 | |
| | 8df9d3fef9 | |
| | 99e660c66d | |
| | aa02f87b69 | |
| | f0d1ee2cb4 | |
| | ca4967311d | |
| | 65eb6ab611 | |
| | 1cb2f7814c | |
| | b5485b16e6 | |
| | 62c8597a3b | |
| | 488604ff2e | |
| | bd88a17b8e | |
| | 8e892dccfe | |
| | c22eb34017 | |
| | dcf3edb03e | |
| | c85b59d3b5 | |
| | 1170de1e8e | |
| | 332bd767d4 | |
| | 0053b30237 | |
| | d44533d956 | |
| | 12d8bd5a22 | |
| | ae326678ec | |
| | 8d31f165c0 | |
| | cfd4d6a161 | |
| | 329f030a41 | |
| | 68dc2925fb | |
| | 0d4e61d489 | |
| | dc7b96a569 | |
| | 50882e5bb0 | |
| | 280a73af3b | |
| | d8c0631dab | |
| | 9166ba91d7 | |
| | 6bc4e602bb | |
| | 45a7520fc3 | |
| | 64c0cace85 | |
| | 82af5e4a19 | |
| | 7e0ba1b335 | |
| | 44b7f792fe | |
| | a3e432eb68 | |
| | 009f9a2b14 | |
| | 2ca905b6e5 | |
| | 3b099f936a | |
| | 4d6ddb070e | |
| | b205314424 | |
| | e83132f32c | |
| | 1b38309d70 | |
| | 6e8196d475 | |
| | 90fecc56dd | |
| | d3d7f0e670 | |
| | 37ffeafeff | |
| | abc159b7b9 | |
| | 648b28876d | |
| | 5b9f2bac87 | |
| | 17151f67c2 | |
| | 5f14d958ac | |
| | bd6c52e025 | |
| | cb77bb6b69 | |
| | 78b240b740 | |
| | 7e30f00178 | |
| | 35310dbc73 | |
| | af82c07acc | |
| | 3f75f30f26 | |
| | f7f0e10d4d | |
| | 091238a2cf | |
| | 0458ef869e | |
| | 0bf08db7b9 | |
| | d3420918cd | |
| | 138e759161 | |
| | f1d6ce7d12 | |
| | ff749a7a0a | |
| | bff78ca8dd | |
| | 81647d67a0 | |
| | d8924ed892 | |
| | 799cdafae6 | |
| | bc0c55e49a | |
| | c61c6a8525 | |
| | 3e764d068c | |
| | ac25f4b98b | |
| | aa6ff8c84a | |
| | 37ca79e9c5 | |
| | 6040b4b494 | |
| | 51ea3e3c6f | |
| | 5a16dda50d | |
| | bbfa978861 | |
| | 54ca7bf09f | |
| | 8bf5370b6c | |
| | ecefa05e03 | |
| | e013494fb2 | |
| | 4853f74dbf | |
| | 6f45ee6813 | |
| | c60ed32f3a | |
| | 178851589d | |
| | 5bcc679194 | |
| | 1e17b5ac66 | |
| | 19f12f3f2f | |
| | 71e8d9a490 | |
| | e3cd553f82 | |
| | b61c8cd104 | |
| | 8f288fe458 | |
| | 02a920feea | |
| | be2c4f2b3c | |
| | 7ac74b1c1f | |
| | 933cb1d5c7 | |
| | 6203e30152 | |
| | 7d94af0e31 | |
| | 564a2b5f1e | |
| | 1dbe7a3163 | |
| | 47f8a126ca | |
| | 693195f70b | |
| | 2267b7e7d7 | |
| | a06e605e67 | |
| | 47c67ecc99 | |
| | 4c4b7cbeae | |
| | ddececbfea | |
| | 71a6f3d1a4 | |
| | e86cf962e9 | |
| | 99a58d5c91 | |
| | eecbb5ca90 | |
| | fbb3bf869c | |
| | b887ea9623 | |
| | c68e3e1238 | |
| | c5080e4030 | |
| | 0d01365751 | |
| | f4a06ad65d | |
| | 05a22d5a54 | |
| | 2424ece0c5 | |
| | 2d02551d0a | |
| | ac416aeeb3 | |
| | d09af430e8 | |
| | 79454b5eed | |
| | 921c1fa412 | |
| | 1aba145bc6 | |
| | 290d9df3eb | |
| | aa76ccdd25 | |
| | abe8070c36 | |
| | 2d28c258fd | |
| | 1338839b52 | |
| | 058203a0ec | |
| | 8fdf664968 | |
| | 50555ec73e | |
| | 951a532a9f | |
| | e940044603 | |
| | babfbb0fcd | |
| | bbed312bdd | |
| | b593764ded | |
| | 483c840fc8 | |
| | de80f0ccff | |
| | d0b87f7f82 | |
| | bf32d3c39a | |
| | bc14f2cdaa | |
| | 06a21e038a | |
| | 4d5eba317e | |
| | d37a30e083 | |
| | 9170eea784 | |
| | 2769967e1e | |
| | 609f50d261 | |
| | 82f0eb1cbc | |
| | b47669403b | |
| | 91899acfe5 | |
| | ffedd33101 | |
| | c9ed930606 | |
| | af292b0ec2 | |
| | 1ead7f9b2b | |
| | 5c91877b69 | |
| | e57d834a0d | |
| | 0578cdb62e | |
| | b661afba01 | |
| | b1002dd4f9 | |
| | 8e69008699 | |
| | f45552f8f8 | |
| | a4fe091a51 | |
| | 216217e2c6 | |
| | 799775b3a7 | |
| | ae0384df29 | |
| | 8f57279dc7 | |
| | e8dbd12f22 | |
| | ca230d28b4 | |
| | c96065b187 | |
| | 2abcf4764d | |
| | 6a4c342e45 | |
| | bb0b1e88ef | |
| | 63c9135184 | |
| | 7fac0ef961 | |
| | 5a2e268160 | |
| | a4e4e8f440 | |
| | b62ce947a6 | |
| | 9538662262 | |
| | 09d7ae4f80 | |
| | d7ded366c7 | |
| | 09c77973a0 | |
| | 22f3c70234 | |
| | 6527b1386f | |
| | baabf97acd | |
| | 97005aca66 | |
| | 6e8ea50c19 | |
| | 1fcd706e11 | |
| | 008bb19b0b | |
| | 023acab779 | |
| | 68e8584520 | |
| | 5d120ebca0 | |
| | f91b89f723 | |
| | 1181b75e16 | |
| | 5f00b4f923 | |
| | 4c31193b82 | |
| | 17fc9d1886 | |
| | d7285d43dd | |
| | aa8a991d20 | |
| | 40ba51ac43 | |
| | d20430a778 | |
| | f08f749cd9 | |
| | a6c04f4f9a | |
| | 15b6c1590f | |
| | 4a8985278d | |
| | 996618a495 | |
| | 1f02d5fbbd | |
| | c58b9f00f0 | |
| | f131b18cbe | |
| | 118a998138 | |
| | 7ad6f036e7 | |
| | 1d29b824a8 | |
| | 3caf2dce28 | |
| | 1fc5b954f2 | |
| | 31d99c0bd2 | |
| | 0ac59c67ea | |
| | 8e8c74c621 | |
| | f996f3df74 | |
| | 9499c97e18 | |
| | c1c81fc07b | |
| | 072e86a2f0 | |
| | 70d6e763b0 | |
| | 15f4d4fee6 | |
| | 82e28dec43 | |
| | b407c0e6c6 | |
| | 27ea01ee05 | |
| | 7ed5829b2c | |
| | 5bf1dd55b1 | |
| | 36aebffcc0 | |
| | 84c42ed58c | |
| | 9634e44343 | |
| | 048a045966 | |
| | a18c8c0eb4 | |
| | 5fb0f46e3f | |
| | 962997ed16 | |
| | daca0ebc14 | |
| | 9ae8fe7c2d | |
| | 1907133f99 | |
| | 4334955e39 | |
| | f00c9dc4d6 | |
| | 7d0687ec73 | |
| | da3773bfe8 | |
| | 6e1c132ee8 | |
| | 24ba35d76f | |
| | 64b63e9d52 | |
| | 7848a82a1c | |
| | 6a843cc8b2 | |
| | ecdb0785a4 | |
| | 9a55caed75 | |
| | 2e01eb87db | |
| | 597b962ad5 | |
| | 7531f533e0 | |
| | 6b9d71554e | |
| | bb1089e03d | |
| | c82f0c937d | |
| | 00d2fd685a | |
| | f28e1b8c90 | |
| | 2b17985a11 | |
| | b392e3102e | |
| | 58b0b18ddd | |
| | 6a9ef319d0 | |
| | cf38ef70cb | |
| | ac64ade10f | |
| | ee85af34d8 | |
| | 9d53ad53e5 | |
| | 9cdc3ebee6 | |
| | 14a5e05d64 | |
| | f7b7d0f79e | |
| | d98f36ceff | |
| | abfabc30c9 | |
| | c1aff7a248 | |
| | e44f71eeb1 | |
| | cb578c84e2 | |
| | 565e1dc0ed | |
| | b1e28d02f7 | |
| | d1467c2f73 | |
| | c439150431 | |
| | 9bb3dfd639 | |
| | 4caa58b9ec | |
| | b5213097e8 | |
| | 61081651e4 | |
| | 4ccfdf051d | |
| | 9f2a9d9cda | |
| | 827de76345 | |
| | fdcaca42ae | |
| | 0744892244 | |
| | b70ffc69df | |
| | 73b12cc32f | |
| | ba6a37f315 | |
| | 6f8be8c8ac | |
| | 68497542b3 | |
| | 3d762fed10 | |
| | 48b849c031 | |
| | 88c4aa2d87 | |
| | fb8c0d8fe3 | |
| | 1a863725d1 | |
| | 7b4245c91c | |
| | 9bd0d6b99d | |
| | b640c766db | |
| | 50ffa8014e | |
| | 7ef688b256 | |
| | b4fe0b35e4 | |
| | a2cbbdf819 | |
| | 35b7efe3f4 | |
| | 7cea2a768f | |
| | 7247b9b68e | |
| | dca837b843 | |
| | c60c2ee8d0 | |
| | 3cdb5b5db2 | |
| | b9cc8a4ca9 | |
| | 28606e9985 | |
| | 5bbe782812 | |
| | d65861cdf7 | |
| | c8df3fd2a7 | |
| | 6cfe6652a3 | |
| | 6b711da69d | |
| | 9b02867293 | |
| | 595cb99b2d | |
| | f0a3445250 | |
| | 6d353dae1e | |
| | 57a38282a9 | |
| | db47604865 | |
| | 2a121fe202 | |
| | 36baff0d7f | |
| | 201f3008b1 | |
| | f4873fee18 | |
| | e02261be6d | |
| | 2919e6765c | |
| | b8fc4d0079 | |
| | 4a46f5f095 | |
| | 3484ceabb8 | |
| | cab659dce6 | |
| | a657f29439 | |
| | 4c054bf316 | |
| | dc7922c38b | |
| | c6c68abfcc | |
| | 6aacb0c898 | |
| | e7000db491 | |
| | fce994ea7f | |
| | 6c6446765e | |
| | 69a99c70c6 | |
| | 56d9f7a8af | |
| | 363aefe399 | |
| | 7fd4f792ba | |
| | 6fbdde63d8 | |
| | b04dc90cdf | |
| | b525c91bd3 | |
| | a32c893078 | |
| | 2c6a744848 | |
| | 4492874d08 | |
| | d3a592e5bf | |
| | cab21b1b21 | |
| | 1319e422ea | |
| | c88ea40b57 | |
| | 3194a37fcb | |
| | 72ebaa52e9 | |
| | 0e00695fc7 | |
| | 48a691e722 | |
| | cf54d6d6f8 | |
| | a03fe234d0 | |
| | d88d40cc08 | |
| | d3b4af116e | |
| | 352b23331b | |
| | bdd6041a5c | |
| | 1894003f8a | |
| | 220513ae42 | |
| | fcbabbe357 | |
| | 3627969fce | |
| | 8807c0dbef | |
| | 23cc9f6ff8 | |
| | e50799e9c4 | |
| | b92c4844eb | |
| | c306d42d08 | |
| | e31558318e | |
| | 78a9420f26 | |
| | b47c5b5bfc | |
| | 28a312accf | |
| | 611094e92e | |
| | 2a8579a6a5 | |
| | 47577f2f47 | |
| | 34e3e45843 | |
| | 364dc9ddfb | |
| | 23324f0f87 | |
| | 17fa9a3b77 | |
| | 424b3ca308 | |
| | 26e2fc8fd4 | |
| | 8e18484898 | |
| | 354cfe0f9c | |
| | 983474b2bd | |
| | 14d861bcbb | |
| | f6cd349a16 | |
| | 8e1c4dec87 | |
| | 18b47e4a73 | |
| | 4f157f50ed | |
| | f44a2f4857 | |
| | c685ace327 | |
| | f23b0faf41 | |
| | e0e2ca7ccd | |
| | 83fe7f7eef | |
| | 1feaa8f2e9 | |
| | 598d6bf4c5 | |
| | 0afd5a40d6 | |
| | 26b70e9ed3 | |
| | a1a93a4bdd | |
| | 4939a7dd7c | |
| | 0fa6610fdb | |
| | b0148e7860 | |
| | 59a06a242d | |
| | ffe902605d | |
| | 556f7e85fc | |
| | 45c86be402 | |
| | bf34f413de | |
| | 9b022b187f | |
| | c3409d64dc | |
| | 3c5c3b5026 | |
| | f240f00d84 | |
| | 68c7764c63 | |
| | adfb039ba6 | |
| | 89416d9856 | |
| | 9b6c972e0f | |
| | 55fc04752a | |
| | 96f0919633 | |
| | 17b140baf4 | |
| | 45c2151d0f | |
| | 1887f5b7e7 | |
| | 708d1c7a32 | |
| | acf8c3015a | |
| | f83ae5789b | |
| | 57ccfcfc1b | |
| | dd0fdcfdd4 | |
| | 5c805be067 | |
| | e423380d7f | |
| | 4d8bebc917 | |
| | 4314fa883f | |
| | d6e39b362b | |
| | f89214f9cf | |
| | d17cac8210 | |
| | aa49283fa9 | |
| | e79ea7a2cf | |
| | 8a1d280f19 | |
| | 6a8eb9562f | |
| | 8f76e1e344 | |
| | 7b9f084e6b | |
| | 5b1693a908 | |
| | fd7c00da49 | |
| | 7fc5ced3af | |
| | a86092fb64 | |
| | 003827e916 | |
| | b15673c525 | |
| | 00363303b1 | |
| | 48fbe890f8 | |
| | 4179877cc7 | |
| | 282b83ac08 | |
| | 193656e71b | |
| | a25d127f36 | |
| | cf9df548ca | |
| | f29b93c762 | |
| | 032ace40d1 | |
| | f74dd1cb3c | |
| | 29889d1e35 | |
| | d6d19c4229 | |
| | ab08e67eaf | |
| | 00bf6ac258 | |
| | b65478e7d9 | |
| | e83b529f1c | |
| | 408274152b | |
| | 8ff82996fb | |
| | d59c4044b7 | |
| | 3574e21e4f | |
| | 5a091956ef | |
| | 14e9c58444 | |
| | bfe5b03c69 | |
| | f96f7f840e | |
| | a3bcf26dce | |
| | a7852a89cc | |
| | 1b0c761fc0 | |
| | 5e4e8d4eda | |
| | bd524d2e1e | |
| | 60fe919992 | |
| | b90063b170 | |
| | d9fce49b08 | |
| | 5dbee2a270 | |
| | 4779106139 | |
| | bf2de81873 | |
| | 28cdedc9aa | |
| | 7e90571404 | |
| | 42bbe63927 | |
| | 7ddbea697e | |
| | b4860de34d | |
| | 576f23d5fb | |
| | 86548fc7bf | |
| | b3b4d992fe | |
| | d72daf5f39 | |
| | 9ad959a478 | |
| | cc00a321da | |
| | de74273108 | |
| | a7658c7573 | |
| | 48a85ee6e0 | |
| | 461b789515 | |
| | b71ff6fbb8 | |
| | 1bcdcce93a | |
| | c09bfca634 | |
| | 36c5f02bfb | |
| | eae6e5d9a1 | |
| | 364813dd73 | |
| | 1a2b1f283b | |
| | a0e5cf4ecc | |
| | 820f7b4d93 | |
| | 727866f090 | |
| | 3d45cdc339 | |
| | 02a557aa67 | |
| | 6da27e5976 | |
| | 19a6e324c4 | |
| | 62eadbc174 | |
| | ae783d4f45 | |
| | 1241a902e3 | |
| | fdba648afb | |
| | 080226dd72 | |
| | 2d76aebb8e | |
| | 8b5df3ca17 | |
| | 6b38ef3c9f | |
| | 15451ff42b | |
| | 6e2db1ced6 | |
| | 5c4ce8754e | |
| | 416486c370 | |
| | 2f075be6f8 | |
| | c4de879b20 | |
| | ee5686e91a | |
| | 2a795e9138 | |
| | 437b11af9a | |
| | 99a5f2cd9d | |
| | 54d8c64ad5 | |
.github/workflows/github-actions.yml — new file (vendored), 143 lines

```yaml
name: MongoengineCI
on:
  # All PR
  pull_request:
  # master branch merge
  push:
    branches:
      - master
  # release tags
  create:
    tags:
      - 'v[0-9]+\.[0-9]+\.[0-9]+*'
env:
  MONGODB_3_6: 3.6.14
  MONGODB_4_0: 4.0.23
  MONGODB_4_2: 4.2
  MONGODB_4_4: 4.4

  PYMONGO_3_4: 3.4
  PYMONGO_3_6: 3.6
  PYMONGO_3_9: 3.9
  PYMONGO_3_11: 3.11

  MAIN_PYTHON_VERSION: 3.7

jobs:
  linting:
    # Run pre-commit (https://pre-commit.com/)
    # which runs pre-configured linter & autoformatter
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python 3.7
        uses: actions/setup-python@v2
        with:
          python-version: 3.7
      - run: bash .github/workflows/install_ci_python_dep.sh
      - run: pre-commit run -a

  test:
    # Test suite run against recent python versions
    # and against a few combination of MongoDB and pymongo
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version: [3.6, 3.7, 3.8, 3.9, pypy3]
        MONGODB: [$MONGODB_4_0]
        PYMONGO: [$PYMONGO_3_11]
        include:
          - python-version: 3.7
            MONGODB: $MONGODB_3_6
            PYMONGO: $PYMONGO_3_9
          - python-version: 3.7
            MONGODB: $MONGODB_4_2
            PYMONGO: $PYMONGO_3_6
          - python-version: 3.7
            MONGODB: $MONGODB_4_4
            PYMONGO: $PYMONGO_3_11
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: install mongo and ci dependencies
        run: |
          bash .github/workflows/install_mongo.sh ${{ matrix.MONGODB }}
          bash .github/workflows/install_ci_python_dep.sh
          bash .github/workflows/start_mongo.sh ${{ matrix.MONGODB }}
      - name: tox dry-run (to pre-install venv)
        run: tox -e $(echo py${{ matrix.python-version }}-mg${{ matrix.PYMONGO }} | tr -d . | sed -e 's/pypypy/pypy/') -- -a "-k=test_ci_placeholder"
      - name: Run test suite
        run: tox -e $(echo py${{ matrix.python-version }}-mg${{ matrix.PYMONGO }} | tr -d . | sed -e 's/pypypy/pypy/') -- -a "--cov=mongoengine"
      - name: Send coverage to Coveralls
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          COVERALLS_SERVICE_NAME: github
        if: ${{ matrix.python-version == env.MAIN_PYTHON_VERSION }}
        run: coveralls

  build_doc_dryrun:
    # ensures that readthedocs can be built continuously
    # to avoid that it breaks when new releases are being created
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: 3.7
      - name: install python dep
        run: |
          pip install -e .
          pip install -r docs/requirements.txt
      - name: build doc
        run: |
          cd docs
          make html-readthedocs

  build-n-publish-dummy:
    runs-on: ubuntu-latest
    needs: [linting, test, build_doc_dryrun]
    if: github.event_name != 'pull_request'
    steps:
      - uses: actions/checkout@master
      - name: Set up Python 3.7
        uses: actions/setup-python@v1
        with:
          python-version: 3.7
      - name: build dummy wheel for test-pypi
        run: |
          pip install wheel
          python setup.py egg_info -b ".dev`date '+%Y%m%d%H%M%S'`" build sdist bdist_wheel
#      - name: publish test-pypi
#        # Although working and recommended, test-pypi has a limit
#        # in the size of projects so it's better to avoid publishing
#        # until there is a way to garbage collect these dummy releases
#        uses: pypa/gh-action-pypi-publish@master
#        with:
#          password: ${{ secrets.test_pypi_token }}
#          repository_url: https://test.pypi.org/legacy/

  build-n-publish:
    runs-on: ubuntu-latest
    needs: [linting, test, build_doc_dryrun, build-n-publish-dummy]
    if: github.event_name == 'create' && startsWith(github.ref, 'refs/tags/v')
    steps:
      - uses: actions/checkout@master
      - name: Set up Python 3.7
        uses: actions/setup-python@v1
        with:
          python-version: 3.7
      # todo separate build from publish
      # https://stackoverflow.com/questions/59349905/which-properties-does-github-event-in-a-github-workflow-have
      - name: build dummy wheel for test-pypi
        run: |
          pip install wheel
          python setup.py sdist bdist_wheel
      - name: publish pypi
        uses: pypa/gh-action-pypi-publish@master
        with:
          password: ${{ secrets.pypi_token }}
```
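The two tox invocations in the workflow derive the tox environment name from the matrix values via a shell pipeline (`tr -d . | sed -e 's/pypypy/pypy/'`). A minimal Python sketch of that mangling, shown only to make the mapping concrete — the `tox_env` helper name is my own illustration, not part of the diff:

```python
def tox_env(python_version: str, pymongo_version: str) -> str:
    # Mirror of: echo py<ver>-mg<pymongo> | tr -d . | sed -e 's/pypypy/pypy/'
    name = f"py{python_version}-mg{pymongo_version}".replace(".", "")
    # "pypy3" becomes "pypypy3" after the py- prefix, so collapse it back.
    return name.replace("pypypy", "pypy")


assert tox_env("3.7", "3.11") == "py37-mg311"
assert tox_env("pypy3", "3.11") == "pypy3-mg311"
```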
.github/workflows/install_ci_python_dep.sh — new file (vendored), 5 lines

```bash
#!/bin/bash
pip install --upgrade pip
pip install coveralls
pip install pre-commit
pip install tox
```
.github/workflows/install_mongo.sh — new file (vendored), 18 lines

```bash
#!/bin/bash

MONGODB=$1

# Mongo > 4.0 follows different name convention for download links
mongo_build=mongodb-linux-x86_64-${MONGODB}

if [[ "$MONGODB" == *"4.2"* ]]; then
    mongo_build=mongodb-linux-x86_64-ubuntu1804-v${MONGODB}-latest
elif [[ "$MONGODB" == *"4.4"* ]]; then
    mongo_build=mongodb-linux-x86_64-ubuntu1804-v${MONGODB}-latest
fi

wget http://fastdl.mongodb.org/linux/$mongo_build.tgz
tar xzf $mongo_build.tgz

mongodb_dir=$(find ${PWD}/ -type d -name "mongodb-linux-x86_64*")
$mongodb_dir/bin/mongod --version
```
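The branching above only changes the archive name: 4.2 and 4.4 use the Ubuntu 18.04 "-latest" naming, while older pinned versions use the generic name. A small Python sketch of the same convention — the `mongo_archive` helper is hypothetical, used here just to make the two cases explicit:

```python
def mongo_archive(version: str) -> str:
    # Same logic as install_mongo.sh: 4.2/4.4 use the ubuntu1804 rolling build,
    # anything else resolves to the exact pinned tarball.
    if "4.2" in version or "4.4" in version:
        return f"mongodb-linux-x86_64-ubuntu1804-v{version}-latest.tgz"
    return f"mongodb-linux-x86_64-{version}.tgz"


assert mongo_archive("4.0.23") == "mongodb-linux-x86_64-4.0.23.tgz"
assert mongo_archive("4.4") == "mongodb-linux-x86_64-ubuntu1804-v4.4-latest.tgz"
```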
.github/workflows/start_mongo.sh — new file (vendored), 9 lines

```bash
#!/bin/bash

MONGODB=$1

mongodb_dir=$(find ${PWD}/ -type d -name "mongodb-linux-x86_64*")

mkdir $mongodb_dir/data
$mongodb_dir/bin/mongod --dbpath $mongodb_dir/data --logpath $mongodb_dir/mongodb.log --fork
mongo --eval 'db.version();' # Make sure mongo is awake
```
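The final `mongo --eval 'db.version();'` line is a liveness check against the freshly forked `mongod`. An equivalent check from Python via PyMongo — an illustrative sketch, not something this diff adds — would be:

```python
from pymongo import MongoClient

# Connect to the default host/port; server_info() round-trips to the server
# and raises ServerSelectionTimeoutError if mongod is not up yet.
client = MongoClient("localhost", 27017, serverSelectionTimeoutMS=2000)
print(client.server_info()["version"])
```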
.gitignore — modified (vendored), 11 changed lines

```
@@ -1,8 +1,15 @@
.*
!.gitignore
*~
*.py[co]
.*.sw[po]
.cache/
.coverage
.coveragerc
.env
.idea/
.pytest_cache/
.tox/
.eggs/
*.egg
docs/.build
docs/_build
@@ -13,8 +20,6 @@ env/
.settings
.project
.pydevproject
tests/test_bugfix.py
htmlcov/
venv
venv3
scratchpad
```
@@ -1,27 +0,0 @@

```bash
#!/bin/bash

sudo apt-get remove mongodb-org-server
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10

if [ "$MONGODB" = "2.4" ]; then
    echo "deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen" | sudo tee /etc/apt/sources.list.d/mongodb.list
    sudo apt-get update
    sudo apt-get install mongodb-10gen=2.4.14
    sudo service mongodb start
elif [ "$MONGODB" = "2.6" ]; then
    echo "deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen" | sudo tee /etc/apt/sources.list.d/mongodb.list
    sudo apt-get update
    sudo apt-get install mongodb-org-server=2.6.12
    # service should be started automatically
elif [ "$MONGODB" = "3.0" ]; then
    echo "deb http://repo.mongodb.org/apt/ubuntu trusty/mongodb-org/3.0 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb.list
    sudo apt-get update
    sudo apt-get install mongodb-org-server=3.0.14
    # service should be started automatically
else
    echo "Invalid MongoDB version, expected 2.4, 2.6, or 3.0."
    exit 1
fi;

mkdir db
1>db/logs mongod --dbpath=db &
```
@@ -1,22 +0,0 @@

```yaml
pylint:
  disable:
    # We use this a lot (e.g. via document._meta)
    - protected-access

  options:
    additional-builtins:
      # add xrange and long as valid built-ins. In Python 3, xrange is
      # translated into range and long is translated into int via 2to3 (see
      # "use_2to3" in setup.py). This should be removed when we drop Python
      # 2 support (which probably won't happen any time soon).
      - xrange
      - long

pyflakes:
  disable:
    # undefined variables are already covered by pylint (and exclude
    # xrange & long)
    - F821

ignore-paths:
  - benchmark.py
```
.pre-commit-config.yaml — new file, 26 lines

```yaml
fail_fast: false
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v3.4.0
    hooks:
      - id: check-merge-conflict
      - id: debug-statements
      - id: trailing-whitespace
      - id: end-of-file-fixer
  - repo: https://github.com/ambv/black
    rev: 21.4b2
    hooks:
      - id: black
  - repo: https://gitlab.com/pycqa/flake8
    rev: 3.9.1
    hooks:
      - id: flake8
  - repo: https://github.com/asottile/pyupgrade
    rev: v2.14.0
    hooks:
      - id: pyupgrade
        args: [--py36-plus]
  - repo: https://github.com/pycqa/isort
    rev: 5.8.0
    hooks:
      - id: isort
```
.readthedocs.yml — new file, 20 lines

```yaml
# .readthedocs.yml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details

# Required
version: 2

# Build documentation in the docs/ directory with Sphinx
sphinx:
  configuration: docs/conf.py

# Optionally set the version of Python and requirements required to build your docs
python:
  version: 3.7
  install:
    - requirements: docs/requirements.txt
    # docs/conf.py is importing mongoengine
    # so mongoengine needs to be installed as well
    - method: setuptools
      path: .
```
.travis.yml — deleted, 101 lines

```yaml
# For full coverage, we'd have to test all supported Python, MongoDB, and
# PyMongo combinations. However, that would result in an overly long build
# with a very large number of jobs, hence we only test a subset of all the
# combinations:
# * MongoDB v2.4 & v3.0 are only tested against Python v2.7 & v3.5.
# * MongoDB v2.4 is tested against PyMongo v2.7 & v3.x.
# * MongoDB v3.0 is tested against PyMongo v3.x.
# * MongoDB v2.6 is currently the "main" version tested against Python v2.7,
#   v3.5, PyPy & PyPy3, and PyMongo v2.7, v2.8 & v3.x.
#
# Reminder: Update README.rst if you change MongoDB versions we test.

language: python

python:
  - 2.7
  - 3.5
  - 3.6
  - pypy

env:
  - MONGODB=2.6 PYMONGO=3.x

matrix:
  # Finish the build as soon as one job fails
  fast_finish: true

  include:
    - python: 2.7
      env: MONGODB=2.4 PYMONGO=3.5
    - python: 2.7
      env: MONGODB=3.0 PYMONGO=3.x
    - python: 3.5
      env: MONGODB=2.4 PYMONGO=3.5
    - python: 3.5
      env: MONGODB=3.0 PYMONGO=3.x
    - python: 3.6
      env: MONGODB=2.4 PYMONGO=3.5
    - python: 3.6
      env: MONGODB=3.0 PYMONGO=3.x

before_install:
  - bash .install_mongodb_on_travis.sh
  - sleep 15  # https://docs.travis-ci.com/user/database-setup/#MongoDB-does-not-immediately-accept-connections
  - mongo --eval 'db.version();'

install:
  - sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev
    libtiff4-dev libjpeg8-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.5-dev tk8.5-dev
    python-tk
  - travis_retry pip install --upgrade pip
  - travis_retry pip install coveralls
  - travis_retry pip install flake8 flake8-import-order
  - travis_retry pip install tox>=1.9
  - travis_retry pip install "virtualenv<14.0.0"  # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32)
  - travis_retry tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test

# Cache dependencies installed via pip
cache: pip

# Run flake8 for py27
before_script:
  - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi

script:
  - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage

# For now only submit coveralls for Python v2.7. Python v3.x currently shows
# 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible
# code in a separate dir and runs tests on that.
after_success:
  - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then coveralls --verbose; fi

notifications:
  irc: irc.freenode.org#mongoengine

# Only run builds on the master branch and GitHub releases (tagged as vX.Y.Z)
branches:
  only:
    - master
    - /^v.*$/

# Whenever a new release is created via GitHub, publish it on PyPI.
deploy:
  provider: pypi
  user: the_drow
  password:
    secure: QMyatmWBnC6ZN3XLW2+fTBDU4LQcp1m/LjR2/0uamyeUzWKdlOoh/Wx5elOgLwt/8N9ppdPeG83ose1jOz69l5G0MUMjv8n/RIcMFSpCT59tGYqn3kh55b0cIZXFT9ar+5cxlif6a5rS72IHm5li7QQyxexJIII6Uxp0kpvUmek=

  # create a source distribution and a pure python wheel for faster installs
  distributions: "sdist bdist_wheel"

  # only deploy on tagged commits (aka GitHub releases) and only for the
  # parent repo's builds running Python 2.7 along with PyMongo v3.x (we run
  # Travis against many different Python and PyMongo versions and we don't
  # want the deploy to occur multiple times).
  on:
    tags: true
    repo: MongoEngine/mongoengine
    condition: "$PYMONGO = 3.x"
    python: 2.7
```
.travis_.yml — new file, 108 lines

```yaml
## For full coverage, we'd have to test all supported Python, MongoDB, and
## PyMongo combinations. However, that would result in an overly long build
## with a very large number of jobs, hence we only test a subset of all the
## combinations.
## * Python3.7, MongoDB v3.4 & the latest PyMongo v3.x is currently the "main" setup,
##   Other combinations are tested. See below for the details or check the travis jobs
#
## We should periodically check MongoDB Server versions supported by MongoDB
## Inc., add newly released versions to the test matrix, and remove versions
## which have reached their End of Life. See:
## 1. https://www.mongodb.com/support-policy.
## 2. https://docs.mongodb.com/ecosystem/drivers/driver-compatibility-reference/#python-driver-compatibility
##
## Reminder: Update README.rst if you change MongoDB versions we test.
#
#language: python
#dist: xenial
#python:
#  - 3.6
#  - 3.7
#  - 3.8
#  - 3.9
#  - pypy3
#
#env:
#  global:
#    - MONGODB_3_4=3.4.19
#    - MONGODB_3_6=3.6.13
#    - MONGODB_4_0=4.0.13
#
#    - PYMONGO_3_4=3.4
#    - PYMONGO_3_6=3.6
#    - PYMONGO_3_9=3.9
#    - PYMONGO_3_11=3.11
#
#    - MAIN_PYTHON_VERSION=3.7
#  matrix:
#    - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_11}
#
#matrix:
#  # Finish the build as soon as one job fails
#  fast_finish: true
#
#  include:
#    - python: 3.7
#      env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_6}
#    - python: 3.7
#      env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_9}
#    - python: 3.7
#      env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_11}
#    - python: 3.8
#      env: MONGODB=${MONGODB_4_0} PYMONGO=${PYMONGO_3_11}
#
#install:
#  # Install Mongo
#  - wget http://fastdl.mongodb.org/linux/mongodb-linux-x86_64-${MONGODB}.tgz
#  - tar xzf mongodb-linux-x86_64-${MONGODB}.tgz
#  - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --version
#  # Install Python dependencies.
#  - pip install --upgrade pip
#  - pip install coveralls
#  - pip install pre-commit
#  - pip install tox
#  # tox dryrun to setup the tox venv (we run a mock test).
#  - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "-k=test_ci_placeholder"
#
#before_script:
#  - mkdir ${PWD}/mongodb-linux-x86_64-${MONGODB}/data
#  - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --dbpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/data --logpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/mongodb.log --fork
#  # Run pre-commit hooks (black, flake8, etc) on entire codebase
#  - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then pre-commit run -a; else echo "pre-commit checks only runs on py37"; fi
#  - mongo --eval 'db.version();' # Make sure mongo is awake
#
#script:
#  - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "--cov=mongoengine"
#
#after_success:
#  - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then coveralls --verbose; else echo "coveralls only sent for py37"; fi
#
#notifications:
#  irc: irc.freenode.org#mongoengine
#
## Only run builds on the master branch and GitHub releases (tagged as vX.Y.Z)
#branches:
#  # Only run builds on the master branch and GitHub releases (tagged as vX.Y.Z)
#  only:
#    - master
#    - /^v.*$/
#
## Whenever a new release is created via GitHub, publish it on PyPI.
#deploy:
#  provider: pypi
#  user: the_drow
#  password:
#    secure: QMyatmWBnC6ZN3XLW2+fTBDU4LQcp1m/LjR2/0uamyeUzWKdlOoh/Wx5elOgLwt/8N9ppdPeG83ose1jOz69l5G0MUMjv8n/RIcMFSpCT59tGYqn3kh55b0cIZXFT9ar+5cxlif6a5rS72IHm5li7QQyxexJIII6Uxp0kpvUmek=
#
#  # Create a source distribution and a pure python wheel for faster installs.
#  distributions: "sdist bdist_wheel"
#
#  # Only deploy on tagged commits (aka GitHub releases) and only for the parent
#  # repo's builds running Python v3.7 along with PyMongo v3.x and MongoDB v3.4.
#  # We run Travis against many different Python, PyMongo, and MongoDB versions
#  # and we don't want the deploy to occur multiple times).
#  on:
#    tags: true
#    repo: MongoEngine/mongoengine
#    condition: ($PYMONGO = ${PYMONGO_3_11}) && ($MONGODB = ${MONGODB_3_4})
#    python: 3.7
```
AUTHORS — modified, 14 changed lines

```
@@ -246,3 +246,17 @@ that much better:
* Renjianxin (https://github.com/Davidrjx)
* Erdenezul Batmunkh (https://github.com/erdenezul)
* Andy Yankovsky (https://github.com/werat)
* Bastien Gérard (https://github.com/bagerard)
* Trevor Hall (https://github.com/tjhall13)
* Gleb Voropaev (https://github.com/buggyspace)
* Paulo Amaral (https://github.com/pauloAmaral)
* Gaurav Dadhania (https://github.com/GVRV)
* Yurii Andrieiev (https://github.com/yandrieiev)
* Filip Kucharczyk (https://github.com/Pacu2)
* Eric Timmons (https://github.com/daewok)
* Matthew Simpson (https://github.com/mcsimps2)
* Leonardo Domingues (https://github.com/leodmgs)
* Agustin Barto (https://github.com/abarto)
* Stankiewicz Mateusz (https://github.com/mas15)
* Felix Schultheiß (https://github.com/felix-smashdocs)
* Jan Stein (https://github.com/janste63)
```
```
@@ -20,20 +20,47 @@ post to the `user group <http://groups.google.com/group/mongoengine-users>`

Supported Interpreters
----------------------

MongoEngine supports CPython 2.7 and newer. Language
features not supported by all interpreters can not be used.
Please also ensure that your code is properly converted by
`2to3 <http://docs.python.org/library/2to3.html>`_ for Python 3 support.
MongoEngine supports CPython 3.5 and newer as well as Pypy3.
Language features not supported by all interpreters can not be used.

Python3 codebase
----------------------

Since 0.20, the codebase is exclusively Python 3.

Earlier versions were exclusively Python2, and were relying on 2to3 to support Python3 installs.
Travis runs the tests against the main Python 3.x versions.


Style Guide
-----------

MongoEngine aims to follow `PEP8 <http://www.python.org/dev/peps/pep-0008/>`_
including 4 space indents. When possible we try to stick to 79 character line
limits. However, screens got bigger and an ORM has a strong focus on
readability and if it can help, we accept 119 as maximum line length, in a
similar way as `django does
<https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/coding-style/#python-style>`_
MongoEngine's codebase is auto-formatted with `black <https://github.com/python/black>`_, imports are ordered with `isort <https://pycqa.github.io/isort/>`_
and other tools like flake8 are also used. Those tools will run as part of the CI and will fail in case the code is not formatted properly.

To install all development tools, simply run the following commands:

.. code-block:: console

    $ python -m pip install -r requirements-dev.txt


You can install `pre-commit <https://pre-commit.com/>`_ into your git hooks,
to automatically check and fix any formatting issue before creating a
git commit.

To enable ``pre-commit`` simply run:

.. code-block:: console

    $ pre-commit install

See the ``.pre-commit-config.yaml`` configuration file for more information
on how it works.

pre-commit will now run upon every commit and will reject anything that doesn't comply.

You can also run all the checks with ``pre-commit run -a``, this is what is used in the CI.

Testing
-------
@@ -55,7 +82,7 @@ General Guidelines
  should adapt to the breaking change in docs/upgrade.rst.
- Write inline documentation for new classes and methods.
- Write tests and make sure they pass (make sure you have a mongod
  running on the default port, then execute ``python setup.py nosetests``
  running on the default port, then execute ``python setup.py test``
  from the cmd line to run the test suite).
- Ensure tests pass on all supported Python, PyMongo, and MongoDB versions.
  You can test various Python and PyMongo versions locally by executing
```
README.rst — modified, 54 changed lines

```
@@ -12,9 +12,8 @@ MongoEngine
.. image:: https://coveralls.io/repos/github/MongoEngine/mongoengine/badge.svg?branch=master
   :target: https://coveralls.io/github/MongoEngine/mongoengine?branch=master

.. image:: https://landscape.io/github/MongoEngine/mongoengine/master/landscape.svg?style=flat
   :target: https://landscape.io/github/MongoEngine/mongoengine/master
   :alt: Code Health
.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
   :target: https://github.com/ambv/black

About
=====
@@ -26,27 +25,30 @@ an `API reference <https://mongoengine-odm.readthedocs.io/apireference.html>`_.

Supported MongoDB Versions
==========================
MongoEngine is currently tested against MongoDB v2.4, v2.6, and v3.0. Future
versions should be supported as well, but aren't actively tested at the moment.
Make sure to open an issue or submit a pull request if you experience any
problems with MongoDB v3.2+.
MongoEngine is currently tested against MongoDB v3.4, v3.6 and v4.0. Future versions
should be supported as well, but aren't actively tested at the moment. Make
sure to open an issue or submit a pull request if you experience any problems
with MongoDB version > 4.0.

Installation
============
We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of
`pip <https://pip.pypa.io/>`_. You can then use ``pip install -U mongoengine``.
`pip <https://pip.pypa.io/>`_. You can then use ``python -m pip install -U mongoengine``.
You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
and thus you can use ``easy_install -U mongoengine``. Otherwise, you can download the
source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python
setup.py install``.
and thus you can use ``easy_install -U mongoengine``. Another option is
`pipenv <https://docs.pipenv.org/>`_. You can then use ``pipenv install mongoengine``
to both create the virtual environment and install the package. Otherwise, you can
download the source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and
run ``python setup.py install``.

The support for Python2 was dropped with MongoEngine 0.20.0

Dependencies
============
All of the dependencies can easily be installed via `pip <https://pip.pypa.io/>`_.
All of the dependencies can easily be installed via `python -m pip <https://pip.pypa.io/>`_.
At the very least, you'll need these two packages to use MongoEngine:

- pymongo>=2.7.1
- six>=1.10.0
- pymongo>=3.4

If you utilize a ``DateTimeField``, you might also use a more flexible date parser:

@@ -56,6 +58,10 @@ If you need to use an ``ImageField`` or ``ImageGridFsProxy``:

- Pillow>=2.0.0

If you need to use signals:

- blinker>=1.3

Examples
========
Some simple examples of what MongoEngine code looks like:

@@ -89,12 +95,11 @@ Some simple examples of what MongoEngine code looks like:

    # Iterate over all posts using the BlogPost superclass
    >>> for post in BlogPost.objects:
    ...     print '===', post.title, '==='
    ...     print('===', post.title, '===')
    ...     if isinstance(post, TextPost):
    ...         print post.content
    ...         print(post.content)
    ...     elif isinstance(post, LinkPost):
    ...         print 'Link:', post.url
    ...         print
    ...         print('Link:', post.url)
    ...

    # Count all blog posts and its subtypes
```
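The README snippet above assumes `BlogPost`, `TextPost`, and `LinkPost` document classes defined earlier in the file, outside this hunk. A reconstruction sketch of what such definitions look like in MongoEngine — the exact fields in the README may differ:

```python
from mongoengine import Document, StringField


class BlogPost(Document):
    title = StringField(required=True, max_length=200)
    # allow_inheritance is required for querying TextPost/LinkPost
    # through the BlogPost superclass, as the snippet does.
    meta = {"allow_inheritance": True}


class TextPost(BlogPost):
    content = StringField(required=True)


class LinkPost(BlogPost):
    url = StringField(required=True)
```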
```
@@ -114,7 +119,8 @@ Some simple examples of what MongoEngine code looks like:

Tests
=====
To run the test suite, ensure you are running a local instance of MongoDB on
the standard port and have ``nose`` installed. Then, run ``python setup.py nosetests``.
the standard port and have ``pytest`` installed. Then, run ``python setup.py test``
or simply ``pytest``.

To run the test suite on every supported Python and PyMongo version, you can
use ``tox``. You'll need to make sure you have each supported Python version
@@ -123,20 +129,18 @@ installed in your environment and then:

.. code-block:: shell

    # Install tox
    $ pip install tox
    $ python -m pip install tox
    # Run the test suites
    $ tox

If you wish to run a subset of tests, use the nosetests convention:
If you wish to run a subset of tests, use the pytest convention:

.. code-block:: shell

    # Run all the tests in a particular test file
    $ python setup.py nosetests --tests tests/fields/fields.py
    $ pytest tests/fields/test_fields.py
    # Run only particular test class in that file
    $ python setup.py nosetests --tests tests/fields/fields.py:FieldTest
    # Use the -s option if you want to print some debug statements or use pdb
    $ python setup.py nosetests --tests tests/fields/fields.py:FieldTest -s
    $ pytest tests/fields/test_fields.py::TestField

Community
=========
```
benchmark.py — deleted, 207 lines

```python
#!/usr/bin/env python

"""
Simple benchmark comparing PyMongo and MongoEngine.

Sample run on a mid 2015 MacBook Pro (commit b282511):

Benchmarking...
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - Pymongo
2.58979988098
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - Pymongo write_concern={"w": 0}
1.26657605171
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine
8.4351580143
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries without continual assign - MongoEngine
7.20191693306
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True
6.31104588509
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True
6.07083487511
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False
5.97704291344
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False
5.9111430645
"""

import timeit


def main():
    print("Benchmarking...")

    setup = """
from pymongo import MongoClient
connection = MongoClient()
connection.drop_database('timeit_test')
"""

    stmt = """
from pymongo import MongoClient
connection = MongoClient()

db = connection.timeit_test
noddy = db.noddy

for i in range(10000):
    example = {'fields': {}}
    for j in range(20):
        example['fields']['key' + str(j)] = 'value ' + str(j)

    noddy.save(example)

myNoddys = noddy.find()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("""Creating 10000 dictionaries - Pymongo""")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(t.timeit(1))

    stmt = """
from pymongo import MongoClient
from pymongo.write_concern import WriteConcern
connection = MongoClient()

db = connection.get_database('timeit_test', write_concern=WriteConcern(w=0))
noddy = db.noddy

for i in range(10000):
    example = {'fields': {}}
    for j in range(20):
        example['fields']["key"+str(j)] = "value "+str(j)

    noddy.save(example)

myNoddys = noddy.find()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("""Creating 10000 dictionaries - Pymongo write_concern={"w": 0}""")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(t.timeit(1))

    setup = """
from pymongo import MongoClient
connection = MongoClient()
connection.drop_database('timeit_test')
connection.close()

from mongoengine import Document, DictField, connect
connect('timeit_test')

class Noddy(Document):
    fields = DictField()
"""

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save()

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("""Creating 10000 dictionaries - MongoEngine""")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(t.timeit(1))

    stmt = """
for i in range(10000):
    noddy = Noddy()
    fields = {}
    for j in range(20):
        fields["key"+str(j)] = "value "+str(j)
    noddy.fields = fields
    noddy.save()

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("""Creating 10000 dictionaries without continual assign - MongoEngine""")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(t.timeit(1))

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(write_concern={"w": 0}, cascade=True)

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("""Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True""")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(t.timeit(1))

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(write_concern={"w": 0}, validate=False, cascade=True)

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True""")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(t.timeit(1))

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(validate=False, write_concern={"w": 0})

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False""")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(t.timeit(1))

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(force_insert=True, write_concern={"w": 0}, validate=False)

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("""Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False""")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(t.timeit(1))


if __name__ == "__main__":
    main()
```
benchmarks/test_basic_doc_ops.py — new file, 142 lines

```python
from timeit import repeat

import mongoengine
from mongoengine import (
    BooleanField,
    Document,
    EmailField,
    EmbeddedDocument,
    EmbeddedDocumentField,
    IntField,
    ListField,
    StringField,
)

mongoengine.connect(db="mongoengine_benchmark_test")


def timeit(f, n=10000):
    return min(repeat(f, repeat=3, number=n)) / float(n)


def test_basic():
    class Book(Document):
        name = StringField()
        pages = IntField()
        tags = ListField(StringField())
        is_published = BooleanField()
        author_email = EmailField()

    Book.drop_collection()

    def init_book():
        return Book(
            name="Always be closing",
            pages=100,
            tags=["self-help", "sales"],
            is_published=True,
            author_email="alec@example.com",
        )

    print("Doc initialization: %.3fus" % (timeit(init_book, 1000) * 10 ** 6))

    b = init_book()
    print("Doc getattr: %.3fus" % (timeit(lambda: b.name, 10000) * 10 ** 6))

    print(
        "Doc setattr: %.3fus"
        % (timeit(lambda: setattr(b, "name", "New name"), 10000) * 10 ** 6)  # noqa B010
    )

    print("Doc to mongo: %.3fus" % (timeit(b.to_mongo, 1000) * 10 ** 6))

    print("Doc validation: %.3fus" % (timeit(b.validate, 1000) * 10 ** 6))

    def save_book():
        b._mark_as_changed("name")
        b._mark_as_changed("tags")
        b.save()

    print("Save to database: %.3fus" % (timeit(save_book, 100) * 10 ** 6))

    son = b.to_mongo()
    print(
        "Load from SON: %.3fus" % (timeit(lambda: Book._from_son(son), 1000) * 10 ** 6)
    )

    print(
        "Load from database: %.3fus" % (timeit(lambda: Book.objects[0], 100) * 10 ** 6)
    )

    def create_and_delete_book():
        b = init_book()
        b.save()
        b.delete()

    print(
        "Init + save to database + delete: %.3fms"
        % (timeit(create_and_delete_book, 10) * 10 ** 3)
    )


def test_big_doc():
    class Contact(EmbeddedDocument):
        name = StringField()
        title = StringField()
        address = StringField()

    class Company(Document):
        name = StringField()
        contacts = ListField(EmbeddedDocumentField(Contact))

    Company.drop_collection()

    def init_company():
        return Company(
            name="MongoDB, Inc.",
            contacts=[
                Contact(name="Contact %d" % x, title="CEO", address="Address %d" % x)
                for x in range(1000)
            ],
        )

    company = init_company()
    print("Big doc to mongo: %.3fms" % (timeit(company.to_mongo, 100) * 10 ** 3))

    print("Big doc validation: %.3fms" % (timeit(company.validate, 1000) * 10 ** 3))

    company.save()

    def save_company():
        company._mark_as_changed("name")
        company._mark_as_changed("contacts")
        company.save()

    print("Save to database: %.3fms" % (timeit(save_company, 100) * 10 ** 3))

    son = company.to_mongo()
    print(
        "Load from SON: %.3fms"
        % (timeit(lambda: Company._from_son(son), 100) * 10 ** 3)
    )

    print(
        "Load from database: %.3fms"
        % (timeit(lambda: Company.objects[0], 100) * 10 ** 3)
    )

    def create_and_delete_company():
        c = init_company()
        c.save()
        c.delete()

    print(
        "Init + save to database + delete: %.3fms"
        % (timeit(create_and_delete_company, 10) * 10 ** 3)
    )


if __name__ == "__main__":
    test_basic()
    print("-" * 100)
    test_big_doc()
```
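The module's `timeit(f, n)` helper returns the best-of-three average per-call time in seconds. A standalone illustration of that semantics — the `sum(range(100))` workload is arbitrary, chosen only for the demo:

```python
from timeit import repeat


def timeit(f, n=10000):
    # Run f n times, three separate rounds; take the fastest round
    # and divide by n to get the best-case per-call time in seconds.
    return min(repeat(f, repeat=3, number=n)) / float(n)


per_call = timeit(lambda: sum(range(100)), n=1000)
print("%.3fus" % (per_call * 10 ** 6))
```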
161
benchmarks/test_inserts.py
Normal file
161
benchmarks/test_inserts.py
Normal file
@@ -0,0 +1,161 @@
import timeit


def main():
    setup = """
from pymongo import MongoClient

connection = MongoClient()
connection.drop_database('mongoengine_benchmark_test')
"""

    stmt = """
from pymongo import MongoClient

connection = MongoClient()

db = connection.mongoengine_benchmark_test
noddy = db.noddy

for i in range(10000):
    example = {'fields': {}}
    for j in range(20):
        example['fields']["key"+str(j)] = "value "+str(j)

    noddy.insert_one(example)

myNoddys = noddy.find()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("PyMongo: Creating 10000 dictionaries.")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(f"{t.timeit(1)}s")

    stmt = """
from pymongo import MongoClient, WriteConcern

connection = MongoClient()

db = connection.mongoengine_benchmark_test
noddy = db.noddy.with_options(write_concern=WriteConcern(w=0))

for i in range(10000):
    example = {'fields': {}}
    for j in range(20):
        example['fields']["key"+str(j)] = "value "+str(j)

    noddy.insert_one(example)

myNoddys = noddy.find()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print('PyMongo: Creating 10000 dictionaries (write_concern={"w": 0}).')
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(f"{t.timeit(1)}s")

    setup = """
from pymongo import MongoClient

connection = MongoClient()
connection.drop_database('mongoengine_benchmark_test')
connection.close()

from mongoengine import Document, DictField, connect
connect("mongoengine_benchmark_test")

class Noddy(Document):
    fields = DictField()
"""

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save()

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("MongoEngine: Creating 10000 dictionaries.")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(f"{t.timeit(1)}s")

    stmt = """
for i in range(10000):
    noddy = Noddy()
    fields = {}
    for j in range(20):
        fields["key"+str(j)] = "value "+str(j)
    noddy.fields = fields
    noddy.save()

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("MongoEngine: Creating 10000 dictionaries (using a single field assignment).")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(f"{t.timeit(1)}s")

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(write_concern={"w": 0})

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}).')
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(f"{t.timeit(1)}s")

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(write_concern={"w": 0}, validate=False)

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print(
        'MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}, validate=False).'
    )
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(f"{t.timeit(1)}s")

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(force_insert=True, write_concern={"w": 0}, validate=False)

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print(
        'MongoEngine: Creating 10000 dictionaries (force_insert=True, write_concern={"w": 0}, validate=False).'
    )
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(f"{t.timeit(1)}s")


if __name__ == "__main__":
    main()
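Each measurement above uses ``timeit.Timer`` with a ``setup`` string (run once, untimed, to drop the benchmark database) and a ``stmt`` string (the timed body). A minimal sketch of that pattern in isolation, with a made-up workload in place of the database calls:

    import timeit

    setup = "data = list(range(1000))"
    stmt = "sorted(data)"

    t = timeit.Timer(stmt=stmt, setup=setup)
    print(f"{t.timeit(100)}s")  # total seconds for 100 executions of stmt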
@@ -33,8 +33,14 @@ clean:
html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. Check $(BUILDDIR)/html/index.html"

html-readthedocs:
	$(SPHINXBUILD) -T -E -b readthedocs $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."


dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
@@ -13,6 +13,7 @@ Documents

.. autoclass:: mongoengine.Document
   :members:
   :inherited-members:

   .. attribute:: objects

@@ -21,12 +22,15 @@ Documents

.. autoclass:: mongoengine.EmbeddedDocument
   :members:
   :inherited-members:

.. autoclass:: mongoengine.DynamicDocument
   :members:
   :inherited-members:

.. autoclass:: mongoengine.DynamicEmbeddedDocument
   :members:
   :inherited-members:

.. autoclass:: mongoengine.document.MapReduceDocument
   :members:
@@ -1,113 +1,302 @@

=========
Changelog
=========

dev
===
- Subfield resolve error in generic_embedded_document query #1651 #1652
- Use each modifier only with $position #1673 #1675
- Improve LazyReferenceField and GenericLazyReferenceField with nested fields #1704
- Fix validation error instance in GenericEmbeddedDocumentField #1067
- Update cached fields when fields argument is given #1712
- Add a db parameter to register_connection for compatibility with connect
- Use insert_one, insert_many in Document.insert #1491
- Use new update_one, update_many on document/queryset update #1491
- Fix reload(fields) affecting changed fields #1371
- Fix read-only access to the database failing when trying to create indexes #1338

Development
===========
- (Fill this out as you fix issues and develop your features).

Changes in 0.23.1
=================
- Bug fix: ignore LazyReferenceFields when clearing _changed_fields #2484
- Improve connection doc #2481
Changes in 0.23.0
=================
- Bugfix: manually setting SequenceField in DynamicDocument doesn't increment the counter #2471
- Add MongoDB 4.2 and 4.4 to CI
- Add support for allowDiskUse on querysets #2468

Changes in 0.22.1
=================
- Declare that Py3.5 is not supported in package metadata #2449
- Moved CI from Travis to Github-Actions

Changes in 0.22.0
=================
- Fix LazyReferenceField dereferencing in embedded documents #2426
- Fix regarding the recent use of Cursor.__spec in .count() that was interfering with mongomock #2425
- Drop support for Python 3.5 by introducing f-strings in the codebase

Changes in 0.21.0
=================
- Bug fix in DynamicDocument which is not parsing known fields in constructor like Document does #2412
- When using pymongo >= 3.7, make use of Collection.count_documents instead of Collection.count
  and Cursor.count that got deprecated in pymongo >= 3.7.
  This should have a negative impact on the performance of count; see Issue #2219
- Fix a bug that made the queryset drop the read_preference after clone().
- Remove Py3.5 from CI as it reached EOL and add Python 3.9
- Fix some issues related with db_field/field conflict in constructor #2414
- BREAKING CHANGE: Fix the behavior of Doc.objects.limit(0) which should return all documents (similar to mongodb) #2311
- Bug fix in ListField when updating the first item: it was saving the whole list instead of
  just replacing the first item (as usually done when updating 1 item of the list) #2392
- Add EnumField: ``mongoengine.fields.EnumField``
- Refactoring - Remove useless code related to Document.__only_fields and Queryset.only_fields
- Fix query transformation regarding special operators #2365
- Bug Fix: Document.save() fails when shard_key is not _id #2154

Changes in 0.20.0
=================
- ATTENTION: Drop support for Python2
- Add Mongo 4.0 to Travis
- Fix error when setting a string as a ComplexDateTimeField #2253
- Bump development Status classifier to Production/Stable #2232
- Improve Queryset.get to avoid confusing MultipleObjectsReturned message in case multiple matches are found #630
- Fixed a bug causing inaccurate query results, while combining ``__raw__`` and regular filters for the same field #2264
- Add support for the `elemMatch` projection operator in .fields() (e.g. BlogPost.objects.fields(elemMatch__comments="test")) #2267
- DictField validate failed without default connection (bug introduced in 0.19.0) #2239
- Remove methods that were deprecated years ago:
    - name parameter in Field constructor e.g. `StringField(name="...")`, was replaced by db_field
    - Queryset.slave_okay() was deprecated since pymongo3
    - dropDups was dropped with MongoDB3
    - ``Queryset._ensure_indexes`` and ``Queryset.ensure_indexes``, the right method to use is ``Document.ensure_indexes``
- Added pre-commit for development/CI #2212
- Renamed requirements-lint.txt to requirements-dev.txt #2212
- Support for setting ReadConcern #2255
Changes in 0.19.1
=================
- Tests require Pillow < 7.0.0 as it dropped Python2 support
- DEPRECATION: The interface of ``QuerySet.aggregate`` method was changed, it no longer takes an unpacked list of
  pipeline steps (*pipeline) but simply takes the pipeline list just like ``pymongo.Collection.aggregate`` does. #2079

Changes in 0.19.0
=================
- BREAKING CHANGE: ``class_check`` and ``read_preference`` keyword arguments are no longer available when filtering a ``QuerySet``. #2112
    - Instead of ``Doc.objects(foo=bar, read_preference=...)`` use ``Doc.objects(foo=bar).read_preference(...)``.
    - Instead of ``Doc.objects(foo=bar, class_check=False)`` use ``Doc.objects(foo=bar).clear_cls_query(...)``.
    - This change also renames the private ``QuerySet._initial_query`` attribute to ``_cls_query``.
- BREAKING CHANGE: Removed the deprecated ``format`` param from ``QuerySet.explain``. #2113
- BREAKING CHANGE: Renamed ``MongoEngineConnectionError`` to ``ConnectionFailure``. #2111
    - If you catch/use ``MongoEngineConnectionError`` in your code, you'll have to rename it.
- BREAKING CHANGE: Positional arguments when instantiating a document are no longer supported. #2103
    - From now on keyword arguments (e.g. ``Doc(field_name=value)``) are required.
- BREAKING CHANGE: A ``LazyReferenceField`` is now stored in the ``_data`` field of its parent as a ``DBRef``, ``Document``, or ``EmbeddedDocument`` (``ObjectId`` is no longer allowed). #2182
- DEPRECATION: ``Q.empty`` & ``QNode.empty`` are marked as deprecated and will be removed in a next version of MongoEngine. #2210
    - Added ability to check if Q or QNode are empty by parsing them to bool.
    - Instead of ``Q(name="John").empty`` use ``not Q(name="John")``.
- Fix updating/modifying/deleting/reloading a document that's sharded by a field with ``db_field`` specified. #2125
- Only set no_cursor_timeout when requested (fixes an incompatibility with MongoDB 4.2) #2148
- ``ListField`` now accepts an optional ``max_length`` parameter. #2110
- Improve error message related to InvalidDocumentError #2180
- Added BulkWriteError to replace NotUniqueError which was misleading in bulk write insert #2152
- Added ability to compare Q and Q operations #2204
- Added ability to use a db alias on query_counter #2194
- Added ability to specify collations for querysets with ``Doc.objects.collation`` #2024
- Fix updates of a list field by negative index #2094
- Switch from nosetest to pytest as test runner #2114
- The codebase is now formatted using ``black``. #2109
- Documentation improvements:
    - Documented how `pymongo.monitoring` can be used to log all queries issued by MongoEngine to the driver.

Changes in 0.18.2
=================
- Replace deprecated PyMongo v2.x methods with their v3.x equivalents in the ``SequenceField``. #2097
- Various code clarity and documentation improvements.

Changes in 0.18.1
=================
- Fix a bug introduced in 0.18.0 which was causing ``Document.save`` to update all the fields instead of updating only the modified fields. This bug only occurred when using a custom PK. #2082
- Add Python 3.7 to Travis CI. #2058

Changes in 0.18.0
=================
- Drop support for EOL'd MongoDB v2.6, v3.0, and v3.2.
- MongoEngine now requires PyMongo >= v3.4. Travis CI now tests against MongoDB v3.4 – v3.6 and PyMongo v3.4 – v3.6. #2017 #2066
- Improve performance by avoiding a call to ``to_mongo`` in ``Document.save``. #2049
- Connection/disconnection improvements:
    - Expose ``mongoengine.connection.disconnect`` and ``mongoengine.connection.disconnect_all``.
    - Fix disconnecting. #566 #1599 #605 #607 #1213 #565
    - Improve documentation of ``connect``/``disconnect``.
    - Fix issue when using multiple connections to the same mongo with different credentials. #2047
    - ``connect`` fails immediately when db name contains invalid characters. #2031 #1718
- Fix the default write concern of ``Document.save`` that was overwriting the connection write concern. #568
- Fix querying on ``List(EmbeddedDocument)`` subclasses fields. #1961 #1492
- Fix querying on ``(Generic)EmbeddedDocument`` subclasses fields. #475
- Fix ``QuerySet.aggregate`` so that it takes limit and skip value into account. #2029
- Generate unique indices for ``SortedListField`` and ``EmbeddedDocumentListFields``. #2020
- BREAKING CHANGE: Changed the behavior of a custom field validator (i.e. the ``validation`` parameter of a ``Field``). It is now expected to raise a ``ValidationError`` instead of returning ``True``/``False``. #2050
- BREAKING CHANGES (associated with connection/disconnection fixes):
    - Calling ``connect`` 2 times with the same alias and different parameter will raise an error (should call ``disconnect`` first).
    - ``disconnect`` now clears ``mongoengine.connection._connection_settings``.
    - ``disconnect`` now clears the cached attribute ``Document._collection``.
- BREAKING CHANGE: ``EmbeddedDocument.save`` & ``.reload`` no longer exist. #1552
Changes in 0.17.0
=================
- POTENTIAL BREAKING CHANGE: All result fields are now passed, including internal fields (``_cls``, ``_id``) when using ``QuerySet.as_pymongo``. #1976
- Document a BREAKING CHANGE introduced in 0.15.3 and not reported at that time. #1995
- DEPRECATION: ``EmbeddedDocument.save`` & ``.reload`` are marked as deprecated and will be removed in a next version of MongoEngine. #1552
- Fix ``QuerySet.only`` working improperly after using ``QuerySet.count`` of the same instance of a ``QuerySet``.
- Fix ``batch_size`` that was not copied when cloning a ``QuerySet`` object. #2011
- Fix ``InvalidStringData`` error when using ``modify`` on a ``BinaryField``. #1127
- Fix test suite and CI to support MongoDB v3.4. #1445
- Fix reference fields querying the database on each access if value contains orphan DBRefs.

Changes in 0.16.3
=================
- Fix ``$push`` with the ``$position`` operator not working with lists in embedded documents. #1965

Changes in 0.16.2
=================
- Fix ``Document.save`` that fails when called with ``write_concern=None`` (regression of 0.16.1). #1958

Changes in 0.16.1
=================
- Fix ``_cls`` that is not set properly in the ``Document`` constructor (regression). #1950
- Fix a bug in the ``_delta`` method - update of a ``ListField`` depends on an unrelated dynamic field update. #1733
- Remove PyMongo's deprecated ``Collection.save`` method and use ``Collection.insert_one`` instead. #1899

Changes in 0.16.0
=================
- POTENTIAL BREAKING CHANGES:
    - ``EmbeddedDocumentField`` will no longer accept references to Document classes in its constructor. #1661
    - Get rid of the ``basecls`` parameter from the ``DictField`` constructor (dead code). #1876
    - Default value of the ``ComplexDateTime`` field is now ``None`` (and no longer the current datetime). #1368
- Fix an unhashable ``TypeError`` when referencing a ``Document`` with a compound key in an ``EmbeddedDocument``. #1685
- Fix a bug where an ``EmbeddedDocument`` with the same id as its parent would not be tracked for changes. #1768
- Fix the fact that a bulk ``QuerySet.insert`` was not setting primary keys of inserted document instances. #1919
- Fix a bug when referencing an abstract class in a ``ReferenceField``. #1920
- Allow modifications to the document made in ``pre_save_post_validation`` to be taken into account. #1202
- Replace MongoDB v2.4 tests in Travis CI with MongoDB v3.2. #1903
- Fix side effects of using ``QuerySet.no_dereference`` on other documents. #1677
- Fix ``TypeError`` when using lazy Django translation objects as translated choices. #1879
- Improve Python 2-3 codebase compatibility. #1889
- Fix support for changing the default value of the ``ComplexDateTime`` field. #1368
- Improve error message in case an ``EmbeddedDocumentListField`` receives an ``EmbeddedDocument`` instance instead of a list. #1877
- Fix the ``inc`` and ``dec`` operators for the ``DecimalField``. #1517 #1320
- Ignore ``killcursors`` queries in ``query_counter`` context manager. #1869
- Fix the fact that ``query_counter`` was modifying the initial profiling level in case it was != 0. #1870
- Repair the ``no_sub_classes`` context manager + fix the fact that it was swallowing exceptions. #1865
- Fix index creation error that was swallowed by ``hasattr`` under Python 2. #1688
- ``QuerySet.limit`` function behaviour: Passing 0 as parameter will return all the documents in the cursor. #1611
- Bulk insert updates the IDs of the input documents instances. #1919
- Fix a harmless bug related to ``GenericReferenceField`` where modifications in the generic-referenced document were tracked in the parent. #1934
- Improve validation of the ``BinaryField``. #273
- Implement lazy regex compiling in Field classes to improve ``import mongoengine`` performance. #1806
- Update ``GridFSProxy.__str__`` so that it would always print both the filename and grid_id. #710
- Add ``__repr__`` to ``Q`` and ``QCombination`` classes. #1843
- Fix bug in the ``BaseList.__iter__`` operator (was occurring when modifying a BaseList while iterating over it). #1676
- Add a ``DateField``. #513
- Various improvements to the documentation.
- Various code quality improvements.

Changes in 0.15.3
=================
- ``Queryset.update/update_one`` methods now return an ``UpdateResult`` when ``full_result=True`` is provided and no longer a dict. #1491
- Improve ``LazyReferenceField`` and ``GenericLazyReferenceField`` with nested fields. #1704
- Fix the subfield resolve error in ``generic_embedded_document`` query. #1651 #1652
- Use each modifier only with ``$position``. #1673 #1675
- Fix validation errors in the ``GenericEmbeddedDocumentField``. #1067
- Update cached fields when a ``fields`` argument is given. #1712
- Add a ``db`` parameter to ``register_connection`` for compatibility with ``connect``.
- Use PyMongo v3.x's ``insert_one`` and ``insert_many`` in ``Document.insert``. #1491
- Use PyMongo v3.x's ``update_one`` and ``update_many`` in ``Document.update`` and ``QuerySet.update``. #1491
- Fix how ``reload(fields)`` affects changed fields. #1371
- Fix a bug where the read-only access to the database fails when trying to create indexes. #1338
Changes in 0.15.0
=================
- Add LazyReferenceField and GenericLazyReferenceField to address #1230
- Add ``LazyReferenceField`` and ``GenericLazyReferenceField``. #1230

Changes in 0.14.1
=================
- Removed SemiStrictDict and started using a regular dict for `BaseDocument._data` #1630
- Added support for the `$position` param in the `$push` operator #1566
- Fixed `DateTimeField` interpreting an empty string as today #1533
- Added a missing `__ne__` method to the `GridFSProxy` class #1632
- Fixed `BaseQuerySet._fields_to_db_fields` #1553
- Remove ``SemiStrictDict`` and start using a regular dict for ``BaseDocument._data``. #1630
- Add support for the ``$position`` param in the ``$push`` operator. #1566
- Fix ``DateTimeField`` interpreting an empty string as today. #1533
- Add a missing ``__ne__`` method to the ``GridFSProxy`` class. #1632
- Fix ``BaseQuerySet._fields_to_db_fields``. #1553

Changes in 0.14.0
=================
- BREAKING CHANGE: Removed the `coerce_types` param from `QuerySet.as_pymongo` #1549
- POTENTIAL BREAKING CHANGE: Made EmbeddedDocument not hashable by default #1528
- Improved code quality #1531, #1540, #1541, #1547
- BREAKING CHANGE: Remove the ``coerce_types`` param from ``QuerySet.as_pymongo``. #1549
- POTENTIAL BREAKING CHANGE: Make ``EmbeddedDocument`` not hashable by default. #1528
- Improve code quality. #1531, #1540, #1541, #1547

Changes in 0.13.0
=================
- POTENTIAL BREAKING CHANGE: Added Unicode support to the `EmailField`, see
  docs/upgrade.rst for details.
- POTENTIAL BREAKING CHANGE: Added Unicode support to the ``EmailField``, see docs/upgrade.rst for details.

Changes in 0.12.0
=================
- POTENTIAL BREAKING CHANGE: Fixed limit/skip/hint/batch_size chaining #1476
- POTENTIAL BREAKING CHANGE: Changed a public `QuerySet.clone_into` method to a private `QuerySet._clone_into` #1476
- Fixed the way `Document.objects.create` works with duplicate IDs #1485
- Fixed connecting to a replica set with PyMongo 2.x #1436
- Fixed using sets in field choices #1481
- Fixed deleting items from a `ListField` #1318
- Fixed an obscure error message when filtering by `field__in=non_iterable`. #1237
- Fixed behavior of a `dec` update operator #1450
- Added a `rename` update operator #1454
- Added validation for the `db_field` parameter #1448
- Fixed the error message displayed when querying an `EmbeddedDocumentField` by an invalid value #1440
- Fixed the error message displayed when validating unicode URLs #1486
- Raise an error when trying to save an abstract document #1449
- POTENTIAL BREAKING CHANGE: Fix ``limit``/``skip``/``hint``/``batch_size`` chaining. #1476
- POTENTIAL BREAKING CHANGE: Change a public ``QuerySet.clone_into`` method to a private ``QuerySet._clone_into``. #1476
- Fix the way ``Document.objects.create`` works with duplicate IDs. #1485
- Fix connecting to a replica set with PyMongo 2.x. #1436
- Fix using sets in field choices. #1481
- Fix deleting items from a ``ListField``. #1318
- Fix an obscure error message when filtering by ``field__in=non_iterable``. #1237
- Fix behavior of a ``dec`` update operator. #1450
- Add a ``rename`` update operator. #1454
- Add validation for the ``db_field`` parameter. #1448
- Fix the error message displayed when querying an ``EmbeddedDocumentField`` by an invalid value. #1440
- Fix the error message displayed when validating Unicode URLs. #1486
- Raise an error when trying to save an abstract document. #1449
Changes in 0.11.0
=================
- BREAKING CHANGE: Renamed `ConnectionError` to `MongoEngineConnectionError` since the former is a built-in exception name in Python v3.x. #1428
- BREAKING CHANGE: Dropped Python 2.6 support. #1428
- BREAKING CHANGE: `from mongoengine.base import ErrorClass` won't work anymore for any error from `mongoengine.errors` (e.g. `ValidationError`). Use `from mongoengine.errors import ErrorClass` instead. #1428
- BREAKING CHANGE: Accessing a broken reference will raise a `DoesNotExist` error. In the past it used to return `None`. #1334
- Fixed absent rounding for DecimalField when `force_string` is set. #1103
- BREAKING CHANGE: Rename ``ConnectionError`` to ``MongoEngineConnectionError`` since the former is a built-in exception name in Python v3.x. #1428
- BREAKING CHANGE: Drop Python v2.6 support. #1428
- BREAKING CHANGE: ``from mongoengine.base import ErrorClass`` won't work anymore for any error from ``mongoengine.errors`` (e.g. ``ValidationError``). Use ``from mongoengine.errors import ErrorClass`` instead. #1428
- BREAKING CHANGE: Accessing a broken reference will raise a ``DoesNotExist`` error. In the past it used to return ``None``. #1334
- Fix absent rounding for the ``DecimalField`` when ``force_string`` is set. #1103

Changes in 0.10.8
=================
- Added support for QuerySet.batch_size (#1426)
- Fixed query set iteration within iteration #1427
- Fixed an issue where specifying a MongoDB URI host would override more information than it should #1421
- Added ability to filter the generic reference field by ObjectId and DBRef #1425
- Fixed delete cascade for models with a custom primary key field #1247
- Added ability to specify an authentication mechanism (e.g. X.509) #1333
- Added support for falsey primary keys (e.g. doc.pk = 0) #1354
- Fixed QuerySet#sum/average for fields w/ explicit db_field #1417
- Fixed filtering by embedded_doc=None #1422
- Added support for cursor.comment #1420
- Fixed doc.get_<field>_display #1419
- Fixed __repr__ method of the StrictDict #1424
- Added a deprecation warning for Python 2.6
- Add support for ``QuerySet.batch_size``. (#1426)
- Fix a query set iteration within an iteration. #1427
- Fix an issue where specifying a MongoDB URI host would override more information than it should. #1421
- Add an ability to filter the ``GenericReferenceField`` by an ``ObjectId`` and a ``DBRef``. #1425
- Fix cascading deletes for models with a custom primary key field. #1247
- Add ability to specify an authentication mechanism (e.g. X.509). #1333
- Add support for falsy primary keys (e.g. ``doc.pk = 0``). #1354
- Fix ``QuerySet.sum/average`` for fields w/ an explicit ``db_field``. #1417
- Fix filtering by ``embedded_doc=None``. #1422
- Add support for ``Cursor.comment``. #1420
- Fix ``doc.get_<field>_display`` methods. #1419
- Fix the ``__repr__`` method of the ``StrictDict``. #1424
- Add a deprecation warning for Python v2.6.
Changes in 0.10.7
=================
- Dropped Python 3.2 support #1390
- Fixed the bug where dynamic doc has index inside a dict field #1278
- Fixed: ListField minus index assignment does not work #1128
- Fixed cascade delete mixing among collections #1224
- Add `signal_kwargs` argument to `Document.save`, `Document.delete` and `BaseQuerySet.insert` to be passed to signals calls #1206
- Raise `OperationError` when trying to do a `drop_collection` on document with no collection set.
- count on ListField of EmbeddedDocumentField fails. #1187
- Fixed long fields stored as int32 in Python 3. #1253
- MapField now handles unicode keys correctly. #1267
- ListField now handles negative indices correctly. #1270
- Fixed AttributeError when initializing EmbeddedDocument with positional args. #681
- Fixed no_cursor_timeout error with pymongo 3.0+ #1304
- Replaced map-reduce based QuerySet.sum/average with aggregation-based implementations #1336
- Fixed support for `__` to escape field names that match operators' names in `update` #1351
- Fixed BaseDocument#_mark_as_changed #1369
- Added support for pickling QuerySet instances. #1397
- Fixed connecting to a list of hosts #1389
- Fixed a bug where accessing broken references wouldn't raise a DoesNotExist error #1334
- Fixed not being able to specify use_db_field=False on ListField(EmbeddedDocumentField) instances #1218
- Improvements to the dictionary fields docs #1383
- Drop Python 3.2 support #1390
- Fix a bug where a dynamic doc has an index inside a dict field. #1278
- Fix: ``ListField`` minus index assignment does not work. #1128
- Fix cascade delete mixing among collections. #1224
- Add ``signal_kwargs`` argument to ``Document.save``, ``Document.delete`` and ``BaseQuerySet.insert`` to be passed to signals calls. #1206
- Raise ``OperationError`` when trying to do a ``drop_collection`` on document with no collection set.
- Fix a bug where a count on ``ListField`` of ``EmbeddedDocumentField`` fails. #1187
- Fix ``LongField`` values stored as int32 in Python 3. #1253
- ``MapField`` now handles unicode keys correctly. #1267
- ``ListField`` now handles negative indices correctly. #1270
- Fix an ``AttributeError`` when initializing an ``EmbeddedDocument`` with positional args. #681
- Fix a ``no_cursor_timeout`` error with PyMongo v3.x. #1304
- Replace map-reduce based ``QuerySet.sum/average`` with aggregation-based implementations. #1336
- Fix support for ``__`` to escape field names that match operators' names in ``update``. #1351
- Fix ``BaseDocument._mark_as_changed``. #1369
- Add support for pickling ``QuerySet`` instances. #1397
- Fix connecting to a list of hosts. #1389
- Fix a bug where accessing broken references wouldn't raise a ``DoesNotExist`` error. #1334
- Fix not being able to specify ``use_db_field=False`` on ``ListField(EmbeddedDocumentField)`` instances. #1218
- Improvements to the dictionary field's docs. #1383

Changes in 0.10.6
=================
- Add support for mocking MongoEngine based on mongomock. #1151
- Fixed not being able to run tests on Windows. #1153
- Fix not being able to run tests on Windows. #1153
- Allow creation of sparse compound indexes. #1114
- count on ListField of EmbeddedDocumentField fails. #1187
Changes in 0.10.5
=================

@@ -115,12 +304,12 @@ Changes in 0.10.5

Changes in 0.10.4
=================
- SaveConditionError is now importable from the top level package. #1165
- upsert_one method added. #1157
- ``SaveConditionError`` is now importable from the top level package. #1165
- Add a ``QuerySet.upsert_one`` method. #1157

Changes in 0.10.3
=================
- Fix `read_preference` (it had chaining issues with PyMongo 2.x and it didn't work at all with PyMongo 3.x) #1042
- Fix ``read_preference`` (it had chaining issues with PyMongo v2.x and it didn't work at all with PyMongo v3.x). #1042

Changes in 0.10.2
=================

@@ -130,16 +319,16 @@ Changes in 0.10.2

Changes in 0.10.1
=================
- Fix infinite recursion with CASCADE delete rules under specific conditions. #1046
- Fix CachedReferenceField bug when loading cached docs as DBRef but failing to save them. #1047
- Fix ignored chained options #842
- Document save's save_condition error raises `SaveConditionError` exception #1070
- Fix Document.reload for DynamicDocument. #1050
- StrictDict & SemiStrictDict are shadowed at init time. #1105
- Fix ListField minus index assignment does not work. #1119
- Remove code that marks field as changed when the field has default but not existed in database #1126
- Remove test dependencies (nose and rednose) from install dependencies list. #1079
- Recursively build query when using elemMatch operator. #1130
- Fix infinite recursion with cascade delete rules under specific conditions. #1046
- Fix ``CachedReferenceField`` bug when loading cached docs as ``DBRef`` but failing to save them. #1047
- Fix ignored chained options. #842
- ``Document.save``'s ``save_condition`` error raises a ``SaveConditionError`` exception. #1070
- Fix ``Document.reload`` for the ``DynamicDocument``. #1050
- ``StrictDict`` & ``SemiStrictDict`` are shadowed at init time. #1105
- Fix ``ListField`` negative index assignment not working. #1119
- Remove code that marks a field as changed when the field has a default value but does not exist in the database. #1126
- Remove test dependencies (nose and rednose) from install dependencies. #1079
- Recursively build a query when using the ``elemMatch`` operator. #1130
- Fix instance back references for lists of embedded documents. #1131
Changes in 0.10.0

@@ -150,7 +339,7 @@ Changes in 0.10.0
- Removed get_or_create() deprecated since 0.8.0. #300
- Improve Document._created status when switching collection and db #1020
- Queryset update doesn't go through field validation #453
- Added support for specifying authentication source as option `authSource` in URI. #967
- Added support for specifying authentication source as option ``authSource`` in URI. #967
- Fixed mark_as_changed to handle higher/lower level fields changed. #927
- ListField of embedded docs doesn't set the _instance attribute when iterating over it #914
- Support += and *= for ListField #595

@@ -166,7 +355,7 @@ Changes in 0.10.0
- Fixes some internal _id handling issues. #961
- Updated URL and Email Field regex validators, added schemes argument to URLField validation. #652
- Capped collection multiple of 256. #1011
- Added `BaseQuerySet.aggregate_sum` and `BaseQuerySet.aggregate_average` methods.
- Added ``BaseQuerySet.aggregate_sum`` and ``BaseQuerySet.aggregate_average`` methods.
- Fix for delete with write_concern {'w': 0}. #1008
- Allow dynamic lookup for more than two parts. #882
- Added support for min_distance on geo queries. #831

@@ -175,10 +364,10 @@ Changes in 0.10.0
Changes in 0.9.0
================
- Update FileField when creating a new file #714
- Added `EmbeddedDocumentListField` for Lists of Embedded Documents. #826
- Added ``EmbeddedDocumentListField`` for Lists of Embedded Documents. #826
- ComplexDateTimeField should fall back to None when null=True #864
- Request Support for $min, $max Field update operators #863
- `BaseDict` does not follow `setdefault` #866
- ``BaseDict`` does not follow ``setdefault`` #866
- Add support for $type operator #766
- Fix tests for pymongo 2.8+ #877
- No module named 'django.utils.importlib' (Django dev) #872

@@ -199,13 +388,13 @@ Changes in 0.9.0
- Stop ensure_indexes running on secondaries unless connection is through mongos #746
- Not overriding default values when loading a subset of fields #399
- Saving document doesn't create new fields in existing collection #620
- Added `Queryset.aggregate` wrapper to aggregation framework #703
- Added ``Queryset.aggregate`` wrapper to aggregation framework #703
- Added support to show original model fields on to_json calls instead of db_field #697
- Added Queryset.search_text to Text indexes searches #700
- Fixed tests for Django 1.7 #696
- Follow ReferenceFields in EmbeddedDocuments with select_related #690
- Added preliminary support for text indexes #680
- Added `elemMatch` operator as well - `match` is too obscure #653
- Added ``elemMatch`` operator as well - ``match`` is too obscure #653
- Added support for progressive JPEG #486 #548
- Allow strings to be used in index creation #675
- Fixed EmbeddedDoc weakref proxy issue #592

@@ -241,11 +430,11 @@ Changes in 0.9.0
- Increase email field length to accommodate new TLDs #726
- index_cls is ignored when deciding to set _cls as index prefix #733
- Make 'db' argument to connection optional #737
- Allow atomic update for the entire `DictField` #742
- Allow atomic update for the entire ``DictField`` #742
- Added MultiPointField, MultiLineField, MultiPolygonField
- Fix multiple connections aliases being rewritten #748
- Fixed a few instances where reverse_delete_rule was written as reverse_delete_rules. #791
- Make `in_bulk()` respect `no_dereference()` #775
- Make ``in_bulk()`` respect ``no_dereference()`` #775
- Handle None from model __str__; Fixes #753 #754
- _get_changed_fields fix for embedded documents with id field. #925
@@ -299,18 +488,15 @@ Changes in 0.8.4

Changes in 0.8.3
================
- Fixed EmbeddedDocuments with `id` also storing `_id` (#402)
- Fixed EmbeddedDocuments with ``id`` also storing ``_id`` (#402)
- Added get_proxy_object helper to filefields (#391)
- Added QuerySetNoCache and QuerySet.no_cache() for lower memory consumption (#365)
- Fixed sum and average mapreduce dot notation support (#375, #376, #393)
- Fixed as_pymongo to return the id (#386)
- Document.select_related() now respects `db_alias` (#377)
- Document.select_related() now respects ``db_alias`` (#377)
- Reload uses shard_key if applicable (#384)
- Dynamic fields are ordered based on creation and stored in _fields_ordered (#396)

**Potential breaking change:** http://docs.mongoengine.org/en/latest/upgrade.html#to-0-8-3

- Fixed pickling dynamic documents `_dynamic_fields` (#387)
- Fixed pickling dynamic documents ``_dynamic_fields`` (#387)
- Fixed ListField setslice and delslice dirty tracking (#390)
- Added Django 1.5 PY3 support (#392)
- Added match ($elemMatch) support for EmbeddedDocuments (#379)

@@ -351,7 +537,7 @@ Changes in 0.8.0
================
- Fixed querying ReferenceField custom_id (#317)
- Fixed pickle issues with collections (#316)
- Added `get_next_value` preview for SequenceFields (#319)
- Added ``get_next_value`` preview for SequenceFields (#319)
- Added no_sub_classes context manager and queryset helper (#312)
- Querysets now utilise a local cache
- Changed __len__ behaviour in the queryset (#247, #311)

@@ -380,7 +566,7 @@ Changes in 0.8.0
- Updated connection to use MongoClient (#262, #274)
- Fixed db_alias and inherited Documents (#143)
- Documentation update for document errors (#124)
- Deprecated `get_or_create` (#35)
- Deprecated ``get_or_create`` (#35)
- Updated inheritable objects created by upsert now contain _cls (#118)
- Added support for creating documents with embedded documents in a single operation (#6)
- Added to_json and from_json to Document (#1)

@@ -501,7 +687,7 @@ Changes in 0.7.0
- Fixed UnboundLocalError in composite index with pk field (#88)
- Updated ReferenceFields to optionally store ObjectId strings;
  this will become the default in 0.8 (#89)
- Added FutureWarning - save will default to `cascade=False` in 0.8
- Added FutureWarning - save will default to ``cascade=False`` in 0.8
- Added example of indexing embedded document fields (#75)
- Fixed ImageField resizing when forcing size (#80)
- Add flexibility for fields handling bad data (#78)

@@ -597,7 +783,7 @@ Changes in 0.6.8
================
- Fixed FileField losing reference when no default set
- Removed possible race condition from FileField (grid_file)
- Added assignment to save, can now do: `b = MyDoc(**kwargs).save()`
- Added assignment to save, can now do: ``b = MyDoc(**kwargs).save()``
- Added support for pull operations on nested EmbeddedDocuments
- Added support for choices with GenericReferenceFields
- Added support for choices with GenericEmbeddedDocumentFields

@@ -612,7 +798,7 @@ Changes in 0.6.7
- Fixed indexing on '_id' or 'pk' or 'id'
- Invalid data from the DB now raises an InvalidDocumentError
- Cleaned up the Validation Error - docs and code
- Added meta `auto_create_index` so you can disable index creation
- Added meta ``auto_create_index`` so you can disable index creation
- Added write concern options to inserts
- Fixed typo in meta for index options
- Bug fix: read preference now passed correctly

@@ -653,7 +839,6 @@ Changes in 0.6.1

Changes in 0.6
==============

- Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7
- Added support for covered indexes when inheritance is off
- No longer always upsert on save for items with a '_id'

@@ -878,7 +1063,6 @@ Changes in v0.1.3
  querying takes place
- A few minor bugfixes


Changes in v0.1.2
=================
- Query values may be processed before being used in queries

@@ -887,7 +1071,6 @@ Changes in v0.1.2
- Added ``BooleanField``
- Added ``Document.reload()`` method


Changes in v0.1.1
=================
- Documents may now use capped collections
@@ -1,16 +1,19 @@
from mongoengine import *

connect('tumblelog')
connect("tumblelog")


class Comment(EmbeddedDocument):
    content = StringField()
    name = StringField(max_length=120)


class User(Document):
    email = StringField(required=True)
    first_name = StringField(max_length=50)
    last_name = StringField(max_length=50)


class Post(Document):
    title = StringField(max_length=120, required=True)
    author = ReferenceField(User)
@@ -18,54 +21,57 @@ class Post(Document):
    comments = ListField(EmbeddedDocumentField(Comment))

    # bugfix
    meta = {'allow_inheritance': True}
    meta = {"allow_inheritance": True}


class TextPost(Post):
    content = StringField()


class ImagePost(Post):
    image_path = StringField()


class LinkPost(Post):
    link_url = StringField()


Post.drop_collection()

john = User(email='jdoe@example.com', first_name='John', last_name='Doe')
john = User(email="jdoe@example.com", first_name="John", last_name="Doe")
john.save()

post1 = TextPost(title='Fun with MongoEngine', author=john)
post1.content = 'Took a look at MongoEngine today, looks pretty cool.'
post1.tags = ['mongodb', 'mongoengine']
post1 = TextPost(title="Fun with MongoEngine", author=john)
post1.content = "Took a look at MongoEngine today, looks pretty cool."
post1.tags = ["mongodb", "mongoengine"]
post1.save()

post2 = LinkPost(title='MongoEngine Documentation', author=john)
post2.link_url = 'http://tractiondigital.com/labs/mongoengine/docs'
post2.tags = ['mongoengine']
post2 = LinkPost(title="MongoEngine Documentation", author=john)
post2.link_url = "http://tractiondigital.com/labs/mongoengine/docs"
post2.tags = ["mongoengine"]
post2.save()

print 'ALL POSTS'
print
print("ALL POSTS")
print()
for post in Post.objects:
    print post.title
    print(post.title)
    # print '=' * post.title.count()
    print "=" * 20
    print("=" * 20)

    if isinstance(post, TextPost):
        print post.content
        print(post.content)

    if isinstance(post, LinkPost):
        print 'Link:', post.link_url
        print("Link:", post.link_url)

    print
    print
    print()
    print()

print 'POSTS TAGGED \'MONGODB\''
print
for post in Post.objects(tags='mongodb'):
    print post.title
print
print("POSTS TAGGED 'MONGODB'")
print()
for post in Post.objects(tags="mongodb"):
    print(post.title)
print()

num_posts = Post.objects(tags='mongodb').count()
print 'Found %d posts with tag "mongodb"' % num_posts
num_posts = Post.objects(tags="mongodb").count()
print('Found %d posts with tag "mongodb"' % num_posts)
41
docs/conf.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# MongoEngine documentation build configuration file, created by
# sphinx-quickstart on Sun Nov 22 18:14:13 2009.
@@ -11,7 +10,8 @@
# All configuration values have a default; values that are commented out
# serve to show the default.

import sys, os
import os
import sys

import sphinx_rtd_theme

@@ -20,29 +20,29 @@ import mongoengine
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
sys.path.insert(0, os.path.abspath(".."))

# -- General configuration -----------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo']
extensions = ["sphinx.ext.autodoc", "sphinx.ext.todo", "readthedocs_ext.readthedocs"]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
templates_path = ["_templates"]

# The suffix of source filenames.
source_suffix = '.rst'
source_suffix = ".rst"

# The encoding of source files.
# source_encoding = 'utf-8'

# The master toctree document.
master_doc = 'index'
master_doc = "index"

# General information about the project.
project = u'MongoEngine'
copyright = u'2009, MongoEngine Authors'
project = "MongoEngine"
copyright = "2009, MongoEngine Authors"  # noqa: A001

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -68,7 +68,7 @@ release = mongoengine.get_version()

# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
exclude_trees = ["_build"]

# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None

@@ -85,7 +85,7 @@ exclude_trees = ['_build']
# show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
pygments_style = "sphinx"

# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []

@@ -95,14 +95,12 @@ pygments_style = 'sphinx'

# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'sphinx_rtd_theme'
html_theme = "sphinx_rtd_theme"

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
    'canonical_url': 'http://docs.mongoengine.org/en/latest/'
}
html_theme_options = {"canonical_url": "http://docs.mongoengine.org/en/latest/"}

# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]

@@ -138,8 +136,8 @@ html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
html_sidebars = {
    'index': ['globaltoc.html', 'searchbox.html'],
    '**': ['localtoc.html', 'relations.html', 'searchbox.html']
    "index": ["globaltoc.html", "searchbox.html"],
    "**": ["localtoc.html", "relations.html", "searchbox.html"],
}

@@ -168,13 +166,13 @@ html_sidebars = {
# html_file_suffix = ''

# Output file base name for HTML help builder.
htmlhelp_basename = 'MongoEnginedoc'
htmlhelp_basename = "MongoEnginedoc"

# -- Options for LaTeX output --------------------------------------------------

# The paper size ('letter' or 'a4').
latex_paper_size = 'a4'
latex_paper_size = "a4"

# The font size ('10pt', '11pt' or '12pt').
# latex_font_size = '10pt'

@@ -182,8 +180,7 @@ latex_paper_size = 'a4'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    ('index', 'MongoEngine.tex', 'MongoEngine Documentation',
     'Ross Lawley', 'manual'),
    ("index", "MongoEngine.tex", "MongoEngine Documentation", "Ross Lawley", "manual")
]

# The name of an image file (relative to this directory) to place at the top of
@@ -203,4 +200,4 @@ latex_documents = [
# If false, no module index is generated.
# latex_use_modindex = True

autoclass_content = 'both'
autoclass_content = "both"
12
docs/faq.rst
Normal file
@@ -0,0 +1,12 @@
==========================
Frequently Asked Questions
==========================

Does MongoEngine support asynchronous drivers (Motor, TxMongo)?
---------------------------------------------------------------

No, MongoEngine is exclusively based on PyMongo and isn't designed to support other drivers.
If this is a requirement for your project, check the alternatives: `uMongo`_ and `MotorEngine`_.

.. _uMongo: https://umongo.readthedocs.io/
.. _MotorEngine: https://motorengine.readthedocs.io/
@@ -4,33 +4,76 @@
Connecting to MongoDB
=====================

To connect to a running instance of :program:`mongod`, use the
:func:`~mongoengine.connect` function. The first argument is the name of the
database to connect to::
Connections in MongoEngine are registered globally and are identified with aliases.
If no ``alias`` is provided during the connection, it will use "default" as the alias.

To connect to a running instance of :program:`mongod`, use the :func:`~mongoengine.connect`
function. The first argument is the name of the database to connect to::

    from mongoengine import connect
    connect('project1')

By default, MongoEngine assumes that the :program:`mongod` instance is running
on **localhost** on port **27017**. If MongoDB is running elsewhere, you should
provide the :attr:`host` and :attr:`port` arguments to
:func:`~mongoengine.connect`::
on **localhost** on port **27017**.

    connect('project1', host='192.168.1.35', port=12345)
If MongoDB is running elsewhere, you need to provide details on how to connect. There are two ways of
doing this: using a connection string in URI format (**this is the preferred method**) or individual attributes
provided as keyword arguments.

If the database requires authentication, :attr:`username` and :attr:`password`
arguments should be provided::
Connect with URI string
=======================

    connect('project1', username='webapp', password='pwd123')
When using a connection string in URI format you should specify the connection details
as the :attr:`host` to :func:`~mongoengine.connect`. In a web application context, for instance, the URI
is typically read from the config file::

URI style connections are also supported -- just supply the URI as
the :attr:`host` to
:func:`~mongoengine.connect`::
    connect(host="mongodb://127.0.0.1:27017/my_db")

    connect('project1', host='mongodb://localhost/database_name')
If the database requires authentication, you can specify it in the
URI. As each database can have its own users configured, you need to tell MongoDB
where to look for the user you are working with; that's what the ``?authSource=admin`` bit
of the MongoDB connection string is for::

    # Connects to the 'my_db' database by authenticating
    # with the given credentials against the 'admin' database (the default, as authSource isn't provided)
    connect(host="mongodb://my_user:my_password@127.0.0.1:27017/my_db")

    # Equivalent to the previous connection but explicitly states that
    # it should use admin as the authentication source database
    connect(host="mongodb://my_user:my_password@hostname:port/my_db?authSource=admin")

    # Connects to the 'my_db' database by authenticating
    # with the given credentials against that same database
    connect(host="mongodb://my_user:my_password@127.0.0.1:27017/my_db?authSource=my_db")

The URI string can also be used to configure advanced parameters like ssl, replicaSet, etc. For more
information or examples of URI strings, you can refer to the `official doc <https://docs.mongodb.com/manual/reference/connection-string/>`_::

    connect(host="mongodb://my_user:my_password@127.0.0.1:27017/my_db?authSource=admin&ssl=true&replicaSet=globaldb")

.. note:: URIs containing SRV records (e.g. "mongodb+srv://server.example.com/") can be used as well

Connect with keyword attributes
===============================

The second option for specifying the connection details is to provide the information as keyword
attributes to :func:`~mongoengine.connect`::

    connect('my_db', host='127.0.0.1', port=27017)

If the database requires authentication, the :attr:`username`, :attr:`password`
and :attr:`authentication_source` arguments should be provided::

    connect('my_db', username='my_user', password='my_password', authentication_source='admin')

The set of attributes that :func:`~mongoengine.connect` recognizes includes but is not limited to:
:attr:`host`, :attr:`port`, :attr:`read_preference`, :attr:`username`, :attr:`password`, :attr:`authentication_source`, :attr:`authentication_mechanism`,
:attr:`replicaset`, :attr:`tls`, etc. Most of the parameters accepted by `pymongo.MongoClient <https://pymongo.readthedocs.io/en/stable/api/pymongo/mongo_client.html#pymongo.mongo_client.MongoClient>`_
can be used with :func:`~mongoengine.connect` and will simply be forwarded when instantiating the `pymongo.MongoClient`.
|
||||
|
||||
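
As an illustrative sketch, any such parameter can simply be appended to the call
(``serverSelectionTimeoutMS`` and ``maxPoolSize`` here are standard
``pymongo.MongoClient`` options, not MongoEngine-specific ones)::

    connect(
        'my_db',
        host='127.0.0.1',
        port=27017,
        serverSelectionTimeoutMS=5000,  # forwarded verbatim to pymongo.MongoClient
        maxPoolSize=50,                 # forwarded verbatim to pymongo.MongoClient
    )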

.. note:: The database, username and password from the URI string override the
    corresponding parameters in :func:`~mongoengine.connect`, so this should
    obviously be avoided: ::

        connect(
            db='test',
            host='mongodb://admin:qwerty@localhost/production'
        )

    will establish a connection to the ``production`` database using the ``admin`` username and ``qwerty`` password.

.. note:: Calling :func:`~mongoengine.connect` without any argument will establish
    a connection to the "test" database by default.

Read Preferences
================

As stated above, read preferences are supported through the connection, but also via individual
queries by passing the ``read_preference``::

    from pymongo import ReadPreference

    Bar.objects().read_preference(ReadPreference.PRIMARY)
    Bar.objects(read_preference=ReadPreference.PRIMARY)

@@ -71,28 +108,61 @@ is used.

In the background this uses :func:`~mongoengine.register_connection` to
store the data, and you can register all aliases up front if required.

Documents defined in different databases
----------------------------------------
Individual documents can be attached to different databases by providing a
`db_alias` in their meta data. This allows :class:`~pymongo.dbref.DBRef`
objects to point across databases and collections. Below is an example schema,
using 3 different databases to store data::

    connect(alias='user-db-alias', db='user-db')
    connect(alias='book-db-alias', db='book-db')
    connect(alias='users-books-db-alias', db='users-books-db')

    class User(Document):
        name = StringField()

        meta = {'db_alias': 'user-db-alias'}

    class Book(Document):
        name = StringField()

        meta = {'db_alias': 'book-db-alias'}

    class AuthorBooks(Document):
        author = ReferenceField(User)
        book = ReferenceField(Book)

        meta = {'db_alias': 'users-books-db-alias'}

Disconnecting an existing connection
------------------------------------
The function :func:`~mongoengine.disconnect` can be used to
disconnect a particular connection. This can be used to change a
connection globally::

    from mongoengine import connect, disconnect
    connect('a_db', alias='db1')

    class User(Document):
        name = StringField()
        meta = {'db_alias': 'db1'}

    disconnect(alias='db1')

    connect('another_db', alias='db1')

.. note:: Calling :func:`~mongoengine.disconnect` without any argument
    will disconnect the "default" connection.

.. note:: Since connections get registered globally, it is important
    to use the `disconnect` function from MongoEngine, and not the
    `disconnect()` method of an existing connection (`pymongo.MongoClient`).

.. note:: :class:`~mongoengine.Document` classes cache the pymongo collection;
    using `disconnect` ensures that the cache gets cleaned as well.

Context Managers
================
Sometimes you may want to switch the database or collection to query against.

@@ -119,7 +189,7 @@ access to the same User document across databases::

Switch Collection
-----------------
The :func:`~mongoengine.context_managers.switch_collection` context manager
allows you to change the collection for a given class, allowing quick and easy
access to the same Group document across collections.
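
A minimal sketch of its use, assuming a simple ``Group`` document::

    from mongoengine.context_managers import switch_collection

    class Group(Document):
        name = StringField()

    Group(name='hello - default collection').save()  # saved in the default 'group' collection

    with switch_collection(Group, 'group2000') as Group:
        # inside the block, saves and queries target the 'group2000' collection
        Group(name='hello - group2000 collection').save()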

@@ -76,6 +76,7 @@ are as follows:

* :class:`~mongoengine.fields.EmailField`
* :class:`~mongoengine.fields.EmbeddedDocumentField`
* :class:`~mongoengine.fields.EmbeddedDocumentListField`
* :class:`~mongoengine.fields.EnumField`
* :class:`~mongoengine.fields.FileField`
* :class:`~mongoengine.fields.FloatField`
* :class:`~mongoengine.fields.GenericEmbeddedDocumentField`

@@ -85,6 +86,7 @@ are as follows:

* :class:`~mongoengine.fields.ImageField`
* :class:`~mongoengine.fields.IntField`
* :class:`~mongoengine.fields.ListField`
* :class:`~mongoengine.fields.LongField`
* :class:`~mongoengine.fields.MapField`
* :class:`~mongoengine.fields.ObjectIdField`
* :class:`~mongoengine.fields.ReferenceField`

@@ -155,7 +157,7 @@ arguments can be set on all fields:

    An iterable (e.g. list, tuple or set) of choices to which the value of this
    field should be limited.

    Can either be nested tuples of a value (as stored in mongo) and a
    human readable key::

        SIZE = (('S', 'Small'),

@@ -175,6 +177,21 @@ arguments can be set on all fields:

    class Shirt(Document):
        size = StringField(max_length=3, choices=SIZE)

:attr:`validation` (Optional)
    A callable to validate the value of the field.
    The callable takes the value as a parameter and should raise a ValidationError
    if validation fails, e.g.::

        def _not_empty(val):
            if not val:
                raise ValidationError('value can not be empty')

        class Person(Document):
            name = StringField(validation=_not_empty)


:attr:`**kwargs` (Optional)
    You can supply additional metadata as arbitrary additional keyword
    arguments. You cannot override existing attributes, however. Common

@@ -273,12 +290,12 @@ as the constructor's argument::

    content = StringField()


.. _many-to-many-with-listfields:

Many to Many with ListFields
''''''''''''''''''''''''''''

If you are implementing a many to many relationship via a list of references,
then the references are stored as DBRefs; to query, you need to pass an
instance of the object to the query.
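
A small sketch, assuming hypothetical ``User`` and ``Page`` documents where a
page keeps a list of references to its authors::

    class User(Document):
        name = StringField()

    class Page(Document):
        content = StringField()
        authors = ListField(ReferenceField(User))

    bob = User(name="Bob Jones").save()
    john = User(name="John Smith").save()

    Page(content="Test Page", authors=[bob, john]).save()

    # Pages that Bob authored -- pass the instance; the DBRef is derived from it
    Page.objects(authors__in=[bob])

    # Pages authored by both Bob and John
    Page.objects(authors__all=[bob, john])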

@@ -336,7 +353,7 @@ Its value can take any of the following constants:

    Deletion is denied if there still exist references to the object being
    deleted.
:const:`mongoengine.NULLIFY`
    Any object's fields still referring to the object being deleted are set to None
    (using MongoDB's "unset" operation), effectively nullifying the relationship.
:const:`mongoengine.CASCADE`
    Any object containing fields that are referring to the object being deleted

@@ -410,28 +427,15 @@ either a single field name, or a list or tuple of field names::

    first_name = StringField()
    last_name = StringField(unique_with='first_name')

Skipping Document validation on save
------------------------------------
You can also skip the whole document validation process by setting
``validate=False`` when calling the :meth:`~mongoengine.document.Document.save`
method::

    class Recipient(Document):
        name = StringField()
        email = EmailField()

    recipient = Recipient(name='admin', email='root@localhost')
    recipient.save()                # will raise a ValidationError
    recipient.save(validate=False)  # won't

Document collections
====================
Document classes that inherit **directly** from :class:`~mongoengine.Document`
will have their own **collection** in the database. The name of the collection
is by default the name of the class converted to snake_case (e.g. if your Document class
is named `CompanyUser`, the corresponding collection would be `company_user`). If you need
to change the name of the collection (e.g. to use MongoEngine with an existing database),
then create a class dictionary attribute called :attr:`meta` on your document, and
set :attr:`collection` to the name of the collection that you want your
document class to use.
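
A minimal sketch, reusing the guide's ``Page`` document as an illustration::

    class Page(Document):
        title = StringField(max_length=200, required=True)
        meta = {'collection': 'cmsPage'}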

@@ -492,7 +496,9 @@ the field name with a **#**::

        ]
    }

If a dictionary is passed, then additional options become available. Valid options include,
but are not limited to:


:attr:`fields` (Default: None)
    The fields to index. Specified in the same format as described above.

@@ -513,8 +519,15 @@ If a dictionary is passed then the following options are available:

    Allows you to automatically expire data from a collection by setting the
    time in seconds to expire a field.
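
A sketch of a TTL index declaration (the ``Session`` document and its field are
illustrative; ``expireAfterSeconds`` is the dictionary option described above)::

    class Session(Document):
        created = DateTimeField(default=datetime.utcnow)  # assumes `from datetime import datetime`
        meta = {
            'indexes': [
                {'fields': ['created'], 'expireAfterSeconds': 3600},  # expire an hour after 'created'
            ]
        }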

:attr:`name` (Optional)
    Allows you to specify a name for the index.

:attr:`collation` (Optional)
    Allows you to create case-insensitive indexes (MongoDB v3.4+ only).

.. note::

    Additional options are forwarded as **kwargs to pymongo's create_index method.
    Inheritance adds extra field indices; see :ref:`document-inheritance`.
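
A sketch of how these dictionary options combine in practice (the document, field,
and index name here are illustrative)::

    class BlogPost(Document):
        title = StringField()
        meta = {
            'indexes': [
                {
                    'fields': ['title'],
                    'name': 'title_case_insensitive_idx',
                    'collation': {'locale': 'en', 'strength': 2},  # case-insensitive comparisons
                },
            ]
        }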

Global index default options
----------------------------

@@ -526,16 +539,15 @@ There are a few top level defaults for all indexes that can be set::

    title = StringField()
    rating = StringField()
    meta = {
        'index_opts': {},
        'index_background': True,
        'index_cls': False,
        'auto_create_index': True,
    }


:attr:`index_opts` (Optional)
    Set any default index options - see the `full options list <https://docs.mongodb.com/manual/reference/method/db.collection.createIndex/#db.collection.createIndex>`_

:attr:`index_background` (Optional)
    Set the default value for whether an index should be created in the background.

@@ -549,12 +561,6 @@ There are a few top level defaults for all indexes that can be set::

    in systems where indexes are managed separately. Disabling this will improve
    performance.


Compound Indexes and Indexing sub documents
-------------------------------------------

@@ -690,11 +696,16 @@ subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. ::

Shard keys
==========

If your collection is sharded by multiple keys, then you can improve shard
routing (and thus the performance of your application) by specifying the shard
key, using the :attr:`shard_key` attribute of
:attr:`~mongoengine.Document.meta`. The shard key should be defined as a tuple.

This ensures that the full shard key is sent with the query when calling
methods such as :meth:`~mongoengine.document.Document.save`,
:meth:`~mongoengine.document.Document.update`,
:meth:`~mongoengine.document.Document.modify`, or
:meth:`~mongoengine.document.Document.delete` on an existing
:class:`~mongoengine.Document` instance::

    class LogEntry(Document):

@@ -704,7 +715,8 @@ This ensures that the shard key is sent with the query when calling the

        data = StringField()

        meta = {
            'shard_key': ('machine', 'timestamp'),
            'indexes': ('machine', 'timestamp'),
        }

.. _document-inheritance:

@@ -714,7 +726,7 @@ Document inheritance

To create a specialised type of a :class:`~mongoengine.Document` you have
defined, you may subclass it and add any extra fields or methods you may need.
As this new class is not a direct subclass of
:class:`~mongoengine.Document`, it will not be stored in its own collection; it
will use the same collection as its superclass. This allows for more
convenient and efficient retrieval of related documents -- all you need do is

@@ -734,6 +746,30 @@ document.::

.. note:: From 0.8 onwards :attr:`allow_inheritance` defaults
    to False, meaning you must set it to True to use inheritance.

    Setting :attr:`allow_inheritance` to True should also be used in
    the :class:`~mongoengine.EmbeddedDocument` class if you need to subclass it.

When it comes to querying using :attr:`.objects()`, querying `Page.objects()` will query
both `Page` and `DatedPage`, whereas querying `DatedPage` will only query the `DatedPage` documents.
Behind the scenes, MongoEngine deals with inheritance by adding a :attr:`_cls` attribute that contains
the class name in every document. When a document is loaded, MongoEngine checks
its :attr:`_cls` attribute and uses that class to construct the instance::

    Page(title='a funky title').save()
    DatedPage(title='another title', date=datetime.utcnow()).save()

    print(Page.objects().count())      # 2
    print(DatedPage.objects().count()) # 1

    # print documents in their native form
    # we remove 'id' to avoid polluting the output with unnecessary detail
    qs = Page.objects.exclude('id').as_pymongo()
    print(list(qs))
    # [
    #   {'_cls': u'Page', 'title': 'a funky title'},
    #   {'_cls': u'Page.DatedPage', 'title': u'another title', 'date': datetime.datetime(2019, 12, 13, 20, 16, 59, 993000)}
    # ]

Working with existing data
--------------------------
As MongoEngine no longer defaults to needing :attr:`_cls`, you can quickly and

@@ -41,34 +41,6 @@ already exist, then any changes will be updated atomically. For example::

.. seealso::
    :ref:`guide-atomic-updates`

Pre save data validation and cleaning
-------------------------------------
MongoEngine allows you to create custom cleaning rules for your documents when
calling :meth:`~mongoengine.Document.save`. By providing a custom
:meth:`~mongoengine.Document.clean` method, you can do any pre-validation / data
cleaning.

This might be useful if you want to ensure a default value based on other
document values, for example::

    class Essay(Document):
        status = StringField(choices=('Published', 'Draft'), required=True)
        pub_date = DateTimeField()

        def clean(self):
            """Ensures that only published essays have a `pub_date` and
            automatically sets the pub_date if published and not set"""
            if self.status == 'Draft' and self.pub_date is not None:
                msg = 'Draft entries should not have a publication date.'
                raise ValidationError(msg)
            # Set the pub_date for published items if not set.
            if self.status == 'Published' and self.pub_date is None:
                self.pub_date = datetime.now()

.. note::
    Cleaning is only called if validation is turned on and when calling
    :meth:`~mongoengine.Document.save`.

Cascading Saves
---------------
If your document contains :class:`~mongoengine.fields.ReferenceField` or

@@ -2,16 +2,15 @@

GridFS
======

Writing
-------

GridFS support comes in the form of the :class:`~mongoengine.fields.FileField` field
object. This field acts as a file-like object and provides a couple of
different ways of inserting and retrieving data. Arbitrary metadata such as
content type can also be stored alongside the files. The object returned when accessing a
FileField is a proxy to `Pymongo's GridFS <https://api.mongodb.com/python/current/examples/gridfs.html#gridfs-example>`_.
In the following example, a document is created to store details about animals, including a photo::

    class Animal(Document):
        genus = StringField()

@@ -20,8 +19,8 @@ a document is created to store details about animals, including a photo::

    marmot = Animal(genus='Marmota', family='Sciuridae')

    with open('marmot.jpg', 'rb') as fd:
        marmot.photo.put(fd, content_type='image/jpeg')
    marmot.save()

Retrieval
@@ -34,6 +33,20 @@ field. The file can also be retrieved just as easily::

    photo = marmot.photo.read()
    content_type = marmot.photo.content_type

.. note:: If you need to read() the content of a file multiple times, you'll need to "rewind"
    the file-like object using `seek`::

        marmot = Animal.objects(genus='Marmota').first()
        content1 = marmot.photo.read()
        assert content1 != ""

        content2 = marmot.photo.read()  # will be empty
        assert content2 == ""

        marmot.photo.seek(0)  # rewind the file by setting the cursor position back to 0
        content3 = marmot.photo.read()
        assert content3 == content1

Streaming
---------

@@ -53,7 +66,8 @@ Deletion

Deleting stored files is achieved with the :func:`delete` method::

    marmot.photo.delete()  # deletes the GridFS document
    marmot.save()          # saves the GridFS reference (now None) contained in the marmot instance

.. warning::

@@ -71,4 +85,5 @@ Files can be replaced with the :func:`replace` method. This works just like
the :func:`put` method so even metadata can (and should) be replaced::

    another_marmot = open('another_marmot.png', 'rb')
    marmot.photo.replace(another_marmot, content_type='image/png')  # replaces the GridFS document
    marmot.save()  # replaces the GridFS reference contained in the marmot instance

@@ -10,7 +10,10 @@ User Guide

    defining-documents
    document-instances
    querying
    validation
    gridfs
    signals
    text-indexes
    migration
    logging-monitoring
    mongomock

@@ -12,7 +12,7 @@ MongoEngine is available on PyPI, so you can use :program:`pip`:

.. code-block:: console

    $ python -m pip install mongoengine

Alternatively, if you don't have setuptools installed, `download it from PyPi
<http://pypi.python.org/pypi/mongoengine/>`_ and run

docs/guide/logging-monitoring.rst (new file, 80 lines)
@@ -0,0 +1,80 @@

==================
Logging/Monitoring
==================

It is possible to use `pymongo.monitoring <https://api.mongodb.com/python/current/api/pymongo/monitoring.html>`_ to monitor
the driver events (e.g. queries, connections, etc). This can be handy if you want to monitor the queries issued by
MongoEngine to the driver.

To use `pymongo.monitoring` with MongoEngine, you need to make sure that you register the listeners
**before** establishing the database connection (i.e. calling `connect`).

The following snippet provides a basic logging of all command events:

.. code-block:: python

    import logging
    from pymongo import monitoring
    from mongoengine import *

    log = logging.getLogger()
    log.setLevel(logging.DEBUG)
    logging.basicConfig(level=logging.DEBUG)


    class CommandLogger(monitoring.CommandListener):

        def started(self, event):
            log.debug("Command {0.command_name} with request id "
                      "{0.request_id} started on server "
                      "{0.connection_id}".format(event))

        def succeeded(self, event):
            log.debug("Command {0.command_name} with request id "
                      "{0.request_id} on server {0.connection_id} "
                      "succeeded in {0.duration_micros} "
                      "microseconds".format(event))

        def failed(self, event):
            log.debug("Command {0.command_name} with request id "
                      "{0.request_id} on server {0.connection_id} "
                      "failed in {0.duration_micros} "
                      "microseconds".format(event))

    monitoring.register(CommandLogger())


    class Jedi(Document):
        name = StringField()


    connect()


    log.info('GO!')

    log.info('Saving an item through MongoEngine...')
    Jedi(name='Obi-Wan Kenobii').save()

    log.info('Querying through MongoEngine...')
    obiwan = Jedi.objects.first()

    log.info('Updating through MongoEngine...')
    obiwan.name = 'Obi-Wan Kenobi'
    obiwan.save()

Executing this prints the following output::

    INFO:root:GO!
    INFO:root:Saving an item through MongoEngine...
    DEBUG:root:Command insert with request id 1681692777 started on server ('localhost', 27017)
    DEBUG:root:Command insert with request id 1681692777 on server ('localhost', 27017) succeeded in 562 microseconds
    INFO:root:Querying through MongoEngine...
    DEBUG:root:Command find with request id 1714636915 started on server ('localhost', 27017)
    DEBUG:root:Command find with request id 1714636915 on server ('localhost', 27017) succeeded in 341 microseconds
    INFO:root:Updating through MongoEngine...
    DEBUG:root:Command update with request id 1957747793 started on server ('localhost', 27017)
    DEBUG:root:Command update with request id 1957747793 on server ('localhost', 27017) succeeded in 455 microseconds

More details can of course be obtained by checking the `event` argument from the `CommandListener`.

docs/guide/migration.rst (new file, 267 lines)
@@ -0,0 +1,267 @@

===================
Documents migration
===================

The structure of your documents and their associated mongoengine schemas are likely
to change over the lifetime of an application. This section provides guidance and
recommendations on how to deal with migrations.

Due to the very flexible nature of mongodb, migrations of models aren't trivial, and
for people who know about `alembic` for `sqlalchemy`, there is unfortunately no equivalent
library that will manage the migration in an automatic fashion for mongoengine.

Example 1: Addition of a field
==============================

Let's start by taking a simple example of a model change and review the different options you
have to deal with the migration.

Let's assume we start with the following schema and save an instance:

.. code-block:: python

    class User(Document):
        name = StringField()

    User(name="John Doe").save()

    # print the objects as they exist in mongodb
    print(User.objects().as_pymongo())  # [{u'_id': ObjectId('5d06b9c3d7c1f18db3e7c874'), u'name': u'John Doe'}]

On the next version of your application, let's now assume that a new field `enabled` gets added to the
existing ``User`` model with a `default=True`. Thus you simply update the ``User`` class to the following:

.. code-block:: python

    class User(Document):
        name = StringField(required=True)
        enabled = BooleanField(default=True)

Without applying any migration, we now reload an object from the database into the ``User`` class
and check its `enabled` attribute:

.. code-block:: python

    assert User.objects.count() == 1
    user = User.objects().first()
    assert user.enabled is True
    assert User.objects(enabled=True).count() == 0   # uh?
    assert User.objects(enabled=False).count() == 0  # uh?

    # this is consistent with what we have in the database
    # in fact, 'enabled' does not exist
    print(User.objects().as_pymongo().first())  # {u'_id': ObjectId('5d06b9c3d7c1f18db3e7c874'), u'name': u'John'}
    assert User.objects(enabled=None).count() == 1

As you can see, even though the document wasn't updated, mongoengine applies the default value seamlessly when it
loads the pymongo dict into a ``User`` instance. At first sight it looks like you don't need to migrate the
existing documents when adding new fields, but this actually leads to inconsistencies when it comes to querying.

In fact, when querying, mongoengine isn't trying to account for the default value of the new field, so
if you don't actually migrate the existing documents, you are taking the risk that querying/updating
will miss relevant records.

When adding fields/modifying default values, you can use any of the following to do the migration
as a standalone script:

.. code-block:: python

    # Use mongoengine to set a default value for a given field
    User.objects().update(enabled=True)
    # or use pymongo
    user_coll = User._get_collection()
    user_coll.update_many({}, {'$set': {'enabled': True}})

Example 2: Inheritance change
=============================

Let's consider the following example:

.. code-block:: python

    class Human(Document):
        name = StringField()
        meta = {"allow_inheritance": True}

    class Jedi(Human):
        dark_side = BooleanField()
        light_saber_color = StringField()

    Jedi(name="Darth Vader", dark_side=True, light_saber_color="red").save()
    Jedi(name="Obi Wan Kenobi", dark_side=False, light_saber_color="blue").save()

    assert Human.objects.count() == 2
    assert Jedi.objects.count() == 2

    # Let's check how these documents got stored in mongodb
    print(Jedi.objects.as_pymongo())
    # [
    #   {'_id': ObjectId('5fac4aaaf61d7fb06046e0f9'), '_cls': 'Human.Jedi', 'name': 'Darth Vader', 'dark_side': True, 'light_saber_color': 'red'},
    #   {'_id': ObjectId('5fac4ac4f61d7fb06046e0fa'), '_cls': 'Human.Jedi', 'name': 'Obi Wan Kenobi', 'dark_side': False, 'light_saber_color': 'blue'}
    # ]

As you can observe, when you use inheritance, MongoEngine stores a field named '_cls' behind the scenes to keep
track of the Document class.

Let's now take the scenario where you want to refactor the inheritance schema and:

- have the Jedis with dark_side=False/True become GoodJedis/BadSiths respectively
- get rid of the 'dark_side' field

i.e. move to the following schemas:

.. code-block:: python

    # unchanged
    class Human(Document):
        name = StringField()
        meta = {"allow_inheritance": True}

    # attribute 'dark_side' removed
    class GoodJedi(Human):
        light_saber_color = StringField()

    # new class
    class BadSith(Human):
        light_saber_color = StringField()

MongoEngine doesn't know about these changes or how to map them to the existing data,
so if you don't apply any migration, you will observe strange behavior, as if the collection were suddenly
empty.

.. code-block:: python

    # As a reminder, the documents that we inserted
    # have the _cls field = 'Human.Jedi'

    # The following has no match,
    # because the query that is used behind the scenes is
    # filtering on {'_cls': 'Human.GoodJedi'}
    assert GoodJedi.objects().count() == 0

    # The following also has no match,
    # because it is filtering on {'_cls': {'$in': ('Human', 'Human.GoodJedi', 'Human.BadSith')}},
    # which has no match either
    assert Human.objects.count() == 0
    assert Human.objects.first() is None

    # If we bypass MongoEngine and make use of the underlying driver (PyMongo),
    # we can see that the documents are there
    humans_coll = Human._get_collection()
    assert humans_coll.count_documents({}) == 2
    # print the first document
    print(humans_coll.find_one())
    # {'_id': ObjectId('5fac4aaaf61d7fb06046e0f9'), '_cls': 'Human.Jedi', 'name': 'Darth Vader', 'dark_side': True, 'light_saber_color': 'red'}

As you can see, the first obvious problem is that we need to modify the '_cls' values based on the existing
values of the 'dark_side' field.

.. code-block:: python

    humans_coll = Human._get_collection()
    old_class = 'Human.Jedi'
    good_jedi_class = 'Human.GoodJedi'
    bad_sith_class = 'Human.BadSith'
    humans_coll.update_many({'_cls': old_class, 'dark_side': False}, {'$set': {'_cls': good_jedi_class}})
    humans_coll.update_many({'_cls': old_class, 'dark_side': True}, {'$set': {'_cls': bad_sith_class}})

Let's now check if querying improved in MongoEngine:

.. code-block:: python

    assert GoodJedi.objects().count() == 1  # Hoorah!
    assert BadSith.objects().count() == 1   # Hoorah!
    assert Human.objects.count() == 2       # Hoorah!

    # let's now check that documents load correctly
    jedi = GoodJedi.objects().first()
    # raises FieldDoesNotExist: The fields "{'dark_side'}" do not exist on the document "Human.GoodJedi"

In fact, we only took care of renaming the _cls values, but we haven't removed the 'dark_side' field,
which no longer exists on the GoodJedi and BadSith models.
Let's remove the field from the collections:

.. code-block:: python

    humans_coll = Human._get_collection()
    humans_coll.update_many({}, {'$unset': {'dark_side': 1}})

.. note:: We did this migration in 2 different steps for the sake of the example, but it could have been combined
    with the migration of the _cls fields: ::

        humans_coll.update_many(
            {'_cls': old_class, 'dark_side': False},
            {
                '$set': {'_cls': good_jedi_class},
                '$unset': {'dark_side': 1}
            }
        )

And verify that the documents now load correctly:

.. code-block:: python

    jedi = GoodJedi.objects().first()
    assert jedi.name == "Obi Wan Kenobi"

    sith = BadSith.objects().first()
    assert sith.name == "Darth Vader"

Another way of dealing with this migration is to iterate over
the documents and update/replace them one by one. This is much slower, but
it is often useful for complex migrations of Document models.

.. code-block:: python

    for doc in humans_coll.find():
        if doc['_cls'] == 'Human.Jedi':
            doc['_cls'] = 'Human.BadSith' if doc['dark_side'] else 'Human.GoodJedi'
            doc.pop('dark_side')
            humans_coll.replace_one({'_id': doc['_id']}, doc)

.. warning:: Be aware of this `flaw <https://groups.google.com/g/mongodb-user/c/AFC1ia7MHzk>`_ if you modify documents while iterating.

Recommendations
===============

- Write migration scripts whenever you make changes to the model schemas.
- Using :class:`~mongoengine.DynamicDocument` or ``meta = {"strict": False}`` may help to avoid some migrations, or to have the 2 versions of your application co-exist.
- Write post-processing checks to verify that the migration scripts worked. See below.

Post-processing checks
======================

The following recipe can be used to sanity-check a Document collection after you have applied a migration.
It does not make any assumption about what was migrated; it will fetch 1000 objects randomly and
run some quick checks on the documents to make sure they look OK. As it is, it will fail
on the first occurrence of an error, but this is something that can be adapted based on your needs.

.. code-block:: python

    def get_random_oids(collection, sample_size):
        pipeline = [{"$project": {'_id': 1}}, {"$sample": {"size": sample_size}}]
        return [s['_id'] for s in collection.aggregate(pipeline)]

    def get_random_documents(DocCls, sample_size):
        doc_collection = DocCls._get_collection()
        random_oids = get_random_oids(doc_collection, sample_size)
        return DocCls.objects(id__in=random_oids)

    def check_documents(DocCls, sample_size):
        for doc in get_random_documents(DocCls, sample_size):
            # general validation (types and values)
            doc.validate()

            # load all subfields,
            # this may trigger additional queries if you have ReferenceFields
            # so it may be slow
            for field in doc._fields:
                try:
                    getattr(doc, field)
                except Exception:
                    LOG.warning(f"Could not load field {field} in Document {doc.id}")
                    raise

    check_documents(Human, sample_size=1000)

@@ -19,3 +19,30 @@ or with an alias:

    connect('mongoenginetest', host='mongomock://localhost', alias='testdb')
    conn = get_connection('testdb')

Example of test file:
---------------------
.. code-block:: python

    import unittest
    from mongoengine import connect, disconnect

    class Person(Document):
        name = StringField()

    class TestPerson(unittest.TestCase):

        @classmethod
        def setUpClass(cls):
            connect('mongoenginetest', host='mongomock://localhost')

        @classmethod
        def tearDownClass(cls):
            disconnect()

        def test_thing(self):
            pers = Person(name='John')
            pers.save()

            fresh_pers = Person.objects().first()
            assert fresh_pers.name == 'John'

@@ -64,7 +64,7 @@ Available operators are as follows:

* ``gt`` -- greater than
* ``gte`` -- greater than or equal to
* ``not`` -- negate a standard check, may be used before other operators (e.g.
  ``Q(age__not__mod=(5, 0))``)
* ``in`` -- value is in list (a list of values should be provided)
* ``nin`` -- value is not in list (a list of values should be provided)
* ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values

@@ -222,6 +222,18 @@ keyword argument::

.. versionadded:: 0.4

Sorting/Ordering results
========================
It is possible to order the results by 1 or more keys using :meth:`~mongoengine.queryset.QuerySet.order_by`.
The order may be specified by prepending each of the keys by "+" or "-". Ascending order is assumed if there's no prefix::

    # Order by ascending date
    blogs = BlogPost.objects().order_by('date')  # equivalent to .order_by('+date')

    # Order by ascending date first, then descending title
    blogs = BlogPost.objects().order_by('+date', '-title')


Limiting and skipping results
=============================
Just as with traditional ORMs, you may limit the number of results returned or

@@ -349,9 +361,9 @@ Just as with limiting and skipping results, there is a method on a

You could technically use ``len(User.objects)`` to get the same result, but it
would be significantly slower than :meth:`~mongoengine.queryset.QuerySet.count`.
When you execute a server-side count query, you let MongoDB do the heavy
lifting and you receive a single integer over the wire. Meanwhile, ``len()``
retrieves all the results, places them in a local cache, and finally counts
them. If we compare the performance of the two operations, ``len()`` is much slower
than :meth:`~mongoengine.queryset.QuerySet.count`.
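
As a quick illustrative sketch, reusing the ``User`` document from above::

    n_docs = User.objects.count()  # a single integer comes back over the wire
    n_docs = len(User.objects)     # fetches every document into a local cache first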

Further aggregation
@@ -386,6 +398,25 @@ would be generating "tag-clouds"::

    top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10]


MongoDB aggregation API
-----------------------
If you need to run aggregation pipelines, MongoEngine provides an entry point to `Pymongo's aggregation framework <https://api.mongodb.com/python/current/examples/aggregation.html#aggregation-framework>`_
through :meth:`~mongoengine.queryset.QuerySet.aggregate`. Check out Pymongo's documentation for the syntax and pipeline.
An example of its use would be::

    class Person(Document):
        name = StringField()

    Person(name='John').save()
    Person(name='Bob').save()

    pipeline = [
        {"$sort": {"name": -1}},
        {"$project": {"_id": 0, "name": {"$toUpper": "$name"}}}
    ]
    data = Person.objects().aggregate(pipeline)
    assert list(data) == [{'name': 'BOB'}, {'name': 'JOHN'}]

Query efficiency and performance
================================

@@ -456,14 +487,14 @@ data. To turn off dereferencing of the results of a query use
:func:`~mongoengine.queryset.QuerySet.no_dereference` on the queryset like so::

    post = Post.objects.no_dereference().first()
    assert(isinstance(post.author, DBRef))

You can also turn off all dereferencing for a fixed period by using the
:class:`~mongoengine.context_managers.no_dereference` context manager::

    with no_dereference(Post) as Post:
        post = Post.objects.first()
        assert(isinstance(post.author, DBRef))

    # Outside the context manager dereferencing occurs.
    assert(isinstance(post.author, User))

@@ -566,7 +597,8 @@ cannot use the `$` syntax in keyword arguments it has been mapped to `S`::

    ['database', 'mongodb']

From MongoDB version 2.6, the push operator supports a $position value, which allows
it to push values at a given index::

    >>> post = BlogPost(title="Test", tags=["mongo"])
    >>> post.save()
    >>> post.update(push__tags__0=["database", "code"])

@@ -577,7 +609,7 @@ to push values with index.

.. note::
    Currently only top level lists are handled; future versions of mongodb /
    pymongo plan to support nested positional operators. See `The $ positional
    operator <https://docs.mongodb.com/manual/tutorial/update-documents/#Updating-The%24positionaloperator>`_.

Server-side javascript execution
================================

@@ -113,6 +113,10 @@ handlers within your subclass::

    signals.pre_save.connect(Author.pre_save, sender=Author)
    signals.post_save.connect(Author.post_save, sender=Author)

.. warning::

    Note that EmbeddedDocument only supports pre/post_init signals. pre/post_save, etc. should be attached to Document classes only. Attaching pre_save to an EmbeddedDocument is silently ignored.

Finally, you can also use this small decorator to quickly create a number of
signals and attach them to your :class:`~mongoengine.Document` or
:class:`~mongoengine.EmbeddedDocument` subclasses as class decorators.
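
A minimal sketch of such a decorator (the ``handler`` helper and the
``update_modified`` hook below are illustrative, not part of MongoEngine's API)::

    def handler(event):
        """Signal decorator to allow use of callback functions as class decorators."""
        def decorator(fn):
            def apply(cls):
                event.connect(fn, sender=cls)
                return cls
            fn.apply = apply
            return fn
        return decorator

    @handler(signals.pre_save)
    def update_modified(sender, document):
        document.modified = datetime.utcnow()

    @update_modified.apply
    class Record(Document):
        modified = DateTimeField()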

docs/guide/validation.rst (new file, 122 lines)
@@ -0,0 +1,122 @@

====================
Document Validation
====================

By design, MongoEngine strictly validates documents right before they are inserted into MongoDB
and makes sure they are consistent with the fields defined in your models.

MongoEngine makes the assumption that the documents that exist in the DB are compliant with the schema.
This means that MongoEngine will not validate a document when an object is loaded from the DB into an instance
of your model, but this operation may fail under some circumstances (e.g. if there is a field in
the document fetched from the database that is not defined in your model).

Built-in validation
===================

MongoEngine provides different fields that encapsulate the corresponding validation
out of the box. Validation runs when calling `.validate()` or `.save()`.

.. code-block:: python

    from mongoengine import Document, EmailField, IntField

    class User(Document):
        email = EmailField()
        age = IntField(min_value=0, max_value=99)

    user = User(email='invalid@', age=24)
    user.validate()  # raises ValidationError (Invalid email address: ['email'])
    user.save()      # raises ValidationError (Invalid email address: ['email'])

    user2 = User(email='john.doe@garbage.com', age=1000)
    user2.save()  # raises ValidationError (Integer value is too large: ['age'])

Custom validation
=================

The following features can be used to customize the validation:

* Field `validation` parameter

  .. code-block:: python

      def not_john_doe(name):
          if name == 'John Doe':
              raise ValidationError("John Doe is not a valid name")

      class Person(Document):
          full_name = StringField(validation=not_john_doe)

      Person(full_name='Billy Doe').save()
      Person(full_name='John Doe').save()  # raises ValidationError (John Doe is not a valid name)

* Document `clean` method

  This method is called as part of :meth:`~mongoengine.document.Document.save` and should be used to provide
  custom model validation and/or to modify some of the field values prior to validation.
  For instance, you could use it to automatically provide a value for a field, or to do validation
  that requires access to more than a single field.

  .. code-block:: python

      class Essay(Document):
          status = StringField(choices=('Published', 'Draft'), required=True)
          pub_date = DateTimeField()

          def clean(self):
              # Validate that only published essays have a `pub_date`
              if self.status == 'Draft' and self.pub_date is not None:
                  raise ValidationError('Draft entries should not have a publication date.')
              # Set the pub_date for published items if not set.
              if self.status == 'Published' and self.pub_date is None:
                  self.pub_date = datetime.now()

  .. note::
      Cleaning is only called if validation is turned on and when calling
      :meth:`~mongoengine.Document.save`.

* Adding custom Field classes

  We recommend as much as possible to use fields provided by MongoEngine. However, it is also possible
  to subclass a Field and encapsulate some validation by overriding the `validate` method.

  .. code-block:: python

      class AgeField(IntField):

          def validate(self, value):
              super(AgeField, self).validate(value)  # let IntField.validate run first
              if value == 60:
                  self.error('60 is not allowed')

      class Person(Document):
          age = AgeField(min_value=0, max_value=99)

      Person(age=20).save()    # passes
      Person(age=1000).save()  # raises ValidationError (Integer value is too large: ['age'])
      Person(age=60).save()    # raises ValidationError (Person:None) (60 is not allowed: ['age'])

  .. note::

      When overriding `validate`, use `self.error("your-custom-error")` instead of raising a ValidationError explicitly;
      it will provide better context with the error message.

Skipping validation
===================

Although discouraged, as it allows you to violate field constraints, if for some reason you need to disable
the validation and cleaning of a document when you call :meth:`~mongoengine.document.Document.save`, you can use `.save(validate=False)`.

.. code-block:: python

    class Person(Document):
        age = IntField(max_value=100)

    Person(age=1000).save()  # raises ValidationError (Integer value is too large)

    Person(age=1000).save(validate=False)
    person = Person.objects.first()
    assert person.age == 1000

@@ -7,7 +7,7 @@ MongoDB. To install it, simply run

.. code-block:: console

    $ python -m pip install -U mongoengine

:doc:`tutorial`
    A quick tutorial building a tumblelog to get you up and running with

@@ -23,9 +23,18 @@ MongoDB. To install it, simply run

:doc:`upgrade`
    How to upgrade MongoEngine.

:doc:`faq`
    Frequently Asked Questions

:doc:`django`
    Using MongoEngine and Django

MongoDB and driver support
--------------------------

MongoEngine is based on the PyMongo driver and tested against multiple versions of MongoDB.
For further details, please refer to the `readme <https://github.com/MongoEngine/mongoengine#mongoengine>`_.

Community
---------

@@ -73,6 +82,7 @@ formats for offline reading.

    apireference
    changelog
    upgrade
    faq
    django

Indices and tables
@@ -81,4 +91,3 @@ Indices and tables

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

docs/requirements.txt (new file, 3 lines)
@@ -0,0 +1,3 @@

Sphinx==3.3.0
sphinx-rtd-theme==0.5.0
readthedocs-sphinx-ext==2.1.1

@@ -18,7 +18,7 @@ location --- running it locally will be easier, but if that is not an option

then it may be run on a remote server. If you haven't installed MongoEngine,
simply use pip to install it like so::

    $ python -m pip install mongoengine

Before we can start using MongoEngine, we need to tell it how to connect to our
instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect`

@@ -6,6 +6,11 @@ Development

***********
(Fill this out whenever you introduce breaking changes to MongoEngine)

URLField's constructor no longer takes `verify_exists`.

0.15.0
******

0.14.0
******
This release includes a few bug fixes and a significant code cleanup. The most

@@ -47,7 +52,7 @@ rename its occurrences.

This release includes a major rehaul of MongoEngine's code quality and
introduces a few breaking changes. It also touches many different parts of
the package, and although all the changes have been tested and scrutinized,
you're encouraged to thoroughly test the upgrade.

The first breaking change involves renaming `ConnectionError` to `MongoEngineConnectionError`.
If you import or catch this exception, you'll need to rename it in your code.

@@ -80,10 +85,10 @@ by default from now on.

The 0.8.7 package on pypi was corrupted. If upgrading from 0.8.7 to 0.9.0 please follow: ::

    python -m pip uninstall pymongo
    python -m pip uninstall mongoengine
    python -m pip install pymongo==2.8
    python -m pip install mongoengine

0.8.7
*****

@@ -148,7 +153,7 @@ inherited classes like so: ::

    # 4. Remove indexes
    info = collection.index_information()
    indexes_to_drop = [key for key, value in info.items()
                       if '_types' in dict(value['key'])]
    for index in indexes_to_drop:
        collection.drop_index(index)

@@ -1,36 +1,43 @@

# Import submodules so that we can expose their __all__
from mongoengine import (
    connection,
    document,
    errors,
    fields,
    queryset,
    signals,
)

# Import everything from each submodule so that it can be accessed via
# mongoengine, e.g. instead of `from mongoengine.connection import connect`,
# users can simply use `from mongoengine import connect`, or even
# `from mongoengine import *` and then `connect('testdb')`.
from mongoengine.connection import *  # noqa: F401
from mongoengine.document import *  # noqa: F401
from mongoengine.errors import *  # noqa: F401
from mongoengine.fields import *  # noqa: F401
from mongoengine.queryset import *  # noqa: F401
from mongoengine.signals import *  # noqa: F401

__all__ = (
    list(document.__all__)
    + list(fields.__all__)
    + list(connection.__all__)
    + list(queryset.__all__)
    + list(signals.__all__)
    + list(errors.__all__)
)


VERSION = (0, 23, 1)


def get_version():
    """Return the VERSION as a string.

    For example, if `VERSION == (0, 10, 7)`, return '0.10.7'.
    """
    return ".".join(map(str, VERSION))


__version__ = get_version()

@@ -12,17 +12,22 @@ from mongoengine.base.metaclasses import *

__all__ = (
    # common
    "UPDATE_OPERATORS",
    "_document_registry",
    "get_document",
    # datastructures
    "BaseDict",
    "BaseList",
    "EmbeddedDocumentList",
    "LazyReference",
    # document
    "BaseDocument",
    # fields
    "BaseField",
    "ComplexBaseField",
    "ObjectIdField",
    "GeoJsonBaseField",
    # metaclasses
    "DocumentMetaclass",
    "TopLevelDocumentMetaclass",
)

@@ -1,32 +1,62 @@

from mongoengine.errors import NotRegistered

__all__ = ("UPDATE_OPERATORS", "get_document", "_document_registry")


UPDATE_OPERATORS = {
    "set",
    "unset",
    "inc",
    "dec",
    "mul",
    "pop",
    "push",
    "push_all",
    "pull",
    "pull_all",
    "add_to_set",
    "set_on_insert",
    "min",
    "max",
    "rename",
}


_document_registry = {}
def get_document(name):
|
||||
"""Get a document class by name."""
|
||||
"""Get a registered Document class by name."""
|
||||
doc = _document_registry.get(name, None)
|
||||
if not doc:
|
||||
# Possible old style name
|
||||
single_end = name.split('.')[-1]
|
||||
compound_end = '.%s' % single_end
|
||||
possible_match = [k for k in _document_registry.keys()
|
||||
if k.endswith(compound_end) or k == single_end]
|
||||
single_end = name.split(".")[-1]
|
||||
compound_end = ".%s" % single_end
|
||||
possible_match = [
|
||||
k for k in _document_registry if k.endswith(compound_end) or k == single_end
|
||||
]
|
||||
if len(possible_match) == 1:
|
||||
doc = _document_registry.get(possible_match.pop(), None)
|
||||
if not doc:
|
||||
raise NotRegistered("""
|
||||
raise NotRegistered(
|
||||
"""
|
||||
`%s` has not been registered in the document registry.
|
||||
Importing the document class automatically registers it, has it
|
||||
been imported?
|
||||
""".strip() % name)
|
||||
""".strip()
|
||||
% name
|
||||
)
|
||||
return doc
|
||||
|
||||
|
||||
def _get_documents_by_db(connection_alias, default_connection_alias):
|
||||
"""Get all registered Documents class attached to a given database"""
|
||||
|
||||
def get_doc_alias(doc_cls):
|
||||
return doc_cls._meta.get("db_alias", default_connection_alias)
|
||||
|
||||
return [
|
||||
doc_cls
|
||||
for doc_cls in _document_registry.values()
|
||||
if get_doc_alias(doc_cls) == connection_alias
|
||||
]
|
||||
|
||||
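The fallback branch in `get_document` resolves an unqualified name against compound registry keys; a toy sketch of that lookup (the registry contents here are hypothetical, not part of the diff):

```python
# Toy registry: a lookup for "Person" matches the single entry whose key
# ends with ".Person", mimicking the old-style-name fallback above.
_registry = {"Base.Person": object()}

def lookup(name):
    doc = _registry.get(name)
    if not doc:
        single_end = name.split(".")[-1]
        compound_end = ".%s" % single_end
        matches = [k for k in _registry if k.endswith(compound_end) or k == single_end]
        if len(matches) == 1:
            doc = _registry.get(matches.pop())
    return doc

assert lookup("Person") is _registry["Base.Person"]
```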
@@ -1,13 +1,41 @@
 import itertools
 import weakref

 from bson import DBRef
-import six

 from mongoengine.common import _import_class
 from mongoengine.errors import DoesNotExist, MultipleObjectsReturned

-__all__ = ('BaseDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference')
+__all__ = (
+    "BaseDict",
+    "StrictDict",
+    "BaseList",
+    "EmbeddedDocumentList",
+    "LazyReference",
+)


+def mark_as_changed_wrapper(parent_method):
+    """Decorator that ensures _mark_as_changed method gets called."""
+
+    def wrapper(self, *args, **kwargs):
+        # Can't use super() in the decorator.
+        result = parent_method(self, *args, **kwargs)
+        self._mark_as_changed()
+        return result
+
+    return wrapper
+
+
+def mark_key_as_changed_wrapper(parent_method):
+    """Decorator that ensures _mark_as_changed method gets called with the key argument"""
+
+    def wrapper(self, key, *args, **kwargs):
+        # Can't use super() in the decorator.
+        result = parent_method(self, key, *args, **kwargs)
+        self._mark_as_changed(key)
+        return result
+
+    return wrapper
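These two wrappers replace the hand-written `pop`/`clear`/`append`-style overrides removed further down. A minimal self-contained sketch of the pattern (the `Tracked` class is hypothetical):

```python
def mark_as_changed_wrapper(parent_method):
    # Same idea as above: call the built-in method, then record the change.
    def wrapper(self, *args, **kwargs):
        result = parent_method(self, *args, **kwargs)
        self._mark_as_changed()
        return result
    return wrapper

class Tracked(dict):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.changes = 0

    def _mark_as_changed(self, key=None):
        self.changes += 1

    pop = mark_as_changed_wrapper(dict.pop)

d = Tracked({"a": 1})
d.pop("a")
assert d.changes == 1
```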
 class BaseDict(dict):
@@ -18,46 +46,36 @@ class BaseDict(dict):
     _name = None

     def __init__(self, dict_items, instance, name):
-        Document = _import_class('Document')
-        EmbeddedDocument = _import_class('EmbeddedDocument')
+        BaseDocument = _import_class("BaseDocument")

-        if isinstance(instance, (Document, EmbeddedDocument)):
+        if isinstance(instance, BaseDocument):
             self._instance = weakref.proxy(instance)
         self._name = name
-        super(BaseDict, self).__init__(dict_items)
+        super().__init__(dict_items)

-    def __getitem__(self, key, *args, **kwargs):
-        value = super(BaseDict, self).__getitem__(key)
+    def get(self, key, default=None):
+        # get does not use __getitem__ by default so we must override it as well
+        try:
+            return self.__getitem__(key)
+        except KeyError:
+            return default

-        EmbeddedDocument = _import_class('EmbeddedDocument')
+    def __getitem__(self, key):
+        value = super().__getitem__(key)
+
+        EmbeddedDocument = _import_class("EmbeddedDocument")
         if isinstance(value, EmbeddedDocument) and value._instance is None:
             value._instance = self._instance
-        elif not isinstance(value, BaseDict) and isinstance(value, dict):
-            value = BaseDict(value, None, '%s.%s' % (self._name, key))
-            super(BaseDict, self).__setitem__(key, value)
+        elif isinstance(value, dict) and not isinstance(value, BaseDict):
+            value = BaseDict(value, None, f"{self._name}.{key}")
+            super().__setitem__(key, value)
             value._instance = self._instance
-        elif not isinstance(value, BaseList) and isinstance(value, list):
-            value = BaseList(value, None, '%s.%s' % (self._name, key))
-            super(BaseDict, self).__setitem__(key, value)
+        elif isinstance(value, list) and not isinstance(value, BaseList):
+            value = BaseList(value, None, f"{self._name}.{key}")
+            super().__setitem__(key, value)
             value._instance = self._instance
         return value

-    def __setitem__(self, key, value, *args, **kwargs):
-        self._mark_as_changed(key)
-        return super(BaseDict, self).__setitem__(key, value)
-
-    def __delete__(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseDict, self).__delete__(*args, **kwargs)
-
-    def __delitem__(self, key, *args, **kwargs):
-        self._mark_as_changed(key)
-        return super(BaseDict, self).__delitem__(key)
-
-    def __delattr__(self, key, *args, **kwargs):
-        self._mark_as_changed(key)
-        return super(BaseDict, self).__delattr__(key)
-
     def __getstate__(self):
         self.instance = None
         self._dereferenced = False
@@ -67,30 +85,19 @@ class BaseDict(dict):
         self = state
         return self

-    def clear(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseDict, self).clear()
-
-    def pop(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseDict, self).pop(*args, **kwargs)
-
-    def popitem(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseDict, self).popitem()
-
-    def setdefault(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseDict, self).setdefault(*args, **kwargs)
-
-    def update(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseDict, self).update(*args, **kwargs)
+    __setitem__ = mark_key_as_changed_wrapper(dict.__setitem__)
+    __delattr__ = mark_key_as_changed_wrapper(dict.__delattr__)
+    __delitem__ = mark_key_as_changed_wrapper(dict.__delitem__)
+    pop = mark_as_changed_wrapper(dict.pop)
+    clear = mark_as_changed_wrapper(dict.clear)
+    update = mark_as_changed_wrapper(dict.update)
+    popitem = mark_as_changed_wrapper(dict.popitem)
+    setdefault = mark_as_changed_wrapper(dict.setdefault)

     def _mark_as_changed(self, key=None):
-        if hasattr(self._instance, '_mark_as_changed'):
+        if hasattr(self._instance, "_mark_as_changed"):
             if key:
-                self._instance._mark_as_changed('%s.%s' % (self._name, key))
+                self._instance._mark_as_changed(f"{self._name}.{key}")
             else:
                 self._instance._mark_as_changed(self._name)
@@ -103,52 +110,41 @@ class BaseList(list):
     _name = None

     def __init__(self, list_items, instance, name):
-        Document = _import_class('Document')
-        EmbeddedDocument = _import_class('EmbeddedDocument')
+        BaseDocument = _import_class("BaseDocument")

-        if isinstance(instance, (Document, EmbeddedDocument)):
+        if isinstance(instance, BaseDocument):
             self._instance = weakref.proxy(instance)
         self._name = name
-        super(BaseList, self).__init__(list_items)
+        super().__init__(list_items)

-    def __getitem__(self, key, *args, **kwargs):
-        value = super(BaseList, self).__getitem__(key)
+    def __getitem__(self, key):
+        # change index to positive value because MongoDB does not support negative one
+        if isinstance(key, int) and key < 0:
+            key = len(self) + key
+        value = super().__getitem__(key)

-        EmbeddedDocument = _import_class('EmbeddedDocument')
+        if isinstance(key, slice):
+            # When receiving a slice operator, we don't convert the structure and bind
+            # to parent's instance. This is buggy for now but would require more work to be handled properly
+            return value
+
+        EmbeddedDocument = _import_class("EmbeddedDocument")
         if isinstance(value, EmbeddedDocument) and value._instance is None:
             value._instance = self._instance
-        elif not isinstance(value, BaseDict) and isinstance(value, dict):
-            value = BaseDict(value, None, '%s.%s' % (self._name, key))
-            super(BaseList, self).__setitem__(key, value)
+        elif isinstance(value, dict) and not isinstance(value, BaseDict):
+            # Replace dict by BaseDict
+            value = BaseDict(value, None, f"{self._name}.{key}")
+            super().__setitem__(key, value)
             value._instance = self._instance
-        elif not isinstance(value, BaseList) and isinstance(value, list):
-            value = BaseList(value, None, '%s.%s' % (self._name, key))
-            super(BaseList, self).__setitem__(key, value)
+        elif isinstance(value, list) and not isinstance(value, BaseList):
+            # Replace list by BaseList
+            value = BaseList(value, None, f"{self._name}.{key}")
+            super().__setitem__(key, value)
             value._instance = self._instance
         return value

     def __iter__(self):
-        for i in six.moves.range(self.__len__()):
-            yield self[i]
-
-    def __setitem__(self, key, value, *args, **kwargs):
-        if isinstance(key, slice):
-            self._mark_as_changed()
-        else:
-            self._mark_as_changed(key)
-        return super(BaseList, self).__setitem__(key, value)
-
-    def __delitem__(self, key, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).__delitem__(key)
-
-    def __setslice__(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).__setslice__(*args, **kwargs)
-
-    def __delslice__(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).__delslice__(*args, **kwargs)
+        yield from super().__iter__()

     def __getstate__(self):
         self.instance = None
@@ -159,53 +155,40 @@ class BaseList(list):
         self = state
         return self

-    def __iadd__(self, other):
-        self._mark_as_changed()
-        return super(BaseList, self).__iadd__(other)
+    def __setitem__(self, key, value):
+        changed_key = key
+        if isinstance(key, slice):
+            # In case of slice, we don't bother to identify the exact elements being updated
+            # instead, we simply marks the whole list as changed
+            changed_key = None

-    def __imul__(self, other):
-        self._mark_as_changed()
-        return super(BaseList, self).__imul__(other)
+        result = super().__setitem__(key, value)
+        self._mark_as_changed(changed_key)
+        return result

-    def append(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).append(*args, **kwargs)
-
-    def extend(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).extend(*args, **kwargs)
-
-    def insert(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).insert(*args, **kwargs)
-
-    def pop(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).pop(*args, **kwargs)
-
-    def remove(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).remove(*args, **kwargs)
-
-    def reverse(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).reverse()
-
-    def sort(self, *args, **kwargs):
-        self._mark_as_changed()
-        return super(BaseList, self).sort(*args, **kwargs)
+    append = mark_as_changed_wrapper(list.append)
+    extend = mark_as_changed_wrapper(list.extend)
+    insert = mark_as_changed_wrapper(list.insert)
+    pop = mark_as_changed_wrapper(list.pop)
+    remove = mark_as_changed_wrapper(list.remove)
+    reverse = mark_as_changed_wrapper(list.reverse)
+    sort = mark_as_changed_wrapper(list.sort)
+    __delitem__ = mark_as_changed_wrapper(list.__delitem__)
+    __iadd__ = mark_as_changed_wrapper(list.__iadd__)
+    __imul__ = mark_as_changed_wrapper(list.__imul__)

     def _mark_as_changed(self, key=None):
-        if hasattr(self._instance, '_mark_as_changed'):
-            if key:
-                self._instance._mark_as_changed(
-                    '%s.%s' % (self._name, key % len(self))
-                )
+        if hasattr(self._instance, "_mark_as_changed"):
+            if key is not None:
+                self._instance._mark_as_changed(f"{self._name}.{key % len(self)}")
             else:
                 self._instance._mark_as_changed(self._name)
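The negative-index normalization in `BaseList.__getitem__` above keeps the change path passed to `_mark_as_changed` non-negative (`key % len(self)`); a standalone sketch of the arithmetic:

```python
# MongoDB update paths need non-negative positions, so index -1 on a
# three-element list is normalized to 2 before building "comments.2".
items = ["a", "b", "c"]
key = -1
if isinstance(key, int) and key < 0:
    key = len(items) + key
assert key == 2
assert f"comments.{key % len(items)}" == "comments.2"
```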
 class EmbeddedDocumentList(BaseList):
+    def __init__(self, list_items, instance, name):
+        super().__init__(list_items, instance, name)
+        self._instance = instance
+
     @classmethod
     def __match_all(cls, embedded_doc, kwargs):
@@ -214,7 +197,7 @@ class EmbeddedDocumentList(BaseList):
         """
         for key, expected_value in kwargs.items():
             doc_val = getattr(embedded_doc, key)
-            if doc_val != expected_value and six.text_type(doc_val) != expected_value:
+            if doc_val != expected_value and str(doc_val) != expected_value:
                 return False
         return True

@@ -225,15 +208,14 @@ class EmbeddedDocumentList(BaseList):
             return embedded_docs
         return [doc for doc in embedded_docs if cls.__match_all(doc, kwargs)]

-    def __init__(self, list_items, instance, name):
-        super(EmbeddedDocumentList, self).__init__(list_items, instance, name)
-        self._instance = instance
-
     def filter(self, **kwargs):
         """
         Filters the list by only including embedded documents with the
         given keyword arguments.

+        This method only supports simple comparison (e.g. .filter(name='John Doe'))
+        and does not support operators like __gte, __lte, __icontains like queryset.filter does
+
         :param kwargs: The keyword arguments corresponding to the fields to
             filter on. *Multiple arguments are treated as if they are ANDed
             together.*
@@ -288,12 +270,10 @@ class EmbeddedDocumentList(BaseList):
         """
         values = self.__only_matches(self, kwargs)
         if len(values) == 0:
-            raise DoesNotExist(
-                '%s matching query does not exist.' % self._name
-            )
+            raise DoesNotExist("%s matching query does not exist." % self._name)
         elif len(values) > 1:
             raise MultipleObjectsReturned(
-                '%d items returned, instead of 1' % len(values)
+                "%d items returned, instead of 1" % len(values)
             )

         return values[0]
@@ -307,11 +287,11 @@ class EmbeddedDocumentList(BaseList):

     def create(self, **values):
         """
-        Creates a new embedded document and saves it to the database.
+        Creates a new instance of the EmbeddedDocument and appends it to this EmbeddedDocumentList.

         .. note::
-            The embedded document changes are not automatically saved
-            to the database after calling this method.
+            the instance of the EmbeddedDocument is not automatically saved to the database.
+            You still need to call .save() on the parent Document.

         :param values: A dictionary of values for the embedded document.
         :return: The new embedded document instance.
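As the reworked docstring stresses, `create()` only appends in memory; persisting requires saving the parent. An illustrative usage sketch (the `Post`/`Comment` models are made up, and a running MongoDB instance is assumed):

```python
from mongoengine import (
    Document, EmbeddedDocument, EmbeddedDocumentListField, StringField, connect,
)

class Comment(EmbeddedDocument):
    author = StringField()

class Post(Document):
    comments = EmbeddedDocumentListField(Comment)

connect("example_db")  # assumes a local MongoDB server
post = Post().save()
post.comments.create(author="ross")  # appended in memory only
post.save()  # saving the parent persists the new embedded document
```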
@@ -372,24 +352,24 @@ class EmbeddedDocumentList(BaseList):
         return len(values)


-class StrictDict(object):
+class StrictDict:
     __slots__ = ()
-    _special_fields = set(['get', 'pop', 'iteritems', 'items', 'keys', 'create'])
+    _special_fields = {"get", "pop", "iteritems", "items", "keys", "create"}
     _classes = {}

     def __init__(self, **kwargs):
-        for k, v in kwargs.iteritems():
+        for k, v in kwargs.items():
             setattr(self, k, v)

     def __getitem__(self, key):
-        key = '_reserved_' + key if key in self._special_fields else key
+        key = "_reserved_" + key if key in self._special_fields else key
         try:
             return getattr(self, key)
         except AttributeError:
             raise KeyError(key)

     def __setitem__(self, key, value):
-        key = '_reserved_' + key if key in self._special_fields else key
+        key = "_reserved_" + key if key in self._special_fields else key
         return setattr(self, key, value)

     def __contains__(self, key):
@@ -426,37 +406,42 @@ class StrictDict(object):
         return (key for key in self.__slots__ if hasattr(self, key))

     def __len__(self):
-        return len(list(self.iteritems()))
+        return len(list(self.items()))

     def __eq__(self, other):
-        return self.items() == other.items()
+        return list(self.items()) == list(other.items())

     def __ne__(self, other):
-        return self.items() != other.items()
+        return not (self == other)

     @classmethod
     def create(cls, allowed_keys):
-        allowed_keys_tuple = tuple(('_reserved_' + k if k in cls._special_fields else k) for k in allowed_keys)
+        allowed_keys_tuple = tuple(
+            ("_reserved_" + k if k in cls._special_fields else k) for k in allowed_keys
+        )
         allowed_keys = frozenset(allowed_keys_tuple)
         if allowed_keys not in cls._classes:

             class SpecificStrictDict(cls):
                 __slots__ = allowed_keys_tuple

                 def __repr__(self):
-                    return '{%s}' % ', '.join('"{0!s}": {1!r}'.format(k, v) for k, v in self.items())
+                    return "{%s}" % ", ".join(
+                        f'"{k!s}": {v!r}' for k, v in self.items()
+                    )

             cls._classes[allowed_keys] = SpecificStrictDict
         return cls._classes[allowed_keys]
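`StrictDict.create` memoizes one `__slots__`-based subclass per frozen key set, so repeated calls with the same keys reuse the same class; a small standalone sketch of the pattern:

```python
_classes = {}

def make_slotted(allowed_keys):
    # One class per distinct key set, cached like StrictDict._classes above.
    frozen = frozenset(allowed_keys)
    if frozen not in _classes:
        class Slotted:
            __slots__ = tuple(allowed_keys)
        _classes[frozen] = Slotted
    return _classes[frozen]

A = make_slotted(("x", "y"))
assert make_slotted(("x", "y")) is A  # memoized: same class object
obj = A()
obj.x = 1  # only "x" and "y" are assignable, thanks to __slots__
```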
 class LazyReference(DBRef):
-    __slots__ = ('_cached_doc', 'passthrough', 'document_type')
+    __slots__ = ("_cached_doc", "passthrough", "document_type")

     def fetch(self, force=False):
         if not self._cached_doc or force:
             self._cached_doc = self.document_type.objects.get(pk=self.pk)
             if not self._cached_doc:
-                raise DoesNotExist('Trying to dereference unknown document %s' % (self))
+                raise DoesNotExist("Trying to dereference unknown document %s" % (self))
         return self._cached_doc

     @property
@@ -467,7 +452,7 @@ class LazyReference(DBRef):
         self.document_type = document_type
         self._cached_doc = cached_doc
         self.passthrough = passthrough
-        super(LazyReference, self).__init__(self.document_type._get_collection_name(), pk)
+        super().__init__(self.document_type._get_collection_name(), pk)

     def __getitem__(self, name):
         if not self.passthrough:
@@ -476,7 +461,7 @@ class LazyReference(DBRef):
         return document[name]

     def __getattr__(self, name):
-        if not object.__getattribute__(self, 'passthrough'):
+        if not object.__getattribute__(self, "passthrough"):
             raise AttributeError()
         document = self.fetch()
         try:
@@ -485,4 +470,4 @@ class LazyReference(DBRef):
             raise AttributeError()

     def __repr__(self):
-        return "<LazyReference(%s, %r)>" % (self.document_type, self.pk)
+        return f"<LazyReference({self.document_type}, {self.pk!r})>"
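`fetch()` above caches the dereferenced document and only re-queries when `force=True`. An illustrative usage sketch (hypothetical models; assumes a running MongoDB instance):

```python
from mongoengine import Document, LazyReferenceField, StringField, connect

class Author(Document):
    name = StringField()

class Book(Document):
    author = LazyReferenceField(Author)

connect("example_db")
author = Author(name="Ada").save()
Book(author=author).save()

ref = Book.objects.first().author  # a LazyReference, not an Author yet
loaded = ref.fetch()               # first call hits the database
assert ref.fetch() is loaded       # cached document is reused
ref.fetch(force=True)              # force=True re-queries the database
```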
File diff suppressed because it is too large.
@@ -1,29 +1,27 @@
 import operator
 import warnings
 import weakref

-from bson import DBRef, ObjectId, SON
 import pymongo
-import six
+from bson import SON, DBRef, ObjectId

 from mongoengine.base.common import UPDATE_OPERATORS
-from mongoengine.base.datastructures import (BaseDict, BaseList,
-                                             EmbeddedDocumentList)
+from mongoengine.base.datastructures import (
+    BaseDict,
+    BaseList,
+    EmbeddedDocumentList,
+)
 from mongoengine.common import _import_class
-from mongoengine.errors import ValidationError
+from mongoengine.errors import DeprecatedError, ValidationError

+__all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField")

-__all__ = ('BaseField', 'ComplexBaseField', 'ObjectIdField',
-           'GeoJsonBaseField')

-
-class BaseField(object):
+class BaseField:
     """A base class for fields in a MongoDB document. Instances of this class
     may be added to subclasses of `Document` to define a document's schema.
-
-    .. versionchanged:: 0.5 - added verbose and help text
     """
-    name = None
+
+    name = None  # set in TopLevelDocumentMetaclass
     _geo_index = False
     _auto_gen = False  # Call `generate` to generate a value
     _auto_dereference = True
@@ -34,14 +32,23 @@ class BaseField(object):
     creation_counter = 0
     auto_creation_counter = -1

-    def __init__(self, db_field=None, name=None, required=False, default=None,
-                 unique=False, unique_with=None, primary_key=False,
-                 validation=None, choices=None, null=False, sparse=False,
-                 **kwargs):
+    def __init__(
+        self,
+        db_field=None,
+        required=False,
+        default=None,
+        unique=False,
+        unique_with=None,
+        primary_key=False,
+        validation=None,
+        choices=None,
+        null=False,
+        sparse=False,
+        **kwargs,
+    ):
         """
         :param db_field: The database field to store this field in
             (defaults to the name of the field)
-        :param name: Deprecated - use db_field
         :param required: If the field is required. Whether it has to have a
             value or not. Defaults to False.
         :param default: (optional) The default value for this field if no value
@@ -52,10 +59,10 @@ class BaseField(object):
             unique with.
         :param primary_key: Mark this field as the primary key. Defaults to False.
         :param validation: (optional) A callable to validate the value of the
-            field. Generally this is deprecated in favour of the
-            `FIELD.validate` method
+            field. The callable takes the value as parameter and should raise
+            a ValidationError if validation fails
         :param choices: (optional) The valid choices
-        :param null: (optional) Is the field value can be null. If no and there is a default value
+        :param null: (optional) If the field value can be null. If no and there is a default value
             then the default value is set
         :param sparse: (optional) `sparse=True` combined with `unique=True` and `required=False`
             means that uniqueness won't be enforced for `None` values
@@ -65,11 +72,8 @@ class BaseField(object):
            existing attributes. Common metadata includes `verbose_name` and
            `help_text`.
         """
-        self.db_field = (db_field or name) if not primary_key else '_id'
+        self.db_field = db_field if not primary_key else "_id"

-        if name:
-            msg = 'Field\'s "name" attribute deprecated in favour of "db_field"'
-            warnings.warn(msg, DeprecationWarning)
         self.required = required or primary_key
         self.default = default
         self.unique = bool(unique or unique_with)
@@ -82,17 +86,14 @@ class BaseField(object):
         self._owner_document = None

         # Make sure db_field is a string (if it's explicitly defined).
-        if (
-            self.db_field is not None and
-            not isinstance(self.db_field, six.string_types)
-        ):
-            raise TypeError('db_field should be a string.')
+        if self.db_field is not None and not isinstance(self.db_field, str):
+            raise TypeError("db_field should be a string.")

         # Make sure db_field doesn't contain any forbidden characters.
-        if isinstance(self.db_field, six.string_types) and (
-            '.' in self.db_field or
-            '\0' in self.db_field or
-            self.db_field.startswith('$')
+        if isinstance(self.db_field, str) and (
+            "." in self.db_field
+            or "\0" in self.db_field
+            or self.db_field.startswith("$")
         ):
             raise ValueError(
                 'field names cannot contain dots (".") or null characters '
@@ -102,15 +103,17 @@ class BaseField(object):
         # Detect and report conflicts between metadata and base properties.
         conflicts = set(dir(self)) & set(kwargs)
         if conflicts:
-            raise TypeError('%s already has attribute(s): %s' % (
-                self.__class__.__name__, ', '.join(conflicts)))
+            raise TypeError(
+                "%s already has attribute(s): %s"
+                % (self.__class__.__name__, ", ".join(conflicts))
+            )

         # Assign metadata to the instance
         # This efficient method is available because no __slots__ are defined.
         self.__dict__.update(kwargs)

         # Adjust the appropriate creation counter, and save our local copy.
-        if self.db_field == '_id':
+        if self.db_field == "_id":
             self.creation_counter = BaseField.auto_creation_counter
             BaseField.auto_creation_counter -= 1
         else:
@@ -118,8 +121,7 @@ class BaseField(object):
             BaseField.creation_counter += 1

     def __get__(self, instance, owner):
-        """Descriptor for retrieving a value from a field in a document.
-        """
+        """Descriptor for retrieving a value from a field in a document."""
         if instance is None:
             # Document class being used rather than a document object
             return self
@@ -128,11 +130,9 @@ class BaseField(object):
         return instance._data.get(self.name)

     def __set__(self, instance, value):
-        """Descriptor for assigning a value to a field in a document.
-        """
-
-        # If setting to None and there is a default
-        # Then set the value to the default value
+        """Descriptor for assigning a value to a field in a document."""
+        # If setting to None and there is a default value provided for this
+        # field, then set the value to the default value.
         if value is None:
             if self.null:
                 value = None
@@ -143,24 +143,29 @@ class BaseField(object):

         if instance._initialised:
             try:
-                if (self.name not in instance._data or
-                        instance._data[self.name] != value):
+                value_has_changed = (
+                    self.name not in instance._data
+                    or instance._data[self.name] != value
+                )
+                if value_has_changed:
                     instance._mark_as_changed(self.name)
             except Exception:
-                # Values cant be compared eg: naive and tz datetimes
-                # So mark it as changed
+                # Some values can't be compared and throw an error when we
+                # attempt to do so (e.g. tz-naive and tz-aware datetimes).
+                # Mark the field as changed in such cases.
                 instance._mark_as_changed(self.name)

-        EmbeddedDocument = _import_class('EmbeddedDocument')
+        EmbeddedDocument = _import_class("EmbeddedDocument")
         if isinstance(value, EmbeddedDocument):
             value._instance = weakref.proxy(instance)
         elif isinstance(value, (list, tuple)):
             for v in value:
                 if isinstance(v, EmbeddedDocument):
                     v._instance = weakref.proxy(instance)

         instance._data[self.name] = value

-    def error(self, message='', errors=None, field_name=None):
+    def error(self, message="", errors=None, field_name=None):
         """Raise a ValidationError."""
         field_name = field_name if field_name else self.name
         raise ValidationError(message, errors=errors, field_name=field_name)
@@ -177,11 +182,11 @@ class BaseField(object):
         """Helper method to call to_mongo with proper inputs."""
         f_inputs = self.to_mongo.__code__.co_varnames
         ex_vars = {}
-        if 'fields' in f_inputs:
-            ex_vars['fields'] = fields
+        if "fields" in f_inputs:
+            ex_vars["fields"] = fields

-        if 'use_db_field' in f_inputs:
-            ex_vars['use_db_field'] = use_db_field
+        if "use_db_field" in f_inputs:
+            ex_vars["use_db_field"] = use_db_field

         return self.to_mongo(value, **ex_vars)

@@ -196,8 +201,8 @@ class BaseField(object):
         pass

     def _validate_choices(self, value):
-        Document = _import_class('Document')
-        EmbeddedDocument = _import_class('EmbeddedDocument')
+        Document = _import_class("Document")
+        EmbeddedDocument = _import_class("EmbeddedDocument")

         choice_list = self.choices
         if isinstance(next(iter(choice_list)), (list, tuple)):
@@ -207,16 +212,12 @@ class BaseField(object):
         # Choices which are other types of Documents
         if isinstance(value, (Document, EmbeddedDocument)):
             if not any(isinstance(value, c) for c in choice_list):
-                self.error(
-                    'Value must be an instance of %s' % (
-                        six.text_type(choice_list)
-                    )
-                )
+                self.error("Value must be an instance of %s" % (choice_list))
         # Choices which are types other than Documents
         else:
             values = value if isinstance(value, (list, tuple)) else [value]
             if len(set(values) - set(choice_list)):
-                self.error('Value must be one of %s' % six.text_type(choice_list))
+                self.error("Value must be one of %s" % str(choice_list))

     def _validate(self, value, **kwargs):
         # Check the Choices Constraint
@@ -226,11 +227,23 @@ class BaseField(object):
         # check validation argument
         if self.validation is not None:
             if callable(self.validation):
-                if not self.validation(value):
-                    self.error('Value does not match custom validation method')
+                try:
+                    # breaking change of 0.18
+                    # Get rid of True/False-type return for the validation method
+                    # in favor of having validation raising a ValidationError
+                    ret = self.validation(value)
+                    if ret is not None:
+                        raise DeprecatedError(
+                            "validation argument for `%s` must not return anything, "
+                            "it should raise a ValidationError if validation fails"
+                            % self.name
+                        )
+                except ValidationError as ex:
+                    self.error(str(ex))
             else:
-                raise ValueError('validation argument for "%s" must be a '
-                                 'callable.' % self.name)
+                raise ValueError(
+                    'validation argument for `"%s"` must be a ' "callable." % self.name
+                )

         self.validate(value, **kwargs)
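The `try`/`except` added above is the 0.18 breaking change: a custom `validation` callable must now raise `ValidationError` instead of returning a boolean. A hedged sketch of the new contract (hypothetical model; assumes a configured connection):

```python
from mongoengine import Document, StringField, ValidationError, connect

def validate_slug(value):
    # Raise on failure, return None on success; returning True/False
    # now triggers DeprecatedError per the diff above.
    if " " in value:
        raise ValidationError("slugs may not contain spaces")

class Page(Document):
    slug = StringField(validation=validate_slug)

connect("example_db")
Page(slug="hello-world").validate()  # passes silently
try:
    Page(slug="not a slug").validate()
except ValidationError:
    pass  # raised as expected
```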
@@ -252,11 +265,22 @@ class ComplexBaseField(BaseField):
     Allows for nesting of embedded documents inside complex types.
     Handles the lazy dereferencing of a queryset by lazily dereferencing all
     items in a list / dict rather than one at a time.
-
-    .. versionadded:: 0.5
     """

-    field = None
+    def __init__(self, field=None, **kwargs):
+        self.field = field
+        super().__init__(**kwargs)
+
+    @staticmethod
+    def _lazy_load_refs(instance, name, ref_values, *, max_depth):
+        _dereference = _import_class("DeReference")()
+        documents = _dereference(
+            ref_values,
+            max_depth=max_depth,
+            instance=instance,
+            name=name,
+        )
+        return documents

     def __get__(self, instance, owner):
         """Descriptor to automatically dereference references."""
@@ -264,28 +288,37 @@ class ComplexBaseField(BaseField):
             # Document class being used rather than a document object
             return self

-        ReferenceField = _import_class('ReferenceField')
-        GenericReferenceField = _import_class('GenericReferenceField')
-        EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')
-        dereference = (self._auto_dereference and
-                       (self.field is None or isinstance(self.field,
-                                                         (GenericReferenceField, ReferenceField))))
+        ReferenceField = _import_class("ReferenceField")
+        GenericReferenceField = _import_class("GenericReferenceField")
+        EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField")

-        _dereference = _import_class('DeReference')()
+        auto_dereference = instance._fields[self.name]._auto_dereference

-        self._auto_dereference = instance._fields[self.name]._auto_dereference
-        if instance._initialised and dereference and instance._data.get(self.name):
-            instance._data[self.name] = _dereference(
-                instance._data.get(self.name), max_depth=1, instance=instance,
-                name=self.name
-            )
+        dereference = auto_dereference and (
+            self.field is None
+            or isinstance(self.field, (GenericReferenceField, ReferenceField))
+        )

-        value = super(ComplexBaseField, self).__get__(instance, owner)
+        if (
+            instance._initialised
+            and dereference
+            and instance._data.get(self.name)
+            and not getattr(instance._data[self.name], "_dereferenced", False)
+        ):
+            ref_values = instance._data.get(self.name)
+            instance._data[self.name] = self._lazy_load_refs(
+                ref_values=ref_values, instance=instance, name=self.name, max_depth=1
+            )
+            if hasattr(instance._data[self.name], "_dereferenced"):
+                instance._data[self.name]._dereferenced = True
+
+        value = super().__get__(instance, owner)

         # Convert lists / values so we can watch for any changes on them
         if isinstance(value, (list, tuple)):
-            if (issubclass(type(self), EmbeddedDocumentListField) and
-                    not isinstance(value, EmbeddedDocumentList)):
+            if issubclass(type(self), EmbeddedDocumentListField) and not isinstance(
+                value, EmbeddedDocumentList
+            ):
                 value = EmbeddedDocumentList(value, instance, self.name)
             elif not isinstance(value, BaseList):
                 value = BaseList(value, instance, self.name)
@@ -294,11 +327,14 @@ class ComplexBaseField(BaseField):
             value = BaseDict(value, instance, self.name)
             instance._data[self.name] = value

-        if (self._auto_dereference and instance._initialised and
-                isinstance(value, (BaseList, BaseDict)) and
-                not value._dereferenced):
-            value = _dereference(
-                value, max_depth=1, instance=instance, name=self.name
-            )
+        if (
+            auto_dereference
+            and instance._initialised
+            and isinstance(value, (BaseList, BaseDict))
+            and not value._dereferenced
+        ):
+            value = self._lazy_load_refs(
+                ref_values=value, instance=instance, name=self.name, max_depth=1
+            )
             value._dereferenced = True
             instance._data[self.name] = value
@@ -307,66 +343,75 @@ class ComplexBaseField(BaseField):

     def to_python(self, value):
         """Convert a MongoDB-compatible type to a Python type."""
-        if isinstance(value, six.string_types):
+        if isinstance(value, str):
             return value

-        if hasattr(value, 'to_python'):
+        if hasattr(value, "to_python"):
             return value.to_python()

+        BaseDocument = _import_class("BaseDocument")
+        if isinstance(value, BaseDocument):
+            # Something is wrong, return the value as it is
+            return value
+
         is_list = False
-        if not hasattr(value, 'items'):
+        if not hasattr(value, "items"):
             try:
                 is_list = True
-                value = {k: v for k, v in enumerate(value)}
+                value = {idx: v for idx, v in enumerate(value)}
             except TypeError:  # Not iterable return the value
                 return value

         if self.field:
             self.field._auto_dereference = self._auto_dereference
-            value_dict = {key: self.field.to_python(item)
-                          for key, item in value.items()}
+            value_dict = {
+                key: self.field.to_python(item) for key, item in value.items()
+            }
         else:
-            Document = _import_class('Document')
+            Document = _import_class("Document")
             value_dict = {}
             for k, v in value.items():
                 if isinstance(v, Document):
                     # We need the id from the saved object to create the DBRef
                     if v.pk is None:
-                        self.error('You can only reference documents once they'
-                                   ' have been saved to the database')
+                        self.error(
+                            "You can only reference documents once they"
+                            " have been saved to the database"
+                        )
                     collection = v._get_collection_name()
                     value_dict[k] = DBRef(collection, v.pk)
-                elif hasattr(v, 'to_python'):
+                elif hasattr(v, "to_python"):
                     value_dict[k] = v.to_python()
                 else:
                     value_dict[k] = self.to_python(v)

         if is_list:  # Convert back to a list
-            return [v for _, v in sorted(value_dict.items(),
-                                         key=operator.itemgetter(0))]
+            return [
+                v for _, v in sorted(value_dict.items(), key=operator.itemgetter(0))
+            ]
         return value_dict

     def to_mongo(self, value, use_db_field=True, fields=None):
         """Convert a Python type to a MongoDB-compatible type."""
-        Document = _import_class('Document')
-        EmbeddedDocument = _import_class('EmbeddedDocument')
-        GenericReferenceField = _import_class('GenericReferenceField')
+        Document = _import_class("Document")
+        EmbeddedDocument = _import_class("EmbeddedDocument")
+        GenericReferenceField = _import_class("GenericReferenceField")

-        if isinstance(value, six.string_types):
+        if isinstance(value, str):
             return value

-        if hasattr(value, 'to_mongo'):
+        if hasattr(value, "to_mongo"):
             if isinstance(value, Document):
                 return GenericReferenceField().to_mongo(value)
             cls = value.__class__
             val = value.to_mongo(use_db_field, fields)
             # If it's a document that is not inherited add _cls
             if isinstance(value, EmbeddedDocument):
-                val['_cls'] = cls.__name__
+                val["_cls"] = cls.__name__
             return val

         is_list = False
-        if not hasattr(value, 'items'):
+        if not hasattr(value, "items"):
             try:
                 is_list = True
                 value = {k: v for k, v in enumerate(value)}
@@ -376,48 +421,51 @@ class ComplexBaseField(BaseField):
         if self.field:
             value_dict = {
                 key: self.field._to_mongo_safe_call(item, use_db_field, fields)
-                for key, item in value.iteritems()
+                for key, item in value.items()
             }
         else:
             value_dict = {}
-            for k, v in value.iteritems():
+            for k, v in value.items():
                 if isinstance(v, Document):
                     # We need the id from the saved object to create the DBRef
                     if v.pk is None:
-                        self.error('You can only reference documents once they'
-                                   ' have been saved to the database')
+                        self.error(
+                            "You can only reference documents once they"
+                            " have been saved to the database"
+                        )

                     # If its a document that is not inheritable it won't have
                     # any _cls data so make it a generic reference allows
                     # us to dereference
-                    meta = getattr(v, '_meta', {})
-                    allow_inheritance = meta.get('allow_inheritance')
+                    meta = getattr(v, "_meta", {})
+                    allow_inheritance = meta.get("allow_inheritance")
                     if not allow_inheritance and not self.field:
                         value_dict[k] = GenericReferenceField().to_mongo(v)
                     else:
                         collection = v._get_collection_name()
                         value_dict[k] = DBRef(collection, v.pk)
-                elif hasattr(v, 'to_mongo'):
+                elif hasattr(v, "to_mongo"):
                     cls = v.__class__
                     val = v.to_mongo(use_db_field, fields)
                     # If it's a document that is not inherited add _cls
                     if isinstance(v, (Document, EmbeddedDocument)):
-                        val['_cls'] = cls.__name__
+                        val["_cls"] = cls.__name__
                     value_dict[k] = val
                 else:
                     value_dict[k] = self.to_mongo(v, use_db_field, fields)

         if is_list:  # Convert back to a list
-            return [v for _, v in sorted(value_dict.items(),
-                                         key=operator.itemgetter(0))]
+            return [
+                v for _, v in sorted(value_dict.items(), key=operator.itemgetter(0))
+            ]
         return value_dict

     def validate(self, value):
         """If field is provided ensure the value is valid."""
         errors = {}
         if self.field:
-            if hasattr(value, 'iteritems') or hasattr(value, 'items'):
-                sequence = value.iteritems()
+            if hasattr(value, "items"):
+                sequence = value.items()
             else:
                 sequence = enumerate(value)
             for k, v in sequence:
@@ -430,11 +478,10 @@ class ComplexBaseField(BaseField):

         if errors:
             field_class = self.field.__class__.__name__
-            self.error('Invalid %s item (%s)' % (field_class, value),
-                       errors=errors)
+            self.error(f"Invalid {field_class} item ({value})", errors=errors)
         # Don't allow empty values if required
         if self.required and not value:
-            self.error('Field is required and cannot be empty')
+            self.error("Field is required and cannot be empty")

     def prepare_query_value(self, op, value):
         return self.to_mongo(value)
@@ -464,10 +511,9 @@ class ObjectIdField(BaseField):
     def to_mongo(self, value):
         if not isinstance(value, ObjectId):
             try:
-                return ObjectId(six.text_type(value))
+                return ObjectId(str(value))
             except Exception as e:
-                # e.message attribute has been deprecated since Python 2.6
-                self.error(six.text_type(e))
+                self.error(str(e))
         return value

     def prepare_query_value(self, op, value):
@@ -475,84 +521,82 @@ class ObjectIdField(BaseField):

     def validate(self, value):
         try:
-            ObjectId(six.text_type(value))
+            ObjectId(str(value))
         except Exception:
-            self.error('Invalid Object ID')
+            self.error("Invalid ObjectID")


 class GeoJsonBaseField(BaseField):
-    """A geo json field storing a geojson style object.
-
-    .. versionadded:: 0.8
-    """
+    """A geo json field storing a geojson style object."""

     _geo_index = pymongo.GEOSPHERE
-    _type = 'GeoBase'
+    _type = "GeoBase"

     def __init__(self, auto_index=True, *args, **kwargs):
         """
         :param bool auto_index: Automatically create a '2dsphere' index.\
             Defaults to `True`.
         """
-        self._name = '%sField' % self._type
+        self._name = "%sField" % self._type
         if not auto_index:
             self._geo_index = False
-        super(GeoJsonBaseField, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

     def validate(self, value):
         """Validate the GeoJson object based on its type."""
         if isinstance(value, dict):
-            if set(value.keys()) == set(['type', 'coordinates']):
-                if value['type'] != self._type:
-                    self.error('%s type must be "%s"' %
-                               (self._name, self._type))
-                return self.validate(value['coordinates'])
+            if set(value.keys()) == {"type", "coordinates"}:
+                if value["type"] != self._type:
+                    self.error(f'{self._name} type must be "{self._type}"')
+                return self.validate(value["coordinates"])
             else:
-                self.error('%s can only accept a valid GeoJson dictionary'
-                           ' or lists of (x, y)' % self._name)
+                self.error(
+                    "%s can only accept a valid GeoJson dictionary"
+                    " or lists of (x, y)" % self._name
+                )
                 return
         elif not isinstance(value, (list, tuple)):
-            self.error('%s can only accept lists of [x, y]' % self._name)
+            self.error("%s can only accept lists of [x, y]" % self._name)
             return

-        validate = getattr(self, '_validate_%s' % self._type.lower())
+        validate = getattr(self, "_validate_%s" % self._type.lower())
         error = validate(value)
         if error:
             self.error(error)

     def _validate_polygon(self, value, top_level=True):
         if not isinstance(value, (list, tuple)):
-            return 'Polygons must contain list of linestrings'
+            return "Polygons must contain list of linestrings"

         # Quick and dirty validator
         try:
             value[0][0][0]
         except (TypeError, IndexError):
-            return 'Invalid Polygon must contain at least one valid linestring'
+            return "Invalid Polygon must contain at least one valid linestring"

         errors = []
         for val in value:
             error = self._validate_linestring(val, False)
             if not error and val[0] != val[-1]:
-                error = 'LineStrings must start and end at the same point'
+                error = "LineStrings must start and end at the same point"
             if error and error not in errors:
                 errors.append(error)
         if errors:
             if top_level:
-                return 'Invalid Polygon:\n%s' % ', '.join(errors)
+                return "Invalid Polygon:\n%s" % ", ".join(errors)
             else:
-                return '%s' % ', '.join(errors)
+                return "%s" % ", ".join(errors)

     def _validate_linestring(self, value, top_level=True):
         """Validate a linestring."""
         if not isinstance(value, (list, tuple)):
-            return 'LineStrings must contain list of coordinate pairs'
+            return "LineStrings must contain list of coordinate pairs"

         # Quick and dirty validator
         try:
             value[0][0]
         except (TypeError, IndexError):
-            return 'Invalid LineString must contain at least one valid point'
+            return "Invalid LineString must contain at least one valid point"

         errors = []
         for val in value:
@@ -561,29 +605,30 @@ class GeoJsonBaseField(BaseField):
             errors.append(error)
         if errors:
             if top_level:
-                return 'Invalid LineString:\n%s' % ', '.join(errors)
+                return "Invalid LineString:\n%s" % ", ".join(errors)
             else:
-                return '%s' % ', '.join(errors)
+                return "%s" % ", ".join(errors)

     def _validate_point(self, value):
         """Validate each set of coords"""
         if not isinstance(value, (list, tuple)):
-            return 'Points must be a list of coordinate pairs'
+            return "Points must be a list of coordinate pairs"
         elif not len(value) == 2:
-            return 'Value (%s) must be a two-dimensional point' % repr(value)
-        elif (not isinstance(value[0], (float, int)) or
-              not isinstance(value[1], (float, int))):
-            return 'Both values (%s) in point must be float or int' % repr(value)
+            return "Value (%s) must be a two-dimensional point" % repr(value)
+        elif not isinstance(value[0], (float, int)) or not isinstance(
+            value[1], (float, int)
+        ):
+            return "Both values (%s) in point must be float or int" % repr(value)

     def _validate_multipoint(self, value):
         if not isinstance(value, (list, tuple)):
-            return 'MultiPoint must be a list of Point'
+            return "MultiPoint must be a list of Point"

         # Quick and dirty validator
         try:
             value[0][0]
         except (TypeError, IndexError):
-            return 'Invalid MultiPoint must contain at least one valid point'
+            return "Invalid MultiPoint must contain at least one valid point"

         errors = []
         for point in value:
@@ -592,17 +637,17 @@ class GeoJsonBaseField(BaseField):
             errors.append(error)

         if errors:
-            return '%s' % ', '.join(errors)
+            return "%s" % ", ".join(errors)

     def _validate_multilinestring(self, value, top_level=True):
         if not isinstance(value, (list, tuple)):
-            return 'MultiLineString must be a list of LineString'
+            return "MultiLineString must be a list of LineString"

         # Quick and dirty validator
         try:
             value[0][0][0]
         except (TypeError, IndexError):
-            return 'Invalid MultiLineString must contain at least one valid linestring'
+            return "Invalid MultiLineString must contain at least one valid linestring"

         errors = []
         for linestring in value:
@@ -612,19 +657,19 @@ class GeoJsonBaseField(BaseField):

         if errors:
             if top_level:
-                return 'Invalid MultiLineString:\n%s' % ', '.join(errors)
+                return "Invalid MultiLineString:\n%s" % ", ".join(errors)
             else:
-                return '%s' % ', '.join(errors)
+                return "%s" % ", ".join(errors)

     def _validate_multipolygon(self, value):
         if not isinstance(value, (list, tuple)):
-            return 'MultiPolygon must be a list of Polygon'
+            return "MultiPolygon must be a list of Polygon"

         # Quick and dirty validator
         try:
             value[0][0][0][0]
         except (TypeError, IndexError):
-            return 'Invalid MultiPolygon must contain at least one valid Polygon'
+            return "Invalid MultiPolygon must contain at least one valid Polygon"

         errors = []
         for polygon in value:
@@ -633,9 +678,9 @@ class GeoJsonBaseField(BaseField):
             errors.append(error)

         if errors:
-            return 'Invalid MultiPolygon:\n%s' % ', '.join(errors)
+            return "Invalid MultiPolygon:\n%s" % ", ".join(errors)

     def to_mongo(self, value):
         if isinstance(value, dict):
             return value
-        return SON([('type', self._type), ('coordinates', value)])
+        return SON([("type", self._type), ("coordinates", value)])
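For a sense of what these validators accept: `PointField`, a concrete subclass of `GeoJsonBaseField`, takes either a bare `[x, y]` pair or a full GeoJSON dict. An illustrative sketch (hypothetical model; assumes a configured connection):

```python
from mongoengine import Document, PointField, connect

class Place(Document):
    location = PointField()

connect("example_db")
# Both spellings validate; each is stored as
# {"type": "Point", "coordinates": [x, y]}.
Place(location=[40.33, -87.9]).validate()
Place(location={"type": "Point", "coordinates": [40.33, -87.9]}).validate()
```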
@@ -1,68 +1,75 @@
|
||||
import itertools
|
||||
import warnings
|
||||
|
||||
import six
|
||||
|
||||
from mongoengine.base.common import _document_registry
|
||||
from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField
|
||||
from mongoengine.base.fields import (
|
||||
BaseField,
|
||||
ComplexBaseField,
|
||||
ObjectIdField,
|
||||
)
|
||||
from mongoengine.common import _import_class
|
||||
from mongoengine.errors import InvalidDocumentError
|
||||
from mongoengine.queryset import (DO_NOTHING, DoesNotExist,
|
||||
from mongoengine.queryset import (
|
||||
DO_NOTHING,
|
||||
DoesNotExist,
|
||||
MultipleObjectsReturned,
|
||||
QuerySetManager)
|
||||
QuerySetManager,
|
||||
)
|
||||
|
||||
|
||||
__all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass')
|
||||
__all__ = ("DocumentMetaclass", "TopLevelDocumentMetaclass")
|
||||
|
||||
|
||||
class DocumentMetaclass(type):
|
||||
"""Metaclass for all documents."""
|
||||
|
||||
# TODO lower complexity of this method
|
||||
def __new__(cls, name, bases, attrs):
|
||||
flattened_bases = cls._get_bases(bases)
|
||||
super_new = super(DocumentMetaclass, cls).__new__
|
||||
def __new__(mcs, name, bases, attrs):
|
||||
flattened_bases = mcs._get_bases(bases)
|
||||
super_new = super().__new__
|
||||
|
||||
# If a base class just call super
|
||||
metaclass = attrs.get('my_metaclass')
|
||||
metaclass = attrs.get("my_metaclass")
|
||||
if metaclass and issubclass(metaclass, DocumentMetaclass):
|
||||
return super_new(cls, name, bases, attrs)
|
||||
return super_new(mcs, name, bases, attrs)
|
||||
|
||||
attrs['_is_document'] = attrs.get('_is_document', False)
|
||||
attrs['_cached_reference_fields'] = []
|
||||
attrs["_is_document"] = attrs.get("_is_document", False)
|
||||
attrs["_cached_reference_fields"] = []
|
||||
|
||||
# EmbeddedDocuments could have meta data for inheritance
|
||||
if 'meta' in attrs:
|
||||
attrs['_meta'] = attrs.pop('meta')
|
||||
if "meta" in attrs:
|
||||
attrs["_meta"] = attrs.pop("meta")
|
||||
|
||||
# EmbeddedDocuments should inherit meta data
|
||||
if '_meta' not in attrs:
|
||||
if "_meta" not in attrs:
|
||||
meta = MetaDict()
|
||||
for base in flattened_bases[::-1]:
|
||||
# Add any mixin metadata from plain objects
|
||||
if hasattr(base, 'meta'):
|
||||
if hasattr(base, "meta"):
|
||||
meta.merge(base.meta)
|
||||
elif hasattr(base, '_meta'):
|
||||
elif hasattr(base, "_meta"):
|
||||
meta.merge(base._meta)
|
||||
attrs['_meta'] = meta
|
||||
attrs['_meta']['abstract'] = False # 789: EmbeddedDocument shouldn't inherit abstract
|
||||
attrs["_meta"] = meta
|
||||
attrs["_meta"][
|
||||
"abstract"
|
||||
] = False # 789: EmbeddedDocument shouldn't inherit abstract
|
||||
|
||||
# If allow_inheritance is True, add a "_cls" string field to the attrs
|
||||
if attrs['_meta'].get('allow_inheritance'):
|
||||
StringField = _import_class('StringField')
|
||||
attrs['_cls'] = StringField()
|
||||
if attrs["_meta"].get("allow_inheritance"):
|
||||
StringField = _import_class("StringField")
|
||||
attrs["_cls"] = StringField()
|
||||
|
||||
# Handle document Fields
|
||||
|
||||
# Merge all fields from subclasses
|
||||
doc_fields = {}
|
||||
for base in flattened_bases[::-1]:
|
||||
if hasattr(base, '_fields'):
|
||||
if hasattr(base, "_fields"):
|
||||
doc_fields.update(base._fields)
|
||||
|
||||
# Standard object mixin - merge in any Fields
|
||||
if not hasattr(base, '_meta'):
|
||||
if not hasattr(base, "_meta"):
|
||||
base_fields = {}
|
||||
for attr_name, attr_value in base.__dict__.iteritems():
|
||||
for attr_name, attr_value in base.__dict__.items():
|
||||
if not isinstance(attr_value, BaseField):
|
||||
continue
|
||||
attr_value.name = attr_name
|
||||
@@ -74,7 +81,7 @@ class DocumentMetaclass(type):
|
||||
|
||||
# Discover any document fields
|
||||
field_names = {}
|
||||
for attr_name, attr_value in attrs.iteritems():
|
||||
for attr_name, attr_value in attrs.items():
|
||||
if not isinstance(attr_value, BaseField):
|
||||
continue
|
||||
attr_value.name = attr_name
|
||||
@@ -83,27 +90,29 @@ class DocumentMetaclass(type):
|
||||
doc_fields[attr_name] = attr_value
|
||||
|
||||
# Count names to ensure no db_field redefinitions
|
||||
field_names[attr_value.db_field] = field_names.get(
|
||||
attr_value.db_field, 0) + 1
|
||||
field_names[attr_value.db_field] = (
|
||||
field_names.get(attr_value.db_field, 0) + 1
|
||||
)
|
||||
|
||||
# Ensure no duplicate db_fields
|
||||
duplicate_db_fields = [k for k, v in field_names.items() if v > 1]
|
||||
if duplicate_db_fields:
|
||||
msg = ('Multiple db_fields defined for: %s ' %
|
||||
', '.join(duplicate_db_fields))
|
||||
msg = "Multiple db_fields defined for: %s " % ", ".join(duplicate_db_fields)
|
||||
raise InvalidDocumentError(msg)
|
||||
|
||||
# Set _fields and db_field maps
|
||||
attrs['_fields'] = doc_fields
|
||||
attrs['_db_field_map'] = {k: getattr(v, 'db_field', k)
|
||||
for k, v in doc_fields.items()}
|
||||
attrs['_reverse_db_field_map'] = {
|
||||
v: k for k, v in attrs['_db_field_map'].items()
|
||||
attrs["_fields"] = doc_fields
|
||||
attrs["_db_field_map"] = {
|
||||
k: getattr(v, "db_field", k) for k, v in doc_fields.items()
|
||||
}
|
||||
attrs["_reverse_db_field_map"] = {
|
||||
v: k for k, v in attrs["_db_field_map"].items()
|
||||
}
|
||||
|
||||
attrs['_fields_ordered'] = tuple(i[1] for i in sorted(
|
||||
(v.creation_counter, v.name)
|
||||
for v in doc_fields.itervalues()))
|
||||
attrs["_fields_ordered"] = tuple(
|
||||
i[1]
|
||||
for i in sorted((v.creation_counter, v.name) for v in doc_fields.values())
|
||||
)
|
||||
|
||||
#
|
||||
# Set document hierarchy
|
||||
@@ -111,34 +120,37 @@ class DocumentMetaclass(type):
|
||||
superclasses = ()
|
||||
class_name = [name]
|
||||
for base in flattened_bases:
|
||||
if (not getattr(base, '_is_base_cls', True) and
|
||||
not getattr(base, '_meta', {}).get('abstract', True)):
|
||||
if not getattr(base, "_is_base_cls", True) and not getattr(
|
||||
base, "_meta", {}
|
||||
).get("abstract", True):
|
||||
# Collate hierarchy for _cls and _subclasses
|
||||
class_name.append(base.__name__)
|
||||
|
||||
if hasattr(base, '_meta'):
|
||||
if hasattr(base, "_meta"):
|
||||
# Warn if allow_inheritance isn't set and prevent
|
||||
# inheritance of classes where inheritance is set to False
|
||||
allow_inheritance = base._meta.get('allow_inheritance')
|
||||
if not allow_inheritance and not base._meta.get('abstract'):
|
||||
raise ValueError('Document %s may not be subclassed' %
|
||||
base.__name__)
|
||||
allow_inheritance = base._meta.get("allow_inheritance")
|
||||
if not allow_inheritance and not base._meta.get("abstract"):
|
||||
raise ValueError(
|
||||
"Document %s may not be subclassed. "
|
||||
'To enable inheritance, use the "allow_inheritance" meta attribute.'
|
||||
% base.__name__
|
||||
)
|
||||
|
||||
# Get superclasses from last base superclass
|
||||
document_bases = [b for b in flattened_bases
|
||||
if hasattr(b, '_class_name')]
|
||||
document_bases = [b for b in flattened_bases if hasattr(b, "_class_name")]
|
||||
if document_bases:
|
||||
superclasses = document_bases[0]._superclasses
|
||||
superclasses += (document_bases[0]._class_name,)
|
||||
|
||||
_cls = '.'.join(reversed(class_name))
|
||||
attrs['_class_name'] = _cls
|
||||
attrs['_superclasses'] = superclasses
|
||||
attrs['_subclasses'] = (_cls, )
|
||||
attrs['_types'] = attrs['_subclasses'] # TODO depreciate _types
|
||||
_cls = ".".join(reversed(class_name))
|
||||
attrs["_class_name"] = _cls
|
||||
attrs["_superclasses"] = superclasses
|
||||
attrs["_subclasses"] = (_cls,)
|
||||
attrs["_types"] = attrs["_subclasses"] # TODO depreciate _types
|
||||
|
||||
# Create the new_class
|
||||
new_class = super_new(cls, name, bases, attrs)
|
||||
new_class = super_new(mcs, name, bases, attrs)
|
||||
|
||||
# Set _subclasses
|
||||
for base in document_bases:
|
||||
@@ -146,8 +158,12 @@ class DocumentMetaclass(type):
|
||||
base._subclasses += (_cls,)
|
||||
base._types = base._subclasses # TODO depreciate _types
|
||||
|
||||
(Document, EmbeddedDocument, DictField,
|
||||
CachedReferenceField) = cls._import_classes()
|
||||
(
|
||||
Document,
|
||||
EmbeddedDocument,
|
||||
DictField,
|
||||
CachedReferenceField,
|
||||
) = mcs._import_classes()
|
||||
|
||||
if issubclass(new_class, Document):
|
||||
new_class._collection = None
|
||||
@@ -155,97 +171,77 @@ class DocumentMetaclass(type):
|
||||
# Add class to the _document_registry
_document_registry[new_class._class_name] = new_class

# In Python 2, user-defined method objects have special read-only
# attributes 'im_func' and 'im_self' which contain the function obj
# and class instance object respectively. With Python 3 these special
# attributes have been replaced by __func__ and __self__. The Blinker
# module continues to use im_func and im_self, so the code below
# copies __func__ into im_func and __self__ into im_self for
# classmethod objects in Document derived classes.
if six.PY3:
for val in new_class.__dict__.values():
if isinstance(val, classmethod):
f = val.__get__(new_class)
if hasattr(f, '__func__') and not hasattr(f, 'im_func'):
f.__dict__.update({'im_func': getattr(f, '__func__')})
if hasattr(f, '__self__') and not hasattr(f, 'im_self'):
f.__dict__.update({'im_self': getattr(f, '__self__')})

# Handle delete rules
for field in new_class._fields.itervalues():
for field in new_class._fields.values():
f = field
if f.owner_document is None:
f.owner_document = new_class
delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING)
delete_rule = getattr(f, "reverse_delete_rule", DO_NOTHING)
if isinstance(f, CachedReferenceField):

if issubclass(new_class, EmbeddedDocument):
raise InvalidDocumentError('CachedReferenceFields is not '
'allowed in EmbeddedDocuments')
if not f.document_type:
raise InvalidDocumentError(
'Document is not available to sync')
"CachedReferenceFields is not allowed in EmbeddedDocuments"
)

if f.auto_sync:
f.start_listener()

f.document_type._cached_reference_fields.append(f)

if isinstance(f, ComplexBaseField) and hasattr(f, 'field'):
delete_rule = getattr(f.field,
'reverse_delete_rule',
DO_NOTHING)
if isinstance(f, ComplexBaseField) and hasattr(f, "field"):
delete_rule = getattr(f.field, "reverse_delete_rule", DO_NOTHING)
if isinstance(f, DictField) and delete_rule != DO_NOTHING:
msg = ('Reverse delete rules are not supported '
'for %s (field: %s)' %
(field.__class__.__name__, field.name))
msg = (
"Reverse delete rules are not supported "
"for %s (field: %s)" % (field.__class__.__name__, field.name)
)
raise InvalidDocumentError(msg)

f = field.field

if delete_rule != DO_NOTHING:
if issubclass(new_class, EmbeddedDocument):
msg = ('Reverse delete rules are not supported for '
'EmbeddedDocuments (field: %s)' % field.name)
msg = (
"Reverse delete rules are not supported for "
"EmbeddedDocuments (field: %s)" % field.name
)
raise InvalidDocumentError(msg)
f.document_type.register_delete_rule(new_class,
field.name, delete_rule)
f.document_type.register_delete_rule(new_class, field.name, delete_rule)

if (field.name and hasattr(Document, field.name) and
EmbeddedDocument not in new_class.mro()):
msg = ('%s is a document method and not a valid '
'field name' % field.name)
if (
field.name
and hasattr(Document, field.name)
and EmbeddedDocument not in new_class.mro()
):
msg = "%s is a document method and not a valid field name" % field.name
raise InvalidDocumentError(msg)

return new_class

def add_to_class(self, name, value):
setattr(self, name, value)

@classmethod
def _get_bases(cls, bases):
def _get_bases(mcs, bases):
if isinstance(bases, BasesTuple):
return bases
seen = []
bases = cls.__get_bases(bases)
bases = mcs.__get_bases(bases)
unique_bases = (b for b in bases if not (b in seen or seen.append(b)))
return BasesTuple(unique_bases)

@classmethod
def __get_bases(cls, bases):
def __get_bases(mcs, bases):
for base in bases:
if base is object:
continue
yield base
for child_base in cls.__get_bases(base.__bases__):
yield child_base
yield from mcs.__get_bases(base.__bases__)

@classmethod
def _import_classes(cls):
Document = _import_class('Document')
EmbeddedDocument = _import_class('EmbeddedDocument')
DictField = _import_class('DictField')
CachedReferenceField = _import_class('CachedReferenceField')
def _import_classes(mcs):
Document = _import_class("Document")
EmbeddedDocument = _import_class("EmbeddedDocument")
DictField = _import_class("DictField")
CachedReferenceField = _import_class("CachedReferenceField")
return Document, EmbeddedDocument, DictField, CachedReferenceField
@@ -254,182 +250,201 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
collection in the database.
"""

def __new__(cls, name, bases, attrs):
flattened_bases = cls._get_bases(bases)
super_new = super(TopLevelDocumentMetaclass, cls).__new__
def __new__(mcs, name, bases, attrs):
flattened_bases = mcs._get_bases(bases)
super_new = super().__new__

# Set default _meta data if base class, otherwise get user defined meta
if attrs.get('my_metaclass') == TopLevelDocumentMetaclass:
if attrs.get("my_metaclass") == TopLevelDocumentMetaclass:
# defaults
attrs['_meta'] = {
'abstract': True,
'max_documents': None,
'max_size': None,
'ordering': [],  # default ordering applied at runtime
'indexes': [],  # indexes to be ensured at runtime
'id_field': None,
'index_background': False,
'index_drop_dups': False,
'index_opts': None,
'delete_rules': None,

attrs["_meta"] = {
"abstract": True,
"max_documents": None,
"max_size": None,
"ordering": [],  # default ordering applied at runtime
"indexes": [],  # indexes to be ensured at runtime
"id_field": None,
"index_background": False,
"index_opts": None,
"delete_rules": None,
# allow_inheritance can be True, False, and None. True means
# "allow inheritance", False means "don't allow inheritance",
# None means "do whatever your parent does, or don't allow
# inheritance if you're a top-level class".
'allow_inheritance': None,
"allow_inheritance": None,
}
attrs['_is_base_cls'] = True
attrs['_meta'].update(attrs.get('meta', {}))
attrs["_is_base_cls"] = True
attrs["_meta"].update(attrs.get("meta", {}))
else:
attrs['_meta'] = attrs.get('meta', {})
attrs["_meta"] = attrs.get("meta", {})
# Explicitly set abstract to false unless set
attrs['_meta']['abstract'] = attrs['_meta'].get('abstract', False)
attrs['_is_base_cls'] = False
attrs["_meta"]["abstract"] = attrs["_meta"].get("abstract", False)
attrs["_is_base_cls"] = False

# Set flag marking as document class - as opposed to an object mixin
attrs['_is_document'] = True
attrs["_is_document"] = True

# Ensure queryset_class is inherited
if 'objects' in attrs:
manager = attrs['objects']
if hasattr(manager, 'queryset_class'):
attrs['_meta']['queryset_class'] = manager.queryset_class
if "objects" in attrs:
manager = attrs["objects"]
if hasattr(manager, "queryset_class"):
attrs["_meta"]["queryset_class"] = manager.queryset_class

# Clean up top level meta
if 'meta' in attrs:
del attrs['meta']
if "meta" in attrs:
del attrs["meta"]

# Find the parent document class
parent_doc_cls = [b for b in flattened_bases
if b.__class__ == TopLevelDocumentMetaclass]
parent_doc_cls = [
b for b in flattened_bases if b.__class__ == TopLevelDocumentMetaclass
]
parent_doc_cls = None if not parent_doc_cls else parent_doc_cls[0]

# Prevent classes setting collection different to their parents
# If parent wasn't an abstract class
if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) and
not parent_doc_cls._meta.get('abstract', True)):
msg = 'Trying to set a collection on a subclass (%s)' % name
if (
parent_doc_cls
and "collection" in attrs.get("_meta", {})
and not parent_doc_cls._meta.get("abstract", True)
):
msg = "Trying to set a collection on a subclass (%s)" % name
warnings.warn(msg, SyntaxWarning)
del attrs['_meta']['collection']
del attrs["_meta"]["collection"]

# Ensure abstract documents have abstract bases
if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'):
if (parent_doc_cls and
not parent_doc_cls._meta.get('abstract', False)):
msg = 'Abstract document cannot have non-abstract base'
if attrs.get("_is_base_cls") or attrs["_meta"].get("abstract"):
if parent_doc_cls and not parent_doc_cls._meta.get("abstract", False):
msg = "Abstract document cannot have non-abstract base"
raise ValueError(msg)
return super_new(cls, name, bases, attrs)
return super_new(mcs, name, bases, attrs)

# Merge base class metas.
# Uses a special MetaDict that handles various merging rules
meta = MetaDict()
for base in flattened_bases[::-1]:
# Add any mixin metadata from plain objects
if hasattr(base, 'meta'):
if hasattr(base, "meta"):
meta.merge(base.meta)
elif hasattr(base, '_meta'):
elif hasattr(base, "_meta"):
meta.merge(base._meta)

# Set collection in the meta if it's callable
if (getattr(base, '_is_document', False) and
not base._meta.get('abstract')):
collection = meta.get('collection', None)
if getattr(base, "_is_document", False) and not base._meta.get("abstract"):
collection = meta.get("collection", None)
if callable(collection):
meta['collection'] = collection(base)
meta["collection"] = collection(base)

meta.merge(attrs.get('_meta', {}))  # Top level meta
meta.merge(attrs.get("_meta", {}))  # Top level meta

# Only simple classes (i.e. direct subclasses of Document) may set
# allow_inheritance to False. If the base Document allows inheritance,
# none of its subclasses can override allow_inheritance to False.
simple_class = all([b._meta.get('abstract')
for b in flattened_bases if hasattr(b, '_meta')])
simple_class = all(
b._meta.get("abstract") for b in flattened_bases if hasattr(b, "_meta")
)
if (
not simple_class and
meta['allow_inheritance'] is False and
not meta['abstract']
not simple_class
and meta["allow_inheritance"] is False
and not meta["abstract"]
):
raise ValueError('Only direct subclasses of Document may set '
'"allow_inheritance" to False')
raise ValueError(
"Only direct subclasses of Document may set "
'"allow_inheritance" to False'
)

# Set default collection name
if 'collection' not in meta:
meta['collection'] = ''.join('_%s' % c if c.isupper() else c
for c in name).strip('_').lower()
attrs['_meta'] = meta
if "collection" not in meta:
meta["collection"] = (
"".join("_%s" % c if c.isupper() else c for c in name)
.strip("_")
.lower()
)
attrs["_meta"] = meta

# Call super and get the new class
new_class = super_new(cls, name, bases, attrs)
new_class = super_new(mcs, name, bases, attrs)

meta = new_class._meta

# Set index specifications
meta['index_specs'] = new_class._build_index_specs(meta['indexes'])
meta["index_specs"] = new_class._build_index_specs(meta["indexes"])

# If collection is a callable - call it and set the value
collection = meta.get('collection')
collection = meta.get("collection")
if callable(collection):
new_class._meta['collection'] = collection(new_class)
new_class._meta["collection"] = collection(new_class)

# Provide a default queryset unless one exists or has been set
if 'objects' not in dir(new_class):
if "objects" not in dir(new_class):
new_class.objects = QuerySetManager()

# Validate the fields and set primary key if needed
for field_name, field in new_class._fields.iteritems():
for field_name, field in new_class._fields.items():
if field.primary_key:
# Ensure only one primary key is set
current_pk = new_class._meta.get('id_field')
current_pk = new_class._meta.get("id_field")
if current_pk and current_pk != field_name:
raise ValueError('Cannot override primary key field')
raise ValueError("Cannot override primary key field")

# Set primary key
if not current_pk:
new_class._meta['id_field'] = field_name
new_class._meta["id_field"] = field_name
new_class.id = field

# Set primary key if not defined by the document
new_class._auto_id_field = getattr(parent_doc_cls,
'_auto_id_field', False)
if not new_class._meta.get('id_field'):
# After 0.10, find not existing names, instead of overwriting
id_name, id_db_name = cls.get_auto_id_names(new_class)
new_class._auto_id_field = True
new_class._meta['id_field'] = id_name
# If the document doesn't explicitly define a primary key field, create
# one. Make it an ObjectIdField and give it a non-clashing name ("id"
# by default, but can be different if that one's taken).
if not new_class._meta.get("id_field"):
id_name, id_db_name = mcs.get_auto_id_names(new_class)
new_class._meta["id_field"] = id_name
new_class._fields[id_name] = ObjectIdField(db_field=id_db_name)
new_class._fields[id_name].name = id_name
new_class.id = new_class._fields[id_name]
new_class._db_field_map[id_name] = id_db_name
new_class._reverse_db_field_map[id_db_name] = id_name
# Prepend id field to _fields_ordered

# Prepend the ID field to _fields_ordered (so that it's *always*
# the first field).
new_class._fields_ordered = (id_name,) + new_class._fields_ordered

# Merge in exceptions with parent hierarchy
# Merge in exceptions with parent hierarchy.
exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned)
module = attrs.get('__module__')
module = attrs.get("__module__")
for exc in exceptions_to_merge:
name = exc.__name__
parents = tuple(getattr(base, name) for base in flattened_bases
if hasattr(base, name)) or (exc,)
# Create new exception and set to new_class
exception = type(name, parents, {'__module__': module})
parents = tuple(
getattr(base, name) for base in flattened_bases if hasattr(base, name)
) or (exc,)

# Create a new exception and set it as an attribute on the new
# class.
exception = type(name, parents, {"__module__": module})
setattr(new_class, name, exception)

return new_class

@classmethod
def get_auto_id_names(cls, new_class):
id_name, id_db_name = ('id', '_id')
if id_name not in new_class._fields and \
id_db_name not in (v.db_field for v in new_class._fields.values()):
def get_auto_id_names(mcs, new_class):
"""Find a name for the automatic ID field for the given new class.

Return a two-element tuple where the first item is the field name (i.e.
the attribute name on the object) and the second element is the DB
field name (i.e. the name of the key stored in MongoDB).

Defaults to ('id', '_id'), or generates a non-clashing name in the form
of ('auto_id_X', '_auto_id_X') if the default name is already taken.
"""
id_name, id_db_name = ("id", "_id")
existing_fields = {field_name for field_name in new_class._fields}
existing_db_fields = {v.db_field for v in new_class._fields.values()}
if id_name not in existing_fields and id_db_name not in existing_db_fields:
return id_name, id_db_name
id_basename, id_db_basename, i = 'auto_id', '_auto_id', 0
while id_name in new_class._fields or \
id_db_name in (v.db_field for v in new_class._fields.values()):
id_name = '{0}_{1}'.format(id_basename, i)
id_db_name = '{0}_{1}'.format(id_db_basename, i)
i += 1

id_basename, id_db_basename, i = ("auto_id", "_auto_id", 0)
for i in itertools.count():
id_name = f"{id_basename}_{i}"
id_db_name = f"{id_db_basename}_{i}"
if id_name not in existing_fields and id_db_name not in existing_db_fields:
return id_name, id_db_name
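
A minimal sketch of the naming fallback above, assuming a hypothetical document whose "_id" key is already claimed by an explicit field:

from mongoengine import Document, StringField

class LogEntry(Document):
    # The DB key "_id" is taken below, so get_auto_id_names() falls back
    # to ("auto_id_0", "_auto_id_0") for the generated primary key.
    ref = StringField(db_field="_id")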
@@ -437,10 +452,11 @@ class MetaDict(dict):
"""Custom dictionary for meta classes.
Handles the merging of set indexes
"""
_merge_options = ('indexes',)

_merge_options = ("indexes",)

def merge(self, new_options):
for k, v in new_options.iteritems():
for k, v in new_options.items():
if k in self._merge_options:
self[k] = self.get(k, []) + v
else:
@@ -449,4 +465,5 @@ class MetaDict(dict):

class BasesTuple(tuple):
"""Special class to handle introspection of bases tuple in __new__"""

pass
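
As a usage note for the inheritance checks above, a short sketch (the document names are hypothetical): subclassing is only permitted when the parent opts in via allow_inheritance, otherwise the metaclass raises the ValueError shown earlier.

from mongoengine import Document, StringField

class Animal(Document):
    name = StringField()
    meta = {"allow_inheritance": True}  # omit this and Dog below raises ValueError

class Dog(Animal):
    pass  # stored with _cls "Animal.Dog", sharing the parent's collection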
mongoengine/base/utils.py (Normal file, +22)
@@ -0,0 +1,22 @@
import re


class LazyRegexCompiler:
"""Descriptor to allow lazy compilation of regex"""

def __init__(self, pattern, flags=0):
self._pattern = pattern
self._flags = flags
self._compiled_regex = None

@property
def compiled_regex(self):
if self._compiled_regex is None:
self._compiled_regex = re.compile(self._pattern, self._flags)
return self._compiled_regex

def __get__(self, instance, owner):
return self.compiled_regex

def __set__(self, instance, value):
raise AttributeError("Can not set attribute LazyRegexCompiler")
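
A small usage sketch for the descriptor above (the validator class and pattern are hypothetical): the pattern is only compiled on first attribute access, not at import time, and the attribute is read-only because the descriptor defines __set__.

import re

class EmailValidator:
    EMAIL_PATTERN = LazyRegexCompiler(r"[^@]+@[^@]+\.[^@]+", flags=re.IGNORECASE)

validator = EmailValidator()
validator.EMAIL_PATTERN.match("user@example.com")  # first access compiles the regex
# Assigning raises AttributeError because the descriptor defines __set__:
# validator.EMAIL_PATTERN = None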
@@ -19,38 +19,44 @@ def _import_class(cls_name):
if cls_name in _class_registry_cache:
return _class_registry_cache.get(cls_name)

doc_classes = ('Document', 'DynamicEmbeddedDocument', 'EmbeddedDocument',
'MapReduceDocument')
doc_classes = (
"Document",
"DynamicEmbeddedDocument",
"EmbeddedDocument",
"MapReduceDocument",
)

# Field Classes
if not _field_list_cache:
from mongoengine.fields import __all__ as fields

_field_list_cache.extend(fields)
from mongoengine.base.fields import __all__ as fields

_field_list_cache.extend(fields)

field_classes = _field_list_cache

queryset_classes = ('OperationError',)
deref_classes = ('DeReference',)
deref_classes = ("DeReference",)

if cls_name == 'BaseDocument':
if cls_name == "BaseDocument":
from mongoengine.base import document as module
import_classes = ['BaseDocument']

import_classes = ["BaseDocument"]
elif cls_name in doc_classes:
from mongoengine import document as module

import_classes = doc_classes
elif cls_name in field_classes:
from mongoengine import fields as module

import_classes = field_classes
elif cls_name in queryset_classes:
from mongoengine import queryset as module
import_classes = queryset_classes
elif cls_name in deref_classes:
from mongoengine import dereference as module

import_classes = deref_classes
else:
raise ValueError('No import set for: ' % cls_name)
raise ValueError("No import set for: %s" % cls_name)

for cls in import_classes:
_class_registry_cache[cls] = getattr(module, cls)
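
A brief sketch of the registry behaviour above (assuming the function lives in mongoengine.common, which this excerpt doesn't name): the first lookup imports the class and caches it, so repeated calls return the same object cheaply.

from mongoengine.common import _import_class

Document = _import_class("Document")
assert _import_class("Document") is Document  # served from _class_registry_cache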
@@ -1,49 +1,68 @@
from pymongo import MongoClient, ReadPreference, uri_parser
import six
from pymongo.database import _check_name

from mongoengine.python_support import IS_PYMONGO_3

__all__ = ['MongoEngineConnectionError', 'connect', 'register_connection',
'DEFAULT_CONNECTION_NAME']
__all__ = [
"DEFAULT_CONNECTION_NAME",
"DEFAULT_DATABASE_NAME",
"ConnectionFailure",
"connect",
"disconnect",
"disconnect_all",
"get_connection",
"get_db",
"register_connection",
]


DEFAULT_CONNECTION_NAME = 'default'

if IS_PYMONGO_3:
READ_PREFERENCE = ReadPreference.PRIMARY
else:
from pymongo import MongoReplicaSetClient
READ_PREFERENCE = False


class MongoEngineConnectionError(Exception):
"""Error raised when the database connection can't be established or
when a connection with a requested alias can't be retrieved.
"""
pass

DEFAULT_CONNECTION_NAME = "default"
DEFAULT_DATABASE_NAME = "test"
DEFAULT_HOST = "localhost"
DEFAULT_PORT = 27017

_connection_settings = {}
_connections = {}
_dbs = {}

READ_PREFERENCE = ReadPreference.PRIMARY


def register_connection(alias, db=None, name=None, host=None, port=None,


class ConnectionFailure(Exception):
"""Error raised when the database connection can't be established or
when a connection with a requested alias can't be retrieved.
"""

pass


def _check_db_name(name):
"""Check if a database name is valid.
This functionality is copied from pymongo Database class constructor.
"""
if not isinstance(name, str):
raise TypeError("name must be an instance of %s" % str)
elif name != "$external":
_check_name(name)

def _get_connection_settings(
db=None,
name=None,
host=None,
port=None,
read_preference=READ_PREFERENCE,
username=None, password=None,
username=None,
password=None,
authentication_source=None,
authentication_mechanism=None,
**kwargs):
"""Add a connection.
**kwargs,
):
"""Get the connection settings as a dict

:param alias: the name that will be used to refer to this connection
throughout MongoEngine
:param name: the name of the specific database to use
:param db: the name of the database to use, for compatibility with connect
:param name: the name of the specific database to use
:param host: the host name of the :program:`mongod` instance to connect to
:param port: the port that the :program:`mongod` instance is running on
:param read_preference: The read preference for the collection
** Added pymongo 2.1
:param username: username to authenticate with
:param password: password to authenticate with
:param authentication_source: database to authenticate against
@@ -55,75 +74,173 @@ def register_connection(alias, db=None, name=None, host=None, port=None,
:param kwargs: ad-hoc parameters to be passed into the pymongo driver,
for example maxpoolsize, tz_aware, etc. See the documentation
for pymongo's `MongoClient` for a full list.

.. versionchanged:: 0.10.6 - added mongomock support
"""
conn_settings = {
'name': name or db or 'test',
'host': host or 'localhost',
'port': port or 27017,
'read_preference': read_preference,
'username': username,
'password': password,
'authentication_source': authentication_source,
'authentication_mechanism': authentication_mechanism
"name": name or db or DEFAULT_DATABASE_NAME,
"host": host or DEFAULT_HOST,
"port": port or DEFAULT_PORT,
"read_preference": read_preference,
"username": username,
"password": password,
"authentication_source": authentication_source,
"authentication_mechanism": authentication_mechanism,
}

conn_host = conn_settings['host']
_check_db_name(conn_settings["name"])
conn_host = conn_settings["host"]

# Host can be a list or a string, so if string, force to a list.
if isinstance(conn_host, six.string_types):
if isinstance(conn_host, str):
conn_host = [conn_host]

resolved_hosts = []
for entity in conn_host:

# Handle Mongomock
if entity.startswith('mongomock://'):
conn_settings['is_mock'] = True
if entity.startswith("mongomock://"):
conn_settings["is_mock"] = True
# `mongomock://` is not a valid url prefix and must be replaced by `mongodb://`
resolved_hosts.append(entity.replace('mongomock://', 'mongodb://', 1))
new_entity = entity.replace("mongomock://", "mongodb://", 1)
resolved_hosts.append(new_entity)

uri_dict = uri_parser.parse_uri(new_entity)

database = uri_dict.get("database")
if database:
conn_settings["name"] = database

# Handle URI style connections, only updating connection params which
# were explicitly specified in the URI.
elif '://' in entity:
elif "://" in entity:
uri_dict = uri_parser.parse_uri(entity)
resolved_hosts.append(entity)

if uri_dict.get('database'):
conn_settings['name'] = uri_dict.get('database')
database = uri_dict.get("database")
if database:
conn_settings["name"] = database

for param in ('read_preference', 'username', 'password'):
for param in ("read_preference", "username", "password"):
if uri_dict.get(param):
conn_settings[param] = uri_dict[param]

uri_options = uri_dict['options']
if 'replicaset' in uri_options:
conn_settings['replicaSet'] = uri_options['replicaset']
if 'authsource' in uri_options:
conn_settings['authentication_source'] = uri_options['authsource']
if 'authmechanism' in uri_options:
conn_settings['authentication_mechanism'] = uri_options['authmechanism']
uri_options = uri_dict["options"]
if "replicaset" in uri_options:
conn_settings["replicaSet"] = uri_options["replicaset"]
if "authsource" in uri_options:
conn_settings["authentication_source"] = uri_options["authsource"]
if "authmechanism" in uri_options:
conn_settings["authentication_mechanism"] = uri_options["authmechanism"]
if "readpreference" in uri_options:
read_preferences = (
ReadPreference.NEAREST,
ReadPreference.PRIMARY,
ReadPreference.PRIMARY_PREFERRED,
ReadPreference.SECONDARY,
ReadPreference.SECONDARY_PREFERRED,
)

# Starting with PyMongo v3.5, the "readpreference" option is
# returned as a string (e.g. "secondaryPreferred") and not an
# int (e.g. 3).
# TODO simplify the code below once we drop support for
# PyMongo v3.4.
read_pf_mode = uri_options["readpreference"]
if isinstance(read_pf_mode, str):
read_pf_mode = read_pf_mode.lower()
for preference in read_preferences:
if (
preference.name.lower() == read_pf_mode
or preference.mode == read_pf_mode
):
conn_settings["read_preference"] = preference
break
else:
resolved_hosts.append(entity)
conn_settings['host'] = resolved_hosts
conn_settings["host"] = resolved_hosts

# Deprecated parameters that should not be passed on
kwargs.pop('slaves', None)
kwargs.pop('is_slave', None)
kwargs.pop("slaves", None)
kwargs.pop("is_slave", None)

conn_settings.update(kwargs)
return conn_settings
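
To illustrate the host resolution above, a sketch of the settings dict produced for a mongomock URI (the function is internal; values follow directly from the code above):

from mongoengine.connection import _get_connection_settings

settings = _get_connection_settings(host="mongomock://localhost/testdb")
assert settings["is_mock"] is True
assert settings["host"] == ["mongodb://localhost/testdb"]  # scheme swapped for the URI parser
assert settings["name"] == "testdb"  # database name taken from the URI path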
def register_connection(
alias,
db=None,
name=None,
host=None,
port=None,
read_preference=READ_PREFERENCE,
username=None,
password=None,
authentication_source=None,
authentication_mechanism=None,
**kwargs,
):
"""Register the connection settings.

:param alias: the name that will be used to refer to this connection
throughout MongoEngine
:param db: the name of the database to use, for compatibility with connect
:param name: the name of the specific database to use
:param host: the host name of the :program:`mongod` instance to connect to
:param port: the port that the :program:`mongod` instance is running on
:param read_preference: The read preference for the collection
:param username: username to authenticate with
:param password: password to authenticate with
:param authentication_source: database to authenticate against
:param authentication_mechanism: database authentication mechanisms.
By default, use SCRAM-SHA-1 with MongoDB 3.0 and later,
MONGODB-CR (MongoDB Challenge Response protocol) for older servers.
:param is_mock: explicitly use mongomock for this connection
(can also be done by using `mongomock://` as db host prefix)
:param kwargs: ad-hoc parameters to be passed into the pymongo driver,
for example maxpoolsize, tz_aware, etc. See the documentation
for pymongo's `MongoClient` for a full list.
"""
conn_settings = _get_connection_settings(
db=db,
name=name,
host=host,
port=port,
read_preference=read_preference,
username=username,
password=password,
authentication_source=authentication_source,
authentication_mechanism=authentication_mechanism,
**kwargs,
)
_connection_settings[alias] = conn_settings

def disconnect(alias=DEFAULT_CONNECTION_NAME):
"""Close the connection with a given alias."""
from mongoengine import Document
from mongoengine.base.common import _get_documents_by_db

if alias in _connections:
get_connection(alias=alias).close()
del _connections[alias]

if alias in _dbs:
# Detach all cached collections in Documents
for doc_cls in _get_documents_by_db(alias, DEFAULT_CONNECTION_NAME):
if issubclass(doc_cls, Document):  # Skip EmbeddedDocument
doc_cls._disconnect()

del _dbs[alias]

if alias in _connection_settings:
del _connection_settings[alias]


def disconnect_all():
"""Close all registered databases."""
for alias in list(_connections.keys()):
disconnect(alias)

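
A minimal usage sketch for the registration and teardown helpers above (aliases and database names are hypothetical):

from mongoengine import connect, disconnect, disconnect_all

connect(db="library", alias="books")   # registers the alias and opens a connection
connect(db="accounts", alias="users")
disconnect(alias="books")              # closes one alias and drops its cached collections
disconnect_all()                       # closes every remaining registered connection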
def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
"""Return a connection with a given alias."""
@@ -138,85 +255,93 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
return _connections[alias]

# Validate that the requested alias exists in the _connection_settings.
# Raise MongoEngineConnectionError if it doesn't.
# Raise ConnectionFailure if it doesn't.
if alias not in _connection_settings:
if alias == DEFAULT_CONNECTION_NAME:
msg = 'You have not defined a default connection'
msg = "You have not defined a default connection"
else:
msg = 'Connection with alias "%s" has not been defined' % alias
raise MongoEngineConnectionError(msg)
raise ConnectionFailure(msg)

def _clean_settings(settings_dict):
# set literal more efficient than calling set function
irrelevant_fields_set = {
'name', 'username', 'password',
'authentication_source', 'authentication_mechanism'
"name",
"username",
"password",
"authentication_source",
"authentication_mechanism",
}
return {
k: v for k, v in settings_dict.items()
if k not in irrelevant_fields_set
k: v for k, v in settings_dict.items() if k not in irrelevant_fields_set
}

raw_conn_settings = _connection_settings[alias].copy()

# Retrieve a copy of the connection settings associated with the requested
# alias and remove the database name and authentication info (we don't
# care about them at this point).
conn_settings = _clean_settings(_connection_settings[alias].copy())
conn_settings = _clean_settings(raw_conn_settings)

# Determine if we should use PyMongo's or mongomock's MongoClient.
is_mock = conn_settings.pop('is_mock', False)
is_mock = conn_settings.pop("is_mock", False)
if is_mock:
try:
import mongomock
except ImportError:
raise RuntimeError('You need mongomock installed to mock '
'MongoEngine.')
raise RuntimeError("You need mongomock installed to mock MongoEngine.")
connection_class = mongomock.MongoClient
else:
connection_class = MongoClient

# For replica set connections with PyMongo 2.x, use
# MongoReplicaSetClient.
# TODO remove this once we stop supporting PyMongo 2.x.
if 'replicaSet' in conn_settings and not IS_PYMONGO_3:
connection_class = MongoReplicaSetClient
conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
# Re-use existing connection if one is suitable.
existing_connection = _find_existing_connection(raw_conn_settings)
if existing_connection:
connection = existing_connection
else:
connection = _create_connection(
alias=alias, connection_class=connection_class, **conn_settings
)
_connections[alias] = connection
return _connections[alias]

# hosts_or_uri has to be a string, so if 'host' was provided
# as a list, join its parts and separate them by ','
if isinstance(conn_settings['hosts_or_uri'], list):
conn_settings['hosts_or_uri'] = ','.join(
conn_settings['hosts_or_uri'])

# Discard port since it can't be used on MongoReplicaSetClient
conn_settings.pop('port', None)
def _create_connection(alias, connection_class, **connection_settings):
"""
Create the new connection for this alias. Raise
ConnectionFailure if it can't be established.
"""
try:
return connection_class(**connection_settings)
except Exception as e:
raise ConnectionFailure(f"Cannot connect to database {alias} :\n{e}")

# Iterate over all of the connection settings and if a connection with
# the same parameters is already established, use it instead of creating
# a new one.
existing_connection = None
connection_settings_iterator = (

def _find_existing_connection(connection_settings):
"""
Check if an existing connection could be reused

Iterate over all of the connection settings and if an existing connection
with the same parameters is suitable, return it

:param connection_settings: the settings of the new connection
:return: An existing connection or None
"""
connection_settings_bis = (
(db_alias, settings.copy())
for db_alias, settings in _connection_settings.items()
)
for db_alias, connection_settings in connection_settings_iterator:
connection_settings = _clean_settings(connection_settings)
if conn_settings == connection_settings and _connections.get(db_alias):
existing_connection = _connections[db_alias]
break

# If an existing connection was found, assign it to the new alias
if existing_connection:
_connections[alias] = existing_connection
else:
# Otherwise, create the new connection for this alias. Raise
# MongoEngineConnectionError if it can't be established.
try:
_connections[alias] = connection_class(**conn_settings)
except Exception as e:
raise MongoEngineConnectionError(
'Cannot connect to database %s :\n%s' % (alias, e))
def _clean_settings(settings_dict):
# Only the database name is removed here; username/password/
# authentication_source/authentication_mechanism are kept so we can
# tell whether an existing connection can be shared
# (cf. https://github.com/MongoEngine/mongoengine/issues/2047)
return {k: v for k, v in settings_dict.items() if k != "name"}

return _connections[alias]
cleaned_conn_settings = _clean_settings(connection_settings)
for db_alias, connection_settings in connection_settings_bis:
db_conn_settings = _clean_settings(connection_settings)
if cleaned_conn_settings == db_conn_settings and _connections.get(db_alias):
return _connections[db_alias]

def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
@@ -226,14 +351,18 @@ def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
if alias not in _dbs:
conn = get_connection(alias)
conn_settings = _connection_settings[alias]
db = conn[conn_settings['name']]
auth_kwargs = {'source': conn_settings['authentication_source']}
if conn_settings['authentication_mechanism'] is not None:
auth_kwargs['mechanism'] = conn_settings['authentication_mechanism']
db = conn[conn_settings["name"]]
auth_kwargs = {"source": conn_settings["authentication_source"]}
if conn_settings["authentication_mechanism"] is not None:
auth_kwargs["mechanism"] = conn_settings["authentication_mechanism"]
# Authenticate if necessary
if conn_settings['username'] and (conn_settings['password'] or
conn_settings['authentication_mechanism'] == 'MONGODB-X509'):
db.authenticate(conn_settings['username'], conn_settings['password'], **auth_kwargs)
if conn_settings["username"] and (
conn_settings["password"]
or conn_settings["authentication_mechanism"] == "MONGODB-X509"
):
db.authenticate(
conn_settings["username"], conn_settings["password"], **auth_kwargs
)
_dbs[alias] = db
return _dbs[alias]

@@ -248,12 +377,23 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs):
Multiple databases are supported by using aliases. Provide a separate
`alias` to connect to a different instance of :program:`mongod`.

In order to replace a connection identified by a given alias, you'll
need to call ``disconnect`` first.

See the docstring for `register_connection` for more details about all
supported kwargs.

.. versionchanged:: 0.6 - added multiple database support.
"""
if alias not in _connections:
if alias in _connections:
prev_conn_setting = _connection_settings[alias]
new_conn_settings = _get_connection_settings(db, **kwargs)

if new_conn_settings != prev_conn_setting:
err_msg = (
"A different connection with alias `{}` was already "
"registered. Use disconnect() first"
).format(alias)
raise ConnectionFailure(err_msg)
else:
register_connection(alias, db, **kwargs)

return get_connection(alias)

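
The new guard in connect() above means reconnecting under an existing alias with different settings now fails fast instead of being silently ignored; a sketch:

from mongoengine import connect, disconnect
from mongoengine.connection import ConnectionFailure

connect(db="prod", alias="default")
try:
    connect(db="other", alias="default")  # same alias, different settings
except ConnectionFailure:
    disconnect("default")                 # required before re-registering the alias
    connect(db="other", alias="default")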
@@ -1,14 +1,24 @@
from contextlib import contextmanager

from pymongo.read_concern import ReadConcern
from pymongo.write_concern import WriteConcern

from mongoengine.common import _import_class
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
from mongoengine.pymongo_support import count_documents

__all__ = (
"switch_db",
"switch_collection",
"no_dereference",
"no_sub_classes",
"query_counter",
"set_write_concern",
"set_read_write_concern",
)


__all__ = ('switch_db', 'switch_collection', 'no_dereference',
'no_sub_classes', 'query_counter', 'set_write_concern')


class switch_db(object):
class switch_db:
"""switch_db alias context manager.

Example ::
@@ -35,21 +45,21 @@ class switch_db(object):
self.cls = cls
self.collection = cls._get_collection()
self.db_alias = db_alias
self.ori_db_alias = cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME)
self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME)

def __enter__(self):
"""Change the db_alias and clear the cached collection."""
self.cls._meta['db_alias'] = self.db_alias
self.cls._meta["db_alias"] = self.db_alias
self.cls._collection = None
return self.cls

def __exit__(self, t, value, traceback):
"""Reset the db_alias and collection."""
self.cls._meta['db_alias'] = self.ori_db_alias
self.cls._meta["db_alias"] = self.ori_db_alias
self.cls._collection = self.collection

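
A usage sketch for switch_db (the User document and the "archive" alias are hypothetical; both alias and document must be registered beforehand):

from mongoengine.context_managers import switch_db

with switch_db(User, "archive") as ArchivedUser:
    # saved to the database behind the "archive" alias; the original
    # db_alias and cached collection are restored on exit
    ArchivedUser(name="Ross").save()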
class switch_collection(object):
class switch_collection:
"""switch_collection alias context manager.

Example ::
@@ -91,7 +101,7 @@ class switch_collection(object):
self.cls._get_collection_name = self.ori_get_collection_name


class no_dereference(object):
class no_dereference:
"""no_dereference context manager.

Turns off all dereferencing in Documents for the duration of the context
@@ -108,14 +118,15 @@ class no_dereference(object):
"""
self.cls = cls

ReferenceField = _import_class('ReferenceField')
GenericReferenceField = _import_class('GenericReferenceField')
ComplexBaseField = _import_class('ComplexBaseField')
ReferenceField = _import_class("ReferenceField")
GenericReferenceField = _import_class("GenericReferenceField")
ComplexBaseField = _import_class("ComplexBaseField")

self.deref_fields = [k for k, v in self.cls._fields.iteritems()
if isinstance(v, (ReferenceField,
GenericReferenceField,
ComplexBaseField))]
self.deref_fields = [
k
for k, v in self.cls._fields.items()
if isinstance(v, (ReferenceField, GenericReferenceField, ComplexBaseField))
]

def __enter__(self):
"""Change the objects default and _auto_dereference values."""
@@ -130,7 +141,7 @@ class no_dereference(object):
return self.cls


class no_sub_classes(object):
class no_sub_classes:
"""no_sub_classes context manager.

Only returns instances of this class and no sub (inherited) classes::
@@ -145,77 +156,99 @@ class no_sub_classes(object):
:param cls: the class to turn querying sub classes on
"""
self.cls = cls
self.cls_initial_subclasses = None

def __enter__(self):
"""Change the objects default and _auto_dereference values."""
self.cls._all_subclasses = self.cls._subclasses
self.cls._subclasses = (self.cls,)
self.cls_initial_subclasses = self.cls._subclasses
self.cls._subclasses = (self.cls._class_name,)
return self.cls

def __exit__(self, t, value, traceback):
"""Reset the default and _auto_dereference values."""
self.cls._subclasses = self.cls._all_subclasses
delattr(self.cls, '_all_subclasses')
return self.cls
self.cls._subclasses = self.cls_initial_subclasses

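
A usage sketch for the no_sub_classes fix above (Animal and Dog are hypothetical documents with inheritance enabled): note that _subclasses now holds class names rather than classes.

from mongoengine.context_managers import no_sub_classes

with no_sub_classes(Animal) as Animal:
    # matches plain Animal documents only; Dog instances are excluded
    animals = Animal.objects.all()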
class query_counter(object):
"""Query_counter context manager to get the number of queries."""
class query_counter:
"""Query_counter context manager to get the number of queries.
This works by updating the `profiling_level` of the database so that all queries get logged,
resetting the db.system.profile collection at the beginning of the context and counting the new entries.

def __init__(self):
"""Construct the query_counter."""
self.counter = 0
self.db = get_db()
This was designed for debugging purposes. Note that it is a global counter, so queries issued by other
threads/processes can interfere with it

def __enter__(self):
"""On every with block we need to drop the profile collection."""
Be aware that:
- Iterating over large amounts of documents (>101) makes pymongo issue `getmore` queries to fetch the next batch of
documents (https://docs.mongodb.com/manual/tutorial/iterate-a-cursor/#cursor-batches)
- Some queries are ignored by default by the counter (killcursors, db.system.indexes)
"""

def __init__(self, alias=DEFAULT_CONNECTION_NAME):
"""Construct the query_counter"""
self.db = get_db(alias=alias)
self.initial_profiling_level = None
self._ctx_query_counter = 0  # number of queries issued by the context

self._ignored_query = {
"ns": {"$ne": "%s.system.indexes" % self.db.name},
"op": {"$ne": "killcursors"},  # MONGODB < 3.2
"command.killCursors": {"$exists": False},  # MONGODB >= 3.2
}

def _turn_on_profiling(self):
self.initial_profiling_level = self.db.profiling_level()
self.db.set_profiling_level(0)
self.db.system.profile.drop()
self.db.set_profiling_level(2)

def _resets_profiling(self):
self.db.set_profiling_level(self.initial_profiling_level)

def __enter__(self):
self._turn_on_profiling()
return self

def __exit__(self, t, value, traceback):
"""Reset the profiling level."""
self.db.set_profiling_level(0)
self._resets_profiling()

def __eq__(self, value):
"""== Compare querycounter."""
counter = self._get_count()
return value == counter

def __ne__(self, value):
"""!= Compare querycounter."""
return not self.__eq__(value)

def __lt__(self, value):
"""< Compare querycounter."""
return self._get_count() < value

def __le__(self, value):
"""<= Compare querycounter."""
return self._get_count() <= value

def __gt__(self, value):
"""> Compare querycounter."""
return self._get_count() > value

def __ge__(self, value):
""">= Compare querycounter."""
return self._get_count() >= value

def __int__(self):
"""int representation."""
return self._get_count()

def __repr__(self):
"""repr query_counter as the number of queries."""
return u"%s" % self._get_count()
return "%s" % self._get_count()

def _get_count(self):
"""Get the number of queries."""
ignore_query = {'ns': {'$ne': '%s.system.indexes' % self.db.name}}
count = self.db.system.profile.find(ignore_query).count() - self.counter
self.counter += 1
"""Get the number of queries by counting the current number of entries in db.system.profile
and subtracting the queries issued by this context. Every time this is called, one query is
issued, so we need to balance that out
"""
count = (
count_documents(self.db.system.profile, self._ignored_query)
- self._ctx_query_counter
)
self._ctx_query_counter += (
1  # Account for the query we just issued to gather the information
)
return count
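
A usage sketch for the reworked counter above (the User document is hypothetical; the database behind the alias must support profiling):

from mongoengine.context_managers import query_counter

with query_counter() as q:
    assert q == 0
    User.objects.first()  # one query logged in db.system.profile
    assert q == 1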
@@ -224,3 +257,21 @@ def set_write_concern(collection, write_concerns):
combined_concerns = dict(collection.write_concern.document.items())
combined_concerns.update(write_concerns)
yield collection.with_options(write_concern=WriteConcern(**combined_concerns))


@contextmanager
def set_read_write_concern(collection, write_concerns, read_concerns):
combined_write_concerns = dict(collection.write_concern.document.items())

if write_concerns is not None:
combined_write_concerns.update(write_concerns)

combined_read_concerns = dict(collection.read_concern.document.items())

if read_concerns is not None:
combined_read_concerns.update(read_concerns)

yield collection.with_options(
write_concern=WriteConcern(**combined_write_concerns),
read_concern=ReadConcern(**combined_read_concerns),
)

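
A usage sketch for the new context manager above (the User document is hypothetical): the yielded collection carries the merged concerns while the original collection is left untouched.

from mongoengine.context_managers import set_read_write_concern

collection = User._get_collection()
with set_read_write_concern(
    collection, {"w": "majority"}, {"level": "majority"}
) as coll:
    coll.insert_one({"name": "Ross"})  # runs with majority read/write concern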
@@ -1,16 +1,25 @@
from bson import DBRef, SON
import six
from bson import SON, DBRef

from mongoengine.base import (BaseDict, BaseList, EmbeddedDocumentList,
TopLevelDocumentMetaclass, get_document)
from mongoengine.base import (
BaseDict,
BaseList,
EmbeddedDocumentList,
TopLevelDocumentMetaclass,
get_document,
)
from mongoengine.base.datastructures import LazyReference
from mongoengine.connection import get_db
from mongoengine.document import Document, EmbeddedDocument
from mongoengine.fields import DictField, ListField, MapField, ReferenceField
from mongoengine.fields import (
DictField,
ListField,
MapField,
ReferenceField,
)
from mongoengine.queryset import QuerySet


class DeReference(object):
class DeReference:
def __call__(self, items, max_depth=1, instance=None, name=None):
"""
Cheaply dereferences the items to a set depth.
@@ -24,7 +33,7 @@ class DeReference(object):
:class:`~mongoengine.base.ComplexBaseField`
:param get: A boolean determining if being called by __get__
"""
if items is None or isinstance(items, six.string_types):
if items is None or isinstance(items, str):
return items

# cheapest way to convert a queryset to a list
@@ -35,43 +44,59 @@ class DeReference(object):
self.max_depth = max_depth
doc_type = None

if instance and isinstance(instance, (Document, EmbeddedDocument,
TopLevelDocumentMetaclass)):
if instance and isinstance(
instance, (Document, EmbeddedDocument, TopLevelDocumentMetaclass)
):
doc_type = instance._fields.get(name)
while hasattr(doc_type, 'field'):
while hasattr(doc_type, "field"):
doc_type = doc_type.field

if isinstance(doc_type, ReferenceField):
field = doc_type
doc_type = doc_type.document_type
is_list = not hasattr(items, 'items')
is_list = not hasattr(items, "items")

if is_list and all([i.__class__ == doc_type for i in items]):
if is_list and all(i.__class__ == doc_type for i in items):
return items
elif not is_list and all(
[i.__class__ == doc_type for i in items.values()]):
i.__class__ == doc_type for i in items.values()
):
return items
elif not field.dbref:
if not hasattr(items, 'items'):
# We must turn the ObjectIds into DBRefs

def _get_items(items):
# Recursively dig into the sub items of a list/dict
# to turn the ObjectIds into DBRefs
def _get_items_from_list(items):
new_items = []
for v in items:
if isinstance(v, list):
new_items.append(_get_items(v))
value = v
if isinstance(v, dict):
value = _get_items_from_dict(v)
elif isinstance(v, list):
value = _get_items_from_list(v)
elif not isinstance(v, (DBRef, Document)):
new_items.append(field.to_python(v))
else:
new_items.append(v)
value = field.to_python(v)
new_items.append(value)
return new_items

items = _get_items(items)
def _get_items_from_dict(items):
new_items = {}
for k, v in items.items():
value = v
if isinstance(v, list):
value = _get_items_from_list(v)
elif isinstance(v, dict):
value = _get_items_from_dict(v)
elif not isinstance(v, (DBRef, Document)):
value = field.to_python(v)
new_items[k] = value
return new_items

if not hasattr(items, "items"):
items = _get_items_from_list(items)
else:
items = {
k: (v if isinstance(v, (DBRef, Document))
else field.to_python(v))
for k, v in items.iteritems()
}
items = _get_items_from_dict(items)

self.reference_map = self._find_references(items)
self.object_map = self._fetch_objects(doc_type=doc_type)
@@ -98,20 +123,26 @@ class DeReference(object):
depth += 1
for item in iterator:
if isinstance(item, (Document, EmbeddedDocument)):
for field_name, field in item._fields.iteritems():
for field_name, field in item._fields.items():
v = item._data.get(field_name, None)
if isinstance(v, LazyReference):
# LazyReference inherits DBRef but should not be dereferenced here!
continue
elif isinstance(v, DBRef):
reference_map.setdefault(field.document_type, set()).add(v.id)
elif isinstance(v, (dict, SON)) and '_ref' in v:
reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id)
elif isinstance(v, (dict, SON)) and "_ref" in v:
reference_map.setdefault(get_document(v["_cls"]), set()).add(
v["_ref"].id
)
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
field_cls = getattr(getattr(field, 'field', None), 'document_type', None)
field_cls = getattr(
getattr(field, "field", None), "document_type", None
)
references = self._find_references(v, depth)
for key, refs in references.iteritems():
if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
for key, refs in references.items():
if isinstance(
field_cls, (Document, TopLevelDocumentMetaclass)
):
key = field_cls
reference_map.setdefault(key, set()).update(refs)
elif isinstance(item, LazyReference):
@@ -119,48 +150,58 @@ class DeReference(object):
continue
elif isinstance(item, DBRef):
reference_map.setdefault(item.collection, set()).add(item.id)
elif isinstance(item, (dict, SON)) and '_ref' in item:
reference_map.setdefault(get_document(item['_cls']), set()).add(item['_ref'].id)
elif isinstance(item, (dict, SON)) and "_ref" in item:
reference_map.setdefault(get_document(item["_cls"]), set()).add(
item["_ref"].id
)
elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
references = self._find_references(item, depth - 1)
for key, refs in references.iteritems():
for key, refs in references.items():
reference_map.setdefault(key, set()).update(refs)

return reference_map

def _fetch_objects(self, doc_type=None):
"""Fetch all references and convert to their document objects
"""
"""Fetch all references and convert to their document objects"""
object_map = {}
for collection, dbrefs in self.reference_map.iteritems():
if hasattr(collection, 'objects'):  # We have a document class for the refs
for collection, dbrefs in self.reference_map.items():

# We use getattr instead of hasattr because under Python 2 hasattr
# swallows any exception, so it could hide nasty things without
# raising them (cf. bug #1688)
ref_document_cls_exists = getattr(collection, "objects", None) is not None

if ref_document_cls_exists:
col_name = collection._get_collection_name()
refs = [dbref for dbref in dbrefs
if (col_name, dbref) not in object_map]
refs = [
dbref for dbref in dbrefs if (col_name, dbref) not in object_map
]
references = collection.objects.in_bulk(refs)
for key, doc in references.iteritems():
for key, doc in references.items():
object_map[(col_name, key)] = doc
else:  # Generic reference: use the refs data to convert to document
if isinstance(doc_type, (ListField, DictField, MapField,)):
if isinstance(doc_type, (ListField, DictField, MapField)):
continue

refs = [dbref for dbref in dbrefs
if (collection, dbref) not in object_map]
refs = [
dbref for dbref in dbrefs if (collection, dbref) not in object_map
]

if doc_type:
references = doc_type._get_db()[collection].find({'_id': {'$in': refs}})
references = doc_type._get_db()[collection].find(
{"_id": {"$in": refs}}
)
for ref in references:
doc = doc_type._from_son(ref)
object_map[(collection, doc.id)] = doc
else:
references = get_db()[collection].find({'_id': {'$in': refs}})
references = get_db()[collection].find({"_id": {"$in": refs}})
for ref in references:
if '_cls' in ref:
doc = get_document(ref['_cls'])._from_son(ref)
if "_cls" in ref:
doc = get_document(ref["_cls"])._from_son(ref)
elif doc_type is None:
doc = get_document(
''.join(x.capitalize()
for x in collection.split('_')))._from_son(ref)
"".join(x.capitalize() for x in collection.split("_"))
)._from_son(ref)
else:
doc = doc_type._from_son(ref)
object_map[(collection, doc.id)] = doc
@@ -188,19 +229,20 @@ class DeReference(object):
return BaseList(items, instance, name)

if isinstance(items, (dict, SON)):
if '_ref' in items:
if "_ref" in items:
return self.object_map.get(
(items['_ref'].collection, items['_ref'].id), items)
elif '_cls' in items:
doc = get_document(items['_cls'])._from_son(items)
_cls = doc._data.pop('_cls', None)
del items['_cls']
(items["_ref"].collection, items["_ref"].id), items
)
elif "_cls" in items:
doc = get_document(items["_cls"])._from_son(items)
_cls = doc._data.pop("_cls", None)
del items["_cls"]
doc._data = self._attach_objects(doc._data, depth, doc, None)
if _cls is not None:
doc._data['_cls'] = _cls
doc._data["_cls"] = _cls
return doc

if not hasattr(items, 'items'):
if not hasattr(items, "items"):
is_list = True
list_type = BaseList
if isinstance(items, EmbeddedDocumentList):
@@ -210,7 +252,7 @@ class DeReference(object):
data = []
else:
is_list = False
iterator = items.iteritems()
iterator = items.items()
data = {}

depth += 1
@@ -227,17 +269,23 @@ class DeReference(object):
v = data[k]._data.get(field_name, None)
if isinstance(v, DBRef):
data[k]._data[field_name] = self.object_map.get(
(v.collection, v.id), v)
elif isinstance(v, (dict, SON)) and '_ref' in v:
(v.collection, v.id), v
)
elif isinstance(v, (dict, SON)) and "_ref" in v:
data[k]._data[field_name] = self.object_map.get(
(v['_ref'].collection, v['_ref'].id), v)
(v["_ref"].collection, v["_ref"].id), v
)
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
item_name = six.text_type('{0}.{1}.{2}').format(name, k, field_name)
data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=item_name)
item_name = f"{name}.{k}.{field_name}"
data[k]._data[field_name] = self._attach_objects(
v, depth, instance=instance, name=item_name
)
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
|
||||
item_name = '%s.%s' % (name, k) if name else name
|
||||
data[k] = self._attach_objects(v, depth - 1, instance=instance, name=item_name)
|
||||
elif isinstance(v, DBRef) and hasattr(v, 'id'):
|
||||
item_name = f"{name}.{k}" if name else name
|
||||
data[k] = self._attach_objects(
|
||||
v, depth - 1, instance=instance, name=item_name
|
||||
)
|
||||
elif isinstance(v, DBRef) and hasattr(v, "id"):
|
||||
data[k] = self.object_map.get((v.collection, v.id), v)
|
||||
|
||||
if instance and name:
|
||||
|
||||
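Usage sketch (annotation, not part of the diff; Author and Book are hypothetical models) of the behavior the LazyReference check above preserves: plain references are collected into reference_map and resolved, while lazy references stay lazy even under eager dereferencing.

# Hedged sketch -- these models are made up for illustration.
from mongoengine import Document, LazyReferenceField, ReferenceField, StringField

class Author(Document):
    name = StringField()

class Book(Document):
    author = ReferenceField(Author)      # collected into reference_map above
    editor = LazyReferenceField(Author)  # hits the `continue`: stays lazy

books = Book.objects.select_related()  # editor values remain LazyReference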
File diff suppressed because it is too large
@@ -1,11 +1,20 @@
 from collections import defaultdict

-import six
-
-__all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError',
-           'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError',
-           'OperationError', 'NotUniqueError', 'FieldDoesNotExist',
-           'ValidationError', 'SaveConditionError')
+__all__ = (
+    "NotRegistered",
+    "InvalidDocumentError",
+    "LookUpError",
+    "DoesNotExist",
+    "MultipleObjectsReturned",
+    "InvalidQueryError",
+    "OperationError",
+    "NotUniqueError",
+    "BulkWriteError",
+    "FieldDoesNotExist",
+    "ValidationError",
+    "SaveConditionError",
+    "DeprecatedError",
+)


 class NotRegistered(Exception):
@@ -40,6 +49,10 @@ class NotUniqueError(OperationError):
     pass


+class BulkWriteError(OperationError):
+    pass
+
+
 class SaveConditionError(OperationError):
     pass

@@ -70,24 +83,25 @@ class ValidationError(AssertionError):
     field_name = None
     _message = None

-    def __init__(self, message='', **kwargs):
-        self.errors = kwargs.get('errors', {})
-        self.field_name = kwargs.get('field_name')
+    def __init__(self, message="", **kwargs):
+        super().__init__(message)
+        self.errors = kwargs.get("errors", {})
+        self.field_name = kwargs.get("field_name")
         self.message = message

     def __str__(self):
-        return six.text_type(self.message)
+        return str(self.message)

     def __repr__(self):
-        return '%s(%s,)' % (self.__class__.__name__, self.message)
+        return f"{self.__class__.__name__}({self.message},)"

     def __getattribute__(self, name):
-        message = super(ValidationError, self).__getattribute__(name)
-        if name == 'message':
+        message = super().__getattribute__(name)
+        if name == "message":
             if self.field_name:
-                message = '%s' % message
+                message = "%s" % message
             if self.errors:
-                message = '%s(%s)' % (message, self._format_errors())
+                message = f"{message}({self._format_errors()})"
         return message

     def _get_message(self):
@@ -108,16 +122,13 @@ class ValidationError(AssertionError):

         def build_dict(source):
             errors_dict = {}
-            if not source:
-                return errors_dict
-
             if isinstance(source, dict):
-                for field_name, error in source.iteritems():
+                for field_name, error in source.items():
                     errors_dict[field_name] = build_dict(error)
             elif isinstance(source, ValidationError) and source.errors:
                 return build_dict(source.errors)
             else:
-                return six.text_type(source)
+                return str(source)

             return errors_dict

@@ -129,17 +140,22 @@ class ValidationError(AssertionError):
     def _format_errors(self):
         """Returns a string listing all errors within a document"""

-        def generate_key(value, prefix=''):
+        def generate_key(value, prefix=""):
             if isinstance(value, list):
-                value = ' '.join([generate_key(k) for k in value])
+                value = " ".join([generate_key(k) for k in value])
             elif isinstance(value, dict):
-                value = ' '.join(
-                    [generate_key(v, k) for k, v in value.iteritems()])
+                value = " ".join([generate_key(v, k) for k, v in value.items()])

-            results = '%s.%s' % (prefix, value) if prefix else value
+            results = f"{prefix}.{value}" if prefix else value
             return results

         error_dict = defaultdict(list)
-        for k, v in self.to_dict().iteritems():
+        for k, v in self.to_dict().items():
             error_dict[generate_key(v)].append(k)
-        return ' '.join(['%s: %s' % (k, v) for k, v in error_dict.iteritems()])
+        return " ".join([f"{k}: {v}" for k, v in error_dict.items()])


+class DeprecatedError(Exception):
+    """Raise when a user uses a feature that has been Deprecated"""
+
+    pass
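Usage sketch (annotation, not part of the diff) of the error formatting exercised above; the exact output string is approximate.

from mongoengine.errors import ValidationError

err = ValidationError(
    "ValidationError", errors={"name": ValidationError("Field is required")}
)
print(err.message)    # roughly: "ValidationError(name.Field is required: ['name'])"
print(err.to_dict())  # {'name': 'Field is required'}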
File diff suppressed because it is too large
mongoengine/mongodb_support.py (new file, 20 lines)
@@ -0,0 +1,20 @@
+"""
+Helper functions, constants, and types to aid with MongoDB version support
+"""
+from mongoengine.connection import get_connection
+
+# Constant that can be used to compare the version retrieved with
+# get_mongodb_version()
+MONGODB_34 = (3, 4)
+MONGODB_36 = (3, 6)
+MONGODB_42 = (4, 2)
+MONGODB_44 = (4, 4)
+
+
+def get_mongodb_version():
+    """Return the version of the default connected mongoDB (first 2 digits)
+
+    :return: tuple(int, int)
+    """
+    version_list = get_connection().server_info()["versionArray"][:2]  # e.g: (3, 2)
+    return tuple(version_list)
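Usage sketch (annotation, not part of the diff): callers compare the returned tuple against the module constants, assuming a default connection registered via connect().

from mongoengine.mongodb_support import MONGODB_36, get_mongodb_version

if get_mongodb_version() >= MONGODB_36:
    pass  # safe to rely on MongoDB 3.6+ features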
mongoengine/pymongo_support.py (new file, 60 lines)
@@ -0,0 +1,60 @@
+"""
+Helper functions, constants, and types to aid with PyMongo v2.7 - v3.x support.
+"""
+import pymongo
+from pymongo.errors import OperationFailure
+
+_PYMONGO_37 = (3, 7)
+
+PYMONGO_VERSION = tuple(pymongo.version_tuple[:2])
+
+IS_PYMONGO_GTE_37 = PYMONGO_VERSION >= _PYMONGO_37
+
+
+def count_documents(
+    collection, filter, skip=None, limit=None, hint=None, collation=None
+):
+    """Pymongo>3.7 deprecates count in favour of count_documents"""
+    if limit == 0:
+        return 0  # Pymongo raises an OperationFailure if called with limit=0
+
+    kwargs = {}
+    if skip is not None:
+        kwargs["skip"] = skip
+    if limit is not None:
+        kwargs["limit"] = limit
+    if hint not in (-1, None):
+        kwargs["hint"] = hint
+    if collation is not None:
+        kwargs["collation"] = collation
+
+    # count_documents appeared in pymongo 3.7
+    if IS_PYMONGO_GTE_37:
+        try:
+            return collection.count_documents(filter=filter, **kwargs)
+        except OperationFailure:
+            # OperationFailure - accounts for some operators that used to work
+            # with .count but are no longer working with count_documents (i.e $geoNear, $near, and $nearSphere)
+            # fallback to deprecated Cursor.count
+            # Keeping this should be reevaluated the day pymongo removes .count entirely
+            pass
+
+    cursor = collection.find(filter)
+    for option, option_value in kwargs.items():
+        cursor_method = getattr(cursor, option)
+        cursor = cursor_method(option_value)
+    with_limit_and_skip = "skip" in kwargs or "limit" in kwargs
+    return cursor.count(with_limit_and_skip=with_limit_and_skip)
+
+
+def list_collection_names(db, include_system_collections=False):
+    """Pymongo>3.7 deprecates collection_names in favour of list_collection_names"""
+    if IS_PYMONGO_GTE_37:
+        collections = db.list_collection_names()
+    else:
+        collections = db.collection_names()
+
+    if not include_system_collections:
+        collections = [c for c in collections if not c.startswith("system.")]
+
+    return collections
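Usage sketch (annotation, not part of the diff; the "books" collection name is made up): the helper works on a raw pymongo collection and transparently falls back to Cursor.count on older drivers.

from mongoengine.connection import get_db
from mongoengine.pymongo_support import count_documents

n = count_documents(
    get_db()["books"], {"pages": {"$gte": 100}}, skip=10, limit=5
)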
@@ -1,25 +0,0 @@
-"""
-Helper functions, constants, and types to aid with Python v2.7 - v3.x and
-PyMongo v2.7 - v3.x support.
-"""
-import pymongo
-import six
-
-
-if pymongo.version_tuple[0] < 3:
-    IS_PYMONGO_3 = False
-else:
-    IS_PYMONGO_3 = True
-
-
-# six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3.
-StringIO = six.BytesIO
-
-# Additionally for Py2, try to use the faster cStringIO, if available
-if not six.PY3:
-    try:
-        import cStringIO
-    except ImportError:
-        pass
-    else:
-        StringIO = cStringIO.StringIO
@@ -7,11 +7,22 @@ from mongoengine.queryset.visitor import *

 # Expose just the public subset of all imported objects and constants.
 __all__ = (
-    'QuerySet', 'QuerySetNoCache', 'Q', 'queryset_manager', 'QuerySetManager',
-    'QueryFieldList', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL',
+    "QuerySet",
+    "QuerySetNoCache",
+    "Q",
+    "queryset_manager",
+    "QuerySetManager",
+    "QueryFieldList",
+    "DO_NOTHING",
+    "NULLIFY",
+    "CASCADE",
+    "DENY",
+    "PULL",
     # Errors that might be related to a queryset, mostly here for backward
     # compatibility
-    'DoesNotExist', 'InvalidQueryError', 'MultipleObjectsReturned',
-    'NotUniqueError', 'OperationError',
+    "DoesNotExist",
+    "InvalidQueryError",
+    "MultipleObjectsReturned",
+    "NotUniqueError",
+    "OperationError",
 )
File diff suppressed because it is too large
@@ -1,12 +1,15 @@
-__all__ = ('QueryFieldList',)
+__all__ = ("QueryFieldList",)


-class QueryFieldList(object):
+class QueryFieldList:
     """Object that handles combinations of .only() and .exclude() calls"""
+
     ONLY = 1
     EXCLUDE = 0

-    def __init__(self, fields=None, value=ONLY, always_include=None, _only_called=False):
+    def __init__(
+        self, fields=None, value=ONLY, always_include=None, _only_called=False
+    ):
         """The QueryFieldList builder

         :param fields: A list of fields used in `.only()` or `.exclude()`
@@ -49,7 +52,7 @@ class QueryFieldList(object):
                 self.fields = f.fields - self.fields
             self._clean_slice()

-        if '_id' in f.fields:
+        if "_id" in f.fields:
             self._id = f.value

         if self.always_include:
@@ -59,11 +62,11 @@ class QueryFieldList(object):
             else:
                 self.fields -= self.always_include

-        if getattr(f, '_only_called', False):
+        if getattr(f, "_only_called", False):
             self._only_called = True
         return self

-    def __nonzero__(self):
+    def __bool__(self):
         return bool(self.fields)

     def as_dict(self):
@@ -71,11 +74,11 @@ class QueryFieldList(object):
         if self.slice:
             field_list.update(self.slice)
         if self._id is not None:
-            field_list['_id'] = self._id
+            field_list["_id"] = self._id
         return field_list

     def reset(self):
-        self.fields = set([])
+        self.fields = set()
         self.slice = {}
         self.value = self.ONLY
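Sketch (annotation, not part of the diff) of how .only()/.exclude() combine through QueryFieldList.__add__; the output shape is inferred from the code above.

from mongoengine.queryset.field_list import QueryFieldList

fields = QueryFieldList(["a", "b"], value=QueryFieldList.ONLY, _only_called=True)
fields += QueryFieldList(["b"], value=QueryFieldList.EXCLUDE)
print(fields.as_dict())  # expected: {"a": 1} -- ONLY minus the excluded field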
@@ -1,10 +1,11 @@
 from functools import partial

 from mongoengine.queryset.queryset import QuerySet

-__all__ = ('queryset_manager', 'QuerySetManager')
+__all__ = ("queryset_manager", "QuerySetManager")


-class QuerySetManager(object):
+class QuerySetManager:
     """
     The default QuerySet Manager.

@@ -33,10 +34,10 @@ class QuerySetManager(object):
            return self

        # owner is the document that contains the QuerySetManager
-        queryset_class = owner._meta.get('queryset_class', self.default)
+        queryset_class = owner._meta.get("queryset_class", self.default)
        queryset = queryset_class(owner, owner._get_collection())
        if self.get_queryset:
-            arg_count = self.get_queryset.func_code.co_argcount
+            arg_count = self.get_queryset.__code__.co_argcount
            if arg_count == 1:
                queryset = self.get_queryset(queryset)
            elif arg_count == 2:
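Usage sketch (annotation, not part of the diff; Post is a hypothetical model) of a custom manager whose argument count is inspected via get_queryset.__code__.co_argcount above.

from mongoengine import Document, StringField, queryset_manager

class Post(Document):
    title = StringField()

    @queryset_manager
    def live_posts(doc_cls, queryset):  # two args -> the arg_count == 2 branch
        return queryset.filter(title__ne="")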
@@ -1,11 +1,22 @@
-import six
-
 from mongoengine.errors import OperationError
-from mongoengine.queryset.base import (BaseQuerySet, CASCADE, DENY, DO_NOTHING,
-                                       NULLIFY, PULL)
+from mongoengine.queryset.base import (
+    CASCADE,
+    DENY,
+    DO_NOTHING,
+    NULLIFY,
+    PULL,
+    BaseQuerySet,
+)

-__all__ = ('QuerySet', 'QuerySetNoCache', 'DO_NOTHING', 'NULLIFY', 'CASCADE',
-           'DENY', 'PULL')
+__all__ = (
+    "QuerySet",
+    "QuerySetNoCache",
+    "DO_NOTHING",
+    "NULLIFY",
+    "CASCADE",
+    "DENY",
+    "PULL",
+)

 # The maximum number of items to display in a QuerySet.__repr__
 REPR_OUTPUT_SIZE = 20
@@ -57,12 +68,12 @@ class QuerySet(BaseQuerySet):
     def __repr__(self):
         """Provide a string representation of the QuerySet"""
         if self._iter:
-            return '.. queryset mid-iteration ..'
+            return ".. queryset mid-iteration .."

         self._populate_cache()
         data = self._result_cache[: REPR_OUTPUT_SIZE + 1]
         if len(data) > REPR_OUTPUT_SIZE:
-            data[-1] = '...(remaining elements truncated)...'
+            data[-1] = "...(remaining elements truncated)..."
         return repr(data)

     def _iter_results(self):
@@ -89,7 +100,7 @@ class QuerySet(BaseQuerySet):
             yield self._result_cache[pos]
             pos += 1

-        # Raise StopIteration if we already established there were no more
+        # return if we already established there were no more
         # docs in the db cursor.
         if not self._has_more:
             return
@@ -114,8 +125,8 @@ class QuerySet(BaseQuerySet):
         # Pull in ITER_CHUNK_SIZE docs from the database and store them in
         # the result cache.
         try:
-            for _ in six.moves.range(ITER_CHUNK_SIZE):
-                self._result_cache.append(self.next())
+            for _ in range(ITER_CHUNK_SIZE):
+                self._result_cache.append(next(self))
         except StopIteration:
             # Getting this exception means there are no more docs in the
             # db cursor. Set _has_more to False so that we can use that
@@ -130,52 +141,43 @@ class QuerySet(BaseQuerySet):
         getting the count
         """
         if with_limit_and_skip is False:
-            return super(QuerySet, self).count(with_limit_and_skip)
+            return super().count(with_limit_and_skip)

         if self._len is None:
-            self._len = super(QuerySet, self).count(with_limit_and_skip)
+            # cache the length
+            self._len = super().count(with_limit_and_skip)

         return self._len

     def no_cache(self):
-        """Convert to a non-caching queryset
-
-        .. versionadded:: 0.8.3 Convert to non caching queryset
-        """
+        """Convert to a non-caching queryset"""
         if self._result_cache is not None:
-            raise OperationError('QuerySet already cached')
+            raise OperationError("QuerySet already cached")

-        return self._clone_into(QuerySetNoCache(self._document,
-                                                self._collection))
+        return self._clone_into(QuerySetNoCache(self._document, self._collection))


 class QuerySetNoCache(BaseQuerySet):
     """A non caching QuerySet"""

     def cache(self):
-        """Convert to a caching queryset
-
-        .. versionadded:: 0.8.3 Convert to caching queryset
-        """
+        """Convert to a caching queryset"""
         return self._clone_into(QuerySet(self._document, self._collection))

     def __repr__(self):
-        """Provides the string representation of the QuerySet
-
-        .. versionchanged:: 0.6.13 Now doesnt modify the cursor
-        """
+        """Provides the string representation of the QuerySet"""
         if self._iter:
-            return '.. queryset mid-iteration ..'
+            return ".. queryset mid-iteration .."

         data = []
-        for _ in six.moves.range(REPR_OUTPUT_SIZE + 1):
+        for _ in range(REPR_OUTPUT_SIZE + 1):
             try:
-                data.append(self.next())
+                data.append(next(self))
             except StopIteration:
                 break

         if len(data) > REPR_OUTPUT_SIZE:
-            data[-1] = '...(remaining elements truncated)...'
+            data[-1] = "...(remaining elements truncated)..."

         self.rewind()
         return repr(data)
@@ -186,10 +188,3 @@ class QuerySetNoCache(BaseQuerySet):
         queryset = self.clone()
         queryset.rewind()
         return queryset
-
-
-class QuerySetNoDeRef(QuerySet):
-    """Special no_dereference QuerySet"""
-
-    def __dereference(items, max_depth=1, instance=None, name=None):
-        return items
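Sketch (annotation, not part of the diff) of the caching contract enforced above, reusing the hypothetical Post model from earlier.

qs = Post.objects
uncached = qs.no_cache()  # fine: nothing cached yet
list(qs)                  # populates _result_cache; count() would now cache _len
# qs.no_cache() at this point raises OperationError("QuerySet already cached")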
@@ -1,31 +1,61 @@
 from collections import defaultdict

-from bson import ObjectId, SON
-from bson.dbref import DBRef
 import pymongo
-import six
+from bson import SON, ObjectId
+from bson.dbref import DBRef

 from mongoengine.base import UPDATE_OPERATORS
 from mongoengine.common import _import_class
-from mongoengine.connection import get_connection
 from mongoengine.errors import InvalidQueryError
-from mongoengine.python_support import IS_PYMONGO_3

-__all__ = ('query', 'update')
+__all__ = ("query", "update", "STRING_OPERATORS")

-COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
-                        'all', 'size', 'exists', 'not', 'elemMatch', 'type')
-GEO_OPERATORS = ('within_distance', 'within_spherical_distance',
-                 'within_box', 'within_polygon', 'near', 'near_sphere',
-                 'max_distance', 'min_distance', 'geo_within', 'geo_within_box',
-                 'geo_within_polygon', 'geo_within_center',
-                 'geo_within_sphere', 'geo_intersects')
-STRING_OPERATORS = ('contains', 'icontains', 'startswith',
-                    'istartswith', 'endswith', 'iendswith',
-                    'exact', 'iexact')
-CUSTOM_OPERATORS = ('match',)
-MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS +
-                   STRING_OPERATORS + CUSTOM_OPERATORS)
+COMPARISON_OPERATORS = (
+    "ne",
+    "gt",
+    "gte",
+    "lt",
+    "lte",
+    "in",
+    "nin",
+    "mod",
+    "all",
+    "size",
+    "exists",
+    "not",
+    "elemMatch",
+    "type",
+)
+GEO_OPERATORS = (
+    "within_distance",
+    "within_spherical_distance",
+    "within_box",
+    "within_polygon",
+    "near",
+    "near_sphere",
+    "max_distance",
+    "min_distance",
+    "geo_within",
+    "geo_within_box",
+    "geo_within_polygon",
+    "geo_within_center",
+    "geo_within_sphere",
+    "geo_intersects",
+)
+STRING_OPERATORS = (
+    "contains",
+    "icontains",
+    "startswith",
+    "istartswith",
+    "endswith",
+    "iendswith",
+    "exact",
+    "iexact",
+)
+CUSTOM_OPERATORS = ("match",)
+MATCH_OPERATORS = (
+    COMPARISON_OPERATORS + GEO_OPERATORS + STRING_OPERATORS + CUSTOM_OPERATORS
+)


 # TODO make this less complex
@@ -34,11 +64,11 @@ def query(_doc_cls=None, **kwargs):
     mongo_query = {}
     merge_query = defaultdict(list)
     for key, value in sorted(kwargs.items()):
-        if key == '__raw__':
+        if key == "__raw__":
             mongo_query.update(value)
             continue

-        parts = key.rsplit('__')
+        parts = key.rsplit("__")
         indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()]
         parts = [part for part in parts if not part.isdigit()]
         # Check for an operator and transform to mongo-style if there is
@@ -47,11 +77,11 @@ def query(_doc_cls=None, **kwargs):
             op = parts.pop()

         # Allow to escape operator-like field name by __
-        if len(parts) > 1 and parts[-1] == '':
+        if len(parts) > 1 and parts[-1] == "":
             parts.pop()

         negate = False
-        if len(parts) > 1 and parts[-1] == 'not':
+        if len(parts) > 1 and parts[-1] == "not":
             parts.pop()
             negate = True

@@ -63,18 +93,18 @@ def query(_doc_cls=None, **kwargs):
                raise InvalidQueryError(e)
            parts = []

-            CachedReferenceField = _import_class('CachedReferenceField')
-            GenericReferenceField = _import_class('GenericReferenceField')
+            CachedReferenceField = _import_class("CachedReferenceField")
+            GenericReferenceField = _import_class("GenericReferenceField")

            cleaned_fields = []
            for field in fields:
                append_field = True
-                if isinstance(field, six.string_types):
+                if isinstance(field, str):
                    parts.append(field)
                    append_field = False
                # is last and CachedReferenceField
                elif isinstance(field, CachedReferenceField) and fields[-1] == field:
-                    parts.append('%s._id' % field.db_field)
+                    parts.append("%s._id" % field.db_field)
                else:
                    parts.append(field.db_field)

@@ -84,23 +114,15 @@ def query(_doc_cls=None, **kwargs):
            # Convert value to proper value
            field = cleaned_fields[-1]

-            singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not']
+            singular_ops = [None, "ne", "gt", "gte", "lt", "lte", "not"]
            singular_ops += STRING_OPERATORS
            if op in singular_ops:
-                if isinstance(field, six.string_types):
-                    if (op in STRING_OPERATORS and
-                            isinstance(value, six.string_types)):
-                        StringField = _import_class('StringField')
-                        value = StringField.prepare_query_value(op, value)
-                    else:
-                        value = field
-                else:
-                    value = field.prepare_query_value(op, value)
+                value = field.prepare_query_value(op, value)

                if isinstance(field, CachedReferenceField) and value:
-                    value = value['_id']
+                    value = value["_id"]

-            elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict):
+            elif op in ("in", "nin", "all", "near") and not isinstance(value, dict):
                # Raise an error if the in/nin/all/near param is not iterable.
                value = _prepare_query_for_iterable(field, op, value)

@@ -110,73 +132,77 @@ def query(_doc_cls=None, **kwargs):
        # * If the value is an ObjectId, the key should be "field_name._ref.$id".
        if isinstance(field, GenericReferenceField):
            if isinstance(value, DBRef):
-                parts[-1] += '._ref'
+                parts[-1] += "._ref"
            elif isinstance(value, ObjectId):
-                parts[-1] += '._ref.$id'
+                parts[-1] += "._ref.$id"

        # if op and op not in COMPARISON_OPERATORS:
        if op:
            if op in GEO_OPERATORS:
                value = _geo_operator(field, op, value)
-            elif op in ('match', 'elemMatch'):
-                ListField = _import_class('ListField')
-                EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
+            elif op in ("match", "elemMatch"):
+                ListField = _import_class("ListField")
+                EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
                if (
-                    isinstance(value, dict) and
-                    isinstance(field, ListField) and
-                    isinstance(field.field, EmbeddedDocumentField)
+                    isinstance(value, dict)
+                    and isinstance(field, ListField)
+                    and isinstance(field.field, EmbeddedDocumentField)
                ):
                    value = query(field.field.document_type, **value)
                else:
                    value = field.prepare_query_value(op, value)
-                value = {'$elemMatch': value}
+                value = {"$elemMatch": value}
            elif op in CUSTOM_OPERATORS:
-                NotImplementedError('Custom method "%s" has not '
-                                    'been implemented' % op)
+                NotImplementedError(
+                    'Custom method "%s" has not ' "been implemented" % op
+                )
            elif op not in STRING_OPERATORS:
-                value = {'$' + op: value}
+                value = {"$" + op: value}

        if negate:
-            value = {'$not': value}
+            value = {"$not": value}

        for i, part in indices:
            parts.insert(i, part)

-        key = '.'.join(parts)
+        key = ".".join(parts)

-        if op is None or key not in mongo_query:
+        if key not in mongo_query:
            mongo_query[key] = value
-        elif key in mongo_query:
-            if isinstance(mongo_query[key], dict):
+        else:
+            if isinstance(mongo_query[key], dict) and isinstance(value, dict):
                mongo_query[key].update(value)
                # $max/minDistance needs to come last - convert to SON
                value_dict = mongo_query[key]
-                if ('$maxDistance' in value_dict or '$minDistance' in value_dict) and \
-                        ('$near' in value_dict or '$nearSphere' in value_dict):
+                if ("$maxDistance" in value_dict or "$minDistance" in value_dict) and (
+                    "$near" in value_dict or "$nearSphere" in value_dict
+                ):
                    value_son = SON()
-                    for k, v in value_dict.iteritems():
-                        if k == '$maxDistance' or k == '$minDistance':
+                    for k, v in value_dict.items():
+                        if k == "$maxDistance" or k == "$minDistance":
                            continue
                        value_son[k] = v
                    # Required for MongoDB >= 2.6, may fail when combining
                    # PyMongo 3+ and MongoDB < 2.6
                    near_embedded = False
-                    for near_op in ('$near', '$nearSphere'):
-                        if isinstance(value_dict.get(near_op), dict) and (
-                                IS_PYMONGO_3 or get_connection().max_wire_version > 1):
+                    for near_op in ("$near", "$nearSphere"):
+                        if isinstance(value_dict.get(near_op), dict):
                            value_son[near_op] = SON(value_son[near_op])
-                            if '$maxDistance' in value_dict:
-                                value_son[near_op][
-                                    '$maxDistance'] = value_dict['$maxDistance']
-                            if '$minDistance' in value_dict:
-                                value_son[near_op][
-                                    '$minDistance'] = value_dict['$minDistance']
+                            if "$maxDistance" in value_dict:
+                                value_son[near_op]["$maxDistance"] = value_dict[
+                                    "$maxDistance"
+                                ]
+                            if "$minDistance" in value_dict:
+                                value_son[near_op]["$minDistance"] = value_dict[
+                                    "$minDistance"
+                                ]
                            near_embedded = True

                    if not near_embedded:
-                        if '$maxDistance' in value_dict:
-                            value_son['$maxDistance'] = value_dict['$maxDistance']
-                        if '$minDistance' in value_dict:
-                            value_son['$minDistance'] = value_dict['$minDistance']
+                        if "$maxDistance" in value_dict:
+                            value_son["$maxDistance"] = value_dict["$maxDistance"]
+                        if "$minDistance" in value_dict:
+                            value_son["$minDistance"] = value_dict["$minDistance"]
                    mongo_query[key] = value_son
            else:
                # Store for manually merging later
@@ -188,10 +214,10 @@ def query(_doc_cls=None, **kwargs):
            del mongo_query[k]
            if isinstance(v, list):
                value = [{k: val} for val in v]
-                if '$and' in mongo_query.keys():
-                    mongo_query['$and'].extend(value)
+                if "$and" in mongo_query.keys():
+                    mongo_query["$and"].extend(value)
                else:
-                    mongo_query['$and'] = value
+                    mongo_query["$and"] = value

    return mongo_query

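Sketch (annotation, not part of the diff) of the query shapes produced above, using the hypothetical Post model from earlier.

from mongoengine.queryset import transform

transform.query(Post, title="x")           # -> {"title": "x"}
transform.query(Post, title__ne="x")       # -> {"title": {"$ne": "x"}}
transform.query(Post, title__not__ne="x")  # negate -> {"title": {"$not": {"$ne": "x"}}}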
@@ -201,37 +227,44 @@ def update(_doc_cls=None, **update):
     format.
     """
     mongo_update = {}

     for key, value in update.items():
-        if key == '__raw__':
+        if key == "__raw__":
             mongo_update.update(value)
             continue
-        parts = key.split('__')
+
+        parts = key.split("__")

         # if there is no operator, default to 'set'
         if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS:
-            parts.insert(0, 'set')
+            parts.insert(0, "set")

         # Check for an operator and transform to mongo-style if there is
         op = None
         if parts[0] in UPDATE_OPERATORS:
             op = parts.pop(0)
             # Convert Pythonic names to Mongo equivalents
-            if op in ('push_all', 'pull_all'):
-                op = op.replace('_all', 'All')
-            elif op == 'dec':
+            operator_map = {
+                "push_all": "pushAll",
+                "pull_all": "pullAll",
+                "dec": "inc",
+                "add_to_set": "addToSet",
+                "set_on_insert": "setOnInsert",
+            }
+            if op == "dec":
                 # Support decrement by flipping a positive value's sign
                 # and using 'inc'
-                op = 'inc'
                 value = -value
-            elif op == 'add_to_set':
-                op = 'addToSet'
-            elif op == 'set_on_insert':
-                op = 'setOnInsert'
+            # If the operator doesn't found from operator map, the op value
+            # will stay unchanged
+            op = operator_map.get(op, op)

         match = None
         if parts[-1] in COMPARISON_OPERATORS:
             match = parts.pop()

         # Allow to escape operator-like field name by __
-        if len(parts) > 1 and parts[-1] == '':
+        if len(parts) > 1 and parts[-1] == "":
             parts.pop()

         if _doc_cls:
@@ -246,10 +279,10 @@ def update(_doc_cls=None, **update):
            appended_sub_field = False
            for field in fields:
                append_field = True
-                if isinstance(field, six.string_types):
+                if isinstance(field, str):
                    # Convert the S operator to $
-                    if field == 'S':
-                        field = '$'
+                    if field == "S":
+                        field = "$"
                    parts.append(field)
                    append_field = False
                else:
@@ -257,7 +290,7 @@ def update(_doc_cls=None, **update):
                if append_field:
                    appended_sub_field = False
                    cleaned_fields.append(field)
-                    if hasattr(field, 'field'):
+                    if hasattr(field, "field"):
                        cleaned_fields.append(field.field)
                        appended_sub_field = True

@@ -267,54 +300,53 @@ def update(_doc_cls=None, **update):
            else:
                field = cleaned_fields[-1]

-            GeoJsonBaseField = _import_class('GeoJsonBaseField')
+            GeoJsonBaseField = _import_class("GeoJsonBaseField")
            if isinstance(field, GeoJsonBaseField):
                value = field.to_mongo(value)

-            if op == 'pull':
+            if op == "pull":
                if field.required or value is not None:
-                    if match == 'in' and not isinstance(value, dict):
+                    if match in ("in", "nin") and not isinstance(value, dict):
                        value = _prepare_query_for_iterable(field, op, value)
                    else:
                        value = field.prepare_query_value(op, value)
-            elif op == 'push' and isinstance(value, (list, tuple, set)):
+            elif op == "push" and isinstance(value, (list, tuple, set)):
                value = [field.prepare_query_value(op, v) for v in value]
-            elif op in (None, 'set', 'push'):
+            elif op in (None, "set", "push"):
                if field.required or value is not None:
                    value = field.prepare_query_value(op, value)
-            elif op in ('pushAll', 'pullAll'):
+            elif op in ("pushAll", "pullAll"):
                value = [field.prepare_query_value(op, v) for v in value]
-            elif op in ('addToSet', 'setOnInsert'):
+            elif op in ("addToSet", "setOnInsert"):
                if isinstance(value, (list, tuple, set)):
                    value = [field.prepare_query_value(op, v) for v in value]
                elif field.required or value is not None:
                    value = field.prepare_query_value(op, value)
-            elif op == 'unset':
+            elif op == "unset":
                value = 1
+            elif op == "inc":
+                value = field.prepare_query_value(op, value)

        if match:
-            match = '$' + match
+            match = "$" + match
            value = {match: value}

-        key = '.'.join(parts)
+        key = ".".join(parts)

        if not op:
            raise InvalidQueryError('Updates must supply an operation '
                                    'eg: set__FIELD=value')

-        if 'pull' in op and '.' in key:
+        if "pull" in op and "." in key:
            # Dot operators don't work on pull operations
            # unless they point to a list field
            # Otherwise it uses nested dict syntax
-            if op == 'pullAll':
-                raise InvalidQueryError('pullAll operations only support '
-                                        'a single field depth')
+            if op == "pullAll":
+                raise InvalidQueryError(
+                    "pullAll operations only support a single field depth"
+                )

            # Look for the last list field and use dot notation until there
            field_classes = [c.__class__ for c in cleaned_fields]
            field_classes.reverse()
-            ListField = _import_class('ListField')
-            EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')
+            ListField = _import_class("ListField")
+            EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField")
            if ListField in field_classes or EmbeddedDocumentListField in field_classes:
                # Join all fields via dot notation to the last ListField or EmbeddedDocumentListField
                # Then process as normal
@@ -323,37 +355,36 @@ def update(_doc_cls=None, **update):
                else:
                    _check_field = EmbeddedDocumentListField

-                last_listField = len(
-                    cleaned_fields) - field_classes.index(_check_field)
-                key = '.'.join(parts[:last_listField])
+                last_listField = len(cleaned_fields) - field_classes.index(_check_field)
+                key = ".".join(parts[:last_listField])
                parts = parts[last_listField:]
                parts.insert(0, key)

                parts.reverse()
                for key in parts:
                    value = {key: value}
-        elif op == 'addToSet' and isinstance(value, list):
-            value = {key: {'$each': value}}
-        elif op in ('push', 'pushAll'):
+        elif op == "addToSet" and isinstance(value, list):
+            value = {key: {"$each": value}}
+        elif op in ("push", "pushAll"):
            if parts[-1].isdigit():
-                key = parts[0]
+                key = ".".join(parts[0:-1])
                position = int(parts[-1])
                # $position expects an iterable. If pushing a single value,
                # wrap it in a list.
                if not isinstance(value, (set, tuple, list)):
                    value = [value]
-                value = {key: {'$each': value, '$position': position}}
+                value = {key: {"$each": value, "$position": position}}
            else:
-                if op == 'pushAll':
-                    op = 'push'  # convert to non-deprecated keyword
+                if op == "pushAll":
+                    op = "push"  # convert to non-deprecated keyword
                    if not isinstance(value, (set, tuple, list)):
                        value = [value]
-                    value = {key: {'$each': value}}
+                    value = {key: {"$each": value}}
                else:
                    value = {key: value}
        else:
            value = {key: value}
-        key = '$' + op
+        key = "$" + op
        if key not in mongo_update:
            mongo_update[key] = value
        elif key in mongo_update and isinstance(mongo_update[key], dict):
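Sketch (annotation, not part of the diff) of the update transformation above, reusing the hypothetical Post model; dec flips the sign and maps to $inc via operator_map.

transform.update(Post, set__title="x")  # -> {"$set": {"title": "x"}}
transform.update(Post, unset__title=1)  # -> {"$unset": {"title": 1}}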
@@ -364,45 +395,47 @@ def update(_doc_cls=None, **update):

 def _geo_operator(field, op, value):
     """Helper to return the query for a given geo query."""
-    if op == 'max_distance':
-        value = {'$maxDistance': value}
-    elif op == 'min_distance':
-        value = {'$minDistance': value}
+    if op == "max_distance":
+        value = {"$maxDistance": value}
+    elif op == "min_distance":
+        value = {"$minDistance": value}
     elif field._geo_index == pymongo.GEO2D:
-        if op == 'within_distance':
-            value = {'$within': {'$center': value}}
-        elif op == 'within_spherical_distance':
-            value = {'$within': {'$centerSphere': value}}
-        elif op == 'within_polygon':
-            value = {'$within': {'$polygon': value}}
-        elif op == 'near':
-            value = {'$near': value}
-        elif op == 'near_sphere':
-            value = {'$nearSphere': value}
-        elif op == 'within_box':
-            value = {'$within': {'$box': value}}
+        if op == "within_distance":
+            value = {"$within": {"$center": value}}
+        elif op == "within_spherical_distance":
+            value = {"$within": {"$centerSphere": value}}
+        elif op == "within_polygon":
+            value = {"$within": {"$polygon": value}}
+        elif op == "near":
+            value = {"$near": value}
+        elif op == "near_sphere":
+            value = {"$nearSphere": value}
+        elif op == "within_box":
+            value = {"$within": {"$box": value}}
         else:
-            raise NotImplementedError('Geo method "%s" has not been '
-                                      'implemented for a GeoPointField' % op)
+            raise NotImplementedError(
+                'Geo method "%s" has not been ' "implemented for a GeoPointField" % op
+            )
     else:
-        if op == 'geo_within':
-            value = {'$geoWithin': _infer_geometry(value)}
-        elif op == 'geo_within_box':
-            value = {'$geoWithin': {'$box': value}}
-        elif op == 'geo_within_polygon':
-            value = {'$geoWithin': {'$polygon': value}}
-        elif op == 'geo_within_center':
-            value = {'$geoWithin': {'$center': value}}
-        elif op == 'geo_within_sphere':
-            value = {'$geoWithin': {'$centerSphere': value}}
-        elif op == 'geo_intersects':
-            value = {'$geoIntersects': _infer_geometry(value)}
-        elif op == 'near':
-            value = {'$near': _infer_geometry(value)}
+        if op == "geo_within":
+            value = {"$geoWithin": _infer_geometry(value)}
+        elif op == "geo_within_box":
+            value = {"$geoWithin": {"$box": value}}
+        elif op == "geo_within_polygon":
+            value = {"$geoWithin": {"$polygon": value}}
+        elif op == "geo_within_center":
+            value = {"$geoWithin": {"$center": value}}
+        elif op == "geo_within_sphere":
+            value = {"$geoWithin": {"$centerSphere": value}}
+        elif op == "geo_intersects":
+            value = {"$geoIntersects": _infer_geometry(value)}
+        elif op == "near":
+            value = {"$near": _infer_geometry(value)}
         else:
-            raise NotImplementedError(
-                'Geo method "%s" has not been implemented for a %s '
-                % (op, field._name)
-            )
+            raise NotImplementedError(
+                'Geo method "{}" has not been implemented for a {} '.format(
+                    op, field._name
+                )
+            )
     return value
@@ -412,52 +445,58 @@ def _infer_geometry(value):
     given value.
     """
     if isinstance(value, dict):
-        if '$geometry' in value:
+        if "$geometry" in value:
             return value
-        elif 'coordinates' in value and 'type' in value:
-            return {'$geometry': value}
-        raise InvalidQueryError('Invalid $geometry dictionary should have '
-                                'type and coordinates keys')
+        elif "coordinates" in value and "type" in value:
+            return {"$geometry": value}
+        raise InvalidQueryError(
+            "Invalid $geometry dictionary should have type and coordinates keys"
+        )
     elif isinstance(value, (list, set)):
         # TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon?
         # TODO: should both TypeError and IndexError be alike interpreted?

         try:
             value[0][0][0]
-            return {'$geometry': {'type': 'Polygon', 'coordinates': value}}
+            return {"$geometry": {"type": "Polygon", "coordinates": value}}
         except (TypeError, IndexError):
             pass

         try:
             value[0][0]
-            return {'$geometry': {'type': 'LineString', 'coordinates': value}}
+            return {"$geometry": {"type": "LineString", "coordinates": value}}
         except (TypeError, IndexError):
             pass

         try:
             value[0]
-            return {'$geometry': {'type': 'Point', 'coordinates': value}}
+            return {"$geometry": {"type": "Point", "coordinates": value}}
         except (TypeError, IndexError):
             pass

-    raise InvalidQueryError('Invalid $geometry data. Can be either a '
-                            'dictionary or (nested) lists of coordinate(s)')
+    raise InvalidQueryError(
+        "Invalid $geometry data. Can be either a "
+        "dictionary or (nested) lists of coordinate(s)"
+    )


 def _prepare_query_for_iterable(field, op, value):
     # We need a special check for BaseDocument, because - although it's iterable - using
     # it as such in the context of this method is most definitely a mistake.
-    BaseDocument = _import_class('BaseDocument')
+    BaseDocument = _import_class("BaseDocument")

     if isinstance(value, BaseDocument):
-        raise TypeError("When using the `in`, `nin`, `all`, or "
-                        "`near`-operators you can\'t use a "
-                        "`Document`, you must wrap your object "
-                        "in a list (object -> [object]).")
+        raise TypeError(
+            "When using the `in`, `nin`, `all`, or "
+            "`near`-operators you can't use a "
+            "`Document`, you must wrap your object "
+            "in a list (object -> [object])."
+        )

-    if not hasattr(value, '__iter__'):
-        raise TypeError("The `in`, `nin`, `all`, or "
-                        "`near`-operators must be applied to an "
-                        "iterable (e.g. a list).")
+    if not hasattr(value, "__iter__"):
+        raise TypeError(
+            "The `in`, `nin`, `all`, or "
+            "`near`-operators must be applied to an "
+            "iterable (e.g. a list)."
+        )

     return [field.prepare_query_value(op, v) for v in value]
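Sketch (annotation, not part of the diff): nesting depth selects the inferred GeoJSON type in _infer_geometry above.

from mongoengine.queryset.transform import _infer_geometry

_infer_geometry([40, 5])                        # -> {"$geometry": {"type": "Point", ...}}
_infer_geometry([[40, 5], [41, 6]])             # -> LineString
_infer_geometry([[[40, 5], [41, 6], [40, 5]]])  # -> Polygon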
@@ -1,23 +1,26 @@
 import copy
+import warnings

 from mongoengine.errors import InvalidQueryError
 from mongoengine.queryset import transform

-__all__ = ('Q',)
+__all__ = ("Q", "QNode")


-class QNodeVisitor(object):
-    """Base visitor class for visiting Q-object nodes in a query tree.
-    """
+def warn_empty_is_deprecated():
+    msg = "'empty' property is deprecated in favour of using 'not bool(filter)'"
+    warnings.warn(msg, DeprecationWarning, stacklevel=2)
+
+
+class QNodeVisitor:
+    """Base visitor class for visiting Q-object nodes in a query tree."""

     def visit_combination(self, combination):
-        """Called by QCombination objects.
-        """
+        """Called by QCombination objects."""
         return combination

     def visit_query(self, query):
-        """Called by (New)Q objects.
-        """
+        """Called by (New)Q objects."""
         return query

@@ -43,8 +46,7 @@ class SimplificationVisitor(QNodeVisitor):
         return combination

     def _query_conjunction(self, queries):
-        """Merges query dicts - effectively &ing them together.
-        """
+        """Merges query dicts - effectively &ing them together."""
         query_ops = set()
         combined_query = {}
         for query in queries:
@@ -69,16 +71,16 @@ class QueryCompilerVisitor(QNodeVisitor):
         self.document = document

     def visit_combination(self, combination):
-        operator = '$and'
+        operator = "$and"
         if combination.operation == combination.OR:
-            operator = '$or'
+            operator = "$or"
         return {operator: combination.children}

     def visit_query(self, query):
         return transform.query(self.document, **query.query)


-class QNode(object):
+class QNode:
     """Base class for nodes in query trees."""

     AND = 0
@@ -96,16 +98,19 @@ class QNode(object):
         """Combine this node with another node into a QCombination
         object.
         """
-        if getattr(other, 'empty', True):
+        # If the other Q() is empty, ignore it and just use `self`.
+        if not bool(other):
             return self

-        if self.empty:
+        # Or if this Q is empty, ignore it and just use `other`.
+        if not bool(self):
             return other

         return QCombination(operation, [self, other])

     @property
     def empty(self):
+        warn_empty_is_deprecated()
         return False

     def __or__(self, other):
@@ -131,6 +136,13 @@ class QCombination(QNode):
         else:
             self.children.append(node)

+    def __repr__(self):
+        op = " & " if self.operation is self.AND else " | "
+        return "(%s)" % op.join([repr(node) for node in self.children])
+
+    def __bool__(self):
+        return bool(self.children)
+
     def accept(self, visitor):
         for i in range(len(self.children)):
             if isinstance(self.children[i], QNode):
@@ -140,8 +152,16 @@ class QCombination(QNode):

     @property
     def empty(self):
+        warn_empty_is_deprecated()
         return not bool(self.children)

+    def __eq__(self, other):
+        return (
+            self.__class__ == other.__class__
+            and self.operation == other.operation
+            and self.children == other.children
+        )
+

 class Q(QNode):
     """A simple query object, used in a query tree to build up more complex
@@ -151,9 +171,19 @@ class Q(QNode):
     def __init__(self, **query):
         self.query = query

+    def __repr__(self):
+        return "Q(**%s)" % repr(self.query)
+
+    def __bool__(self):
+        return bool(self.query)
+
+    def __eq__(self, other):
+        return self.__class__ == other.__class__ and self.query == other.query
+
     def accept(self, visitor):
         return visitor.visit_query(self)

     @property
     def empty(self):
+        warn_empty_is_deprecated()
         return not bool(self.query)
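Sketch (annotation, not part of the diff) of the new truthiness-based combination above.

from mongoengine.queryset.visitor import Q

assert (Q() | Q(name="foo")) == Q(name="foo")  # the empty side is dropped
combo = Q(name="foo") & Q(age__gte=18)
print(repr(combo))  # (Q(**{'name': 'foo'}) & Q(**{'age__gte': 18}))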
@@ -1,5 +1,12 @@
-__all__ = ('pre_init', 'post_init', 'pre_save', 'pre_save_post_validation',
-           'post_save', 'pre_delete', 'post_delete')
+__all__ = (
+    "pre_init",
+    "post_init",
+    "pre_save",
+    "pre_save_post_validation",
+    "post_save",
+    "pre_delete",
+    "post_delete",
+)

 signals_available = False
 try:
@@ -7,11 +14,12 @@ try:

     signals_available = True
 except ImportError:
-    class Namespace(object):
+
+    class Namespace:
         def signal(self, name, doc=None):
             return _FakeSignal(name, doc)

-    class _FakeSignal(object):
+    class _FakeSignal:
         """If blinker is unavailable, create a fake class with the same
         interface that allows sending of signals but will fail with an
         error on anything else. Instead of doing anything on send, it
@@ -23,13 +31,16 @@ except ImportError:
             self.__doc__ = doc

         def _fail(self, *args, **kwargs):
-            raise RuntimeError('signalling support is unavailable '
-                               'because the blinker library is '
-                               'not installed.')
+            raise RuntimeError(
+                "signalling support is unavailable "
+                "because the blinker library is "
+                "not installed."
+            )

         send = lambda *a, **kw: None  # noqa
-        connect = disconnect = has_receivers_for = receivers_for = \
-            temporarily_connected_to = _fail
+        connect = (
+            disconnect
+        ) = has_receivers_for = receivers_for = temporarily_connected_to = _fail
         del _fail

@@ -37,12 +48,12 @@ except ImportError:
 # not put signals in here. Create your own namespace instead.
 _signals = Namespace()

-pre_init = _signals.signal('pre_init')
-post_init = _signals.signal('post_init')
-pre_save = _signals.signal('pre_save')
-pre_save_post_validation = _signals.signal('pre_save_post_validation')
-post_save = _signals.signal('post_save')
-pre_delete = _signals.signal('pre_delete')
-post_delete = _signals.signal('post_delete')
-pre_bulk_insert = _signals.signal('pre_bulk_insert')
-post_bulk_insert = _signals.signal('post_bulk_insert')
+pre_init = _signals.signal("pre_init")
+post_init = _signals.signal("post_init")
+pre_save = _signals.signal("pre_save")
+pre_save_post_validation = _signals.signal("pre_save_post_validation")
+post_save = _signals.signal("post_save")
+pre_delete = _signals.signal("pre_delete")
+post_delete = _signals.signal("post_delete")
+pre_bulk_insert = _signals.signal("pre_bulk_insert")
+post_bulk_insert = _signals.signal("post_bulk_insert")
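Usage sketch (annotation, not part of the diff): connecting a handler to one of the signals defined above; this requires the blinker library.

from mongoengine import signals

def on_post_save(sender, document, **kwargs):
    print("saved:", document)

signals.post_save.connect(on_post_save)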
requirements-dev.txt (new file, 7 lines)
@@ -0,0 +1,7 @@
+black
+flake8
+pre-commit
+pytest
+ipdb
+ipython
+tox
@@ -1,7 +0,0 @@
-nose
-pymongo>=2.7.1
-six==1.10.0
-flake8
-flake8-import-order
-Sphinx==1.5.5
-sphinx-rtd-theme==0.2.4
setup.cfg (23 lines changed)
@@ -1,11 +1,18 @@
-[nosetests]
-verbosity=2
-detailed-errors=1
-#tests=tests
-cover-package=mongoengine
-
 [flake8]
-ignore=E501,F401,F403,F405,I201,I202
+ignore=E501,F403,F405,I201,I202,W504,W605,W503,B007
 exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests
 max-complexity=47
 application-import-names=mongoengine,tests
+
+[tool:pytest]
+# Limits the discovery to tests directory
+# avoids that it runs for instance the benchmark
+testpaths = tests
+
+[isort]
+known_first_party = mongoengine,tests
+default_section = THIRDPARTY
+multi_line_output = 3
+include_trailing_comma = True
+combine_as_imports = True
+line_length = 70
+ensure_newline_before_comments = 1
setup.py (141 lines changed)
@@ -1,20 +1,20 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
from pkg_resources import normalize_path
|
||||
from setuptools import find_packages, setup
|
||||
from setuptools.command.test import test as TestCommand
|
||||
|
||||
# Hack to silence atexit traceback in newer python versions
|
||||
try:
|
||||
import multiprocessing
|
||||
import multiprocessing # noqa: F401
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
DESCRIPTION = (
|
||||
'MongoEngine is a Python Object-Document '
|
||||
'Mapper for working with MongoDB.'
|
||||
)
|
||||
DESCRIPTION = "MongoEngine is a Python Object-Document Mapper for working with MongoDB."
|
||||
|
||||
try:
|
||||
with open('README.rst') as fin:
|
||||
with open("README.rst") as fin:
|
||||
LONG_DESCRIPTION = fin.read()
|
||||
except Exception:
|
||||
LONG_DESCRIPTION = None
|
||||
@@ -24,64 +24,123 @@ def get_version(version_tuple):
|
||||
"""Return the version tuple as a string, e.g. for (0, 10, 7),
|
||||
return '0.10.7'.
|
||||
"""
|
||||
return '.'.join(map(str, version_tuple))
|
||||
return ".".join(map(str, version_tuple))
|
||||
|
||||
|
||||
class PyTest(TestCommand):
|
||||
"""Will force pytest to search for tests inside the build directory
|
||||
for 2to3 converted code (used by tox), instead of the current directory.
|
||||
Required as long as we need 2to3
|
||||
|
||||
Known Limitation: https://tox.readthedocs.io/en/latest/example/pytest.html#known-issues-and-limitations
|
||||
Source: https://www.hackzine.org/python-testing-with-pytest-and-2to3-plus-tox-and-travis-ci.html
|
||||
"""
|
||||
|
||||
# https://pytest.readthedocs.io/en/2.7.3/goodpractises.html#integration-with-setuptools-test-commands
|
||||
# Allows to provide pytest command argument through the test runner command `python setup.py test`
|
||||
# e.g: `python setup.py test -a "-k=test"`
|
||||
# This only works for 1 argument though
|
||||
user_options = [("pytest-args=", "a", "Arguments to pass to py.test")]
|
||||
|
||||
def initialize_options(self):
|
||||
TestCommand.initialize_options(self)
|
||||
self.pytest_args = ""
|
||||
|
||||
def finalize_options(self):
|
||||
TestCommand.finalize_options(self)
|
||||
self.test_args = ["tests"]
|
||||
self.test_suite = True
|
||||
|
||||
def run_tests(self):
|
||||
# import here, cause outside the eggs aren't loaded
|
||||
import pytest
|
||||
from pkg_resources import _namespace_packages
|
||||
|
||||
# Purge modules under test from sys.modules. The test loader will
|
||||
# re-import them from the build location. Required when 2to3 is used
|
||||
# with namespace packages.
|
||||
if sys.version_info >= (3,) and getattr(self.distribution, "use_2to3", False):
|
||||
module = self.test_args[-1].split(".")[0]
|
||||
if module in _namespace_packages:
|
||||
del_modules = []
|
||||
if module in sys.modules:
|
||||
del_modules.append(module)
|
||||
module += "."
|
||||
for name in sys.modules:
|
||||
if name.startswith(module):
|
||||
del_modules.append(name)
|
||||
map(sys.modules.__delitem__, del_modules)
|
||||
|
||||
# Run on the build directory for 2to3-built code
|
||||
# This will prevent the old 2.x code from being found
|
||||
# by py.test discovery mechanism, that apparently
|
||||
# ignores sys.path..
|
||||
ei_cmd = self.get_finalized_command("egg_info")
|
||||
self.test_args = [normalize_path(ei_cmd.egg_base)]
|
||||
|
||||
cmd_args = self.test_args + ([self.pytest_args] if self.pytest_args else [])
|
||||
errno = pytest.main(cmd_args)
|
||||
sys.exit(errno)
|
||||
|
||||
|
||||
# Dirty hack to get version number from monogengine/__init__.py - we can't
|
||||
# import it as it depends on PyMongo and PyMongo isn't installed until this
|
||||
# file is read
|
||||
init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py')
|
||||
version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0]
|
||||
init = os.path.join(os.path.dirname(__file__), "mongoengine", "__init__.py")
|
||||
version_line = list(filter(lambda l: l.startswith("VERSION"), open(init)))[0]
|
||||
|
||||
VERSION = get_version(eval(version_line.split('=')[-1]))
|
||||
VERSION = get_version(eval(version_line.split("=")[-1]))
|
||||
|
||||
CLASSIFIERS = [
    'Development Status :: 4 - Beta',
    'Intended Audience :: Developers',
    'License :: OSI Approved :: MIT License',
    'Operating System :: OS Independent',
    'Programming Language :: Python',
    "Programming Language :: Python :: 2",
    "Programming Language :: Python :: 2.7",
    "Development Status :: 5 - Production/Stable",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.3",
    "Programming Language :: Python :: 3.4",
    "Programming Language :: Python :: 3.5",
    "Programming Language :: Python :: 3.6",
    "Programming Language :: Python :: 3.7",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: Implementation :: CPython",
    "Programming Language :: Python :: Implementation :: PyPy",
    'Topic :: Database',
    'Topic :: Software Development :: Libraries :: Python Modules',
    "Topic :: Database",
    "Topic :: Software Development :: Libraries :: Python Modules",
]

extra_opts = {
    'packages': find_packages(exclude=['tests', 'tests.*']),
    'tests_require': ['nose', 'coverage==4.2', 'blinker', 'Pillow>=2.0.0']
    "packages": find_packages(exclude=["tests", "tests.*"]),
    "tests_require": [
        "pytest<5.0",
        "pytest-cov",
        "coverage",
        "blinker",
        "Pillow>=7.0.0",
    ],
}

if "test" in sys.argv:
    extra_opts["packages"] = find_packages()
    extra_opts["package_data"] = {
        "tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]
    }
if sys.version_info[0] == 3:
    extra_opts['use_2to3'] = True
    if 'test' in sys.argv or 'nosetests' in sys.argv:
        extra_opts['packages'] = find_packages()
        extra_opts['package_data'] = {
            'tests': ['fields/mongoengine.png', 'fields/mongodb_leaf.png']}
else:
    extra_opts['tests_require'] += ['python-dateutil']

setup(
    name='mongoengine',
    name="mongoengine",
    version=VERSION,
    author='Harry Marr',
    author_email='harry.marr@gmail.com',
    author="Harry Marr",
    author_email="harry.marr@gmail.com",
    maintainer="Stefan Wojcik",
    maintainer_email="wojcikstefan@gmail.com",
    url='http://mongoengine.org/',
    download_url='https://github.com/MongoEngine/mongoengine/tarball/master',
    license='MIT',
    url="http://mongoengine.org/",
    download_url="https://github.com/MongoEngine/mongoengine/tarball/master",
    license="MIT",
    include_package_data=True,
    description=DESCRIPTION,
    long_description=LONG_DESCRIPTION,
    platforms=['any'],
    platforms=["any"],
    classifiers=CLASSIFIERS,
    install_requires=['pymongo>=2.7.1', 'six'],
    test_suite='nose.collector',
    python_requires=">=3.6",
    install_requires=["pymongo>=3.4, <4.0"],
    cmdclass={"test": PyTest},
    **extra_opts
)

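Reading the duplicated keyword pairs above as before/after: install_requires moves from pymongo>=2.7.1 plus six to pymongo>=3.4,<4.0 alone, and the nose test_suite gives way to the PyTest command with python_requires>=3.6. A hedged sketch of checking the new bound at runtime:

# Hypothetical runtime check (not from the diff): confirm the installed
# pymongo satisfies the ">=3.4, <4.0" constraint declared above.
import pymongo

major, minor = (int(p) for p in pymongo.version.split(".")[:2])
assert (3, 4) <= (major, minor) < (4, 0), "pymongo outside the supported range"
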
@@ -1,4 +0,0 @@
from all_warnings import AllWarnings
from document import *
from queryset import *
from fields import *

@@ -1,42 +0,0 @@
"""
This test has been put into a module. This is because it tests warnings that
only get triggered on first hit. This way we can ensure it's imported into the
top level and called first by the test suite.
"""
import unittest
import warnings

from mongoengine import *


__all__ = ('AllWarnings', )


class AllWarnings(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.warning_list = []
        self.showwarning_default = warnings.showwarning
        warnings.showwarning = self.append_to_warning_list

    def append_to_warning_list(self, message, category, *args):
        self.warning_list.append({"message": message,
                                  "category": category})

    def tearDown(self):
        # restore default handling of warnings
        warnings.showwarning = self.showwarning_default

    def test_document_collection_syntax_warning(self):

        class NonAbstractBase(Document):
            meta = {'allow_inheritance': True}

        class InheritedDocumentFailTest(NonAbstractBase):
            meta = {'collection': 'fail'}

        warning = self.warning_list[0]
        self.assertEqual(SyntaxWarning, warning["category"])
        self.assertEqual('non_abstract_base',
                         InheritedDocumentFailTest._get_collection_name())

35 tests/all_warnings/test_warnings.py Normal file
@@ -0,0 +1,35 @@
"""
This test has been put into a module. This is because it tests warnings that
only get triggered on first hit. This way we can ensure it's imported into the
top level and called first by the test suite.
"""
import unittest
import warnings

from mongoengine import *


class TestAllWarnings(unittest.TestCase):
    def setUp(self):
        connect(db="mongoenginetest")
        self.warning_list = []
        self.showwarning_default = warnings.showwarning
        warnings.showwarning = self.append_to_warning_list

    def append_to_warning_list(self, message, category, *args):
        self.warning_list.append({"message": message, "category": category})

    def tearDown(self):
        # restore default handling of warnings
        warnings.showwarning = self.showwarning_default

    def test_document_collection_syntax_warning(self):
        class NonAbstractBase(Document):
            meta = {"allow_inheritance": True}

        class InheritedDocumentFailTest(NonAbstractBase):
            meta = {"collection": "fail"}

        warning = self.warning_list[0]
        assert SyntaxWarning == warning["category"]
        assert "non_abstract_base" == InheritedDocumentFailTest._get_collection_name()
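A side note on the pattern above: monkey-patching warnings.showwarning works, but pytest ships a context manager for the same job. A hedged sketch of the alternative, assuming pytest is available:

# Hypothetical equivalent (not from the diff): pytest.warns captures the
# warning without patching warnings.showwarning.
import warnings
import pytest

def test_collection_syntax_warning_alternative():
    with pytest.warns(SyntaxWarning):
        warnings.warn("collection overridden in non-abstract document", SyntaxWarning)
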
@@ -1,13 +0,0 @@
import unittest

from class_methods import *
from delta import *
from dynamic import *
from indexes import *
from inheritance import *
from instance import *
from json_serialisation import *
from validation import *

if __name__ == '__main__':
    unittest.main()

@@ -1,371 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import unittest
|
||||
|
||||
from mongoengine import *
|
||||
|
||||
from mongoengine.queryset import NULLIFY, PULL
|
||||
from mongoengine.connection import get_db
|
||||
from tests.utils import needs_mongodb_v26
|
||||
|
||||
__all__ = ("ClassMethodsTest", )
|
||||
|
||||
|
||||
class ClassMethodsTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
connect(db='mongoenginetest')
|
||||
self.db = get_db()
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
|
||||
non_field = True
|
||||
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
self.Person = Person
|
||||
|
||||
def tearDown(self):
|
||||
for collection in self.db.collection_names():
|
||||
if 'system.' in collection:
|
||||
continue
|
||||
self.db.drop_collection(collection)
|
||||
|
||||
def test_definition(self):
|
||||
"""Ensure that document may be defined using fields.
|
||||
"""
|
||||
self.assertEqual(['_cls', 'age', 'id', 'name'],
|
||||
sorted(self.Person._fields.keys()))
|
||||
self.assertEqual(["IntField", "ObjectIdField", "StringField", "StringField"],
|
||||
sorted([x.__class__.__name__ for x in
|
||||
self.Person._fields.values()]))
|
||||
|
||||
def test_get_db(self):
|
||||
"""Ensure that get_db returns the expected db.
|
||||
"""
|
||||
db = self.Person._get_db()
|
||||
self.assertEqual(self.db, db)
|
||||
|
||||
def test_get_collection_name(self):
|
||||
"""Ensure that get_collection_name returns the expected collection
|
||||
name.
|
||||
"""
|
||||
collection_name = 'person'
|
||||
self.assertEqual(collection_name, self.Person._get_collection_name())
|
||||
|
||||
def test_get_collection(self):
|
||||
"""Ensure that get_collection returns the expected collection.
|
||||
"""
|
||||
collection_name = 'person'
|
||||
collection = self.Person._get_collection()
|
||||
self.assertEqual(self.db[collection_name], collection)
|
||||
|
||||
def test_drop_collection(self):
|
||||
"""Ensure that the collection may be dropped from the database.
|
||||
"""
|
||||
collection_name = 'person'
|
||||
self.Person(name='Test').save()
|
||||
self.assertTrue(collection_name in self.db.collection_names())
|
||||
|
||||
self.Person.drop_collection()
|
||||
self.assertFalse(collection_name in self.db.collection_names())
|
||||
|
||||
def test_register_delete_rule(self):
|
||||
"""Ensure that register delete rule adds a delete rule to the document
|
||||
meta.
|
||||
"""
|
||||
class Job(Document):
|
||||
employee = ReferenceField(self.Person)
|
||||
|
||||
self.assertEqual(self.Person._meta.get('delete_rules'), None)
|
||||
|
||||
self.Person.register_delete_rule(Job, 'employee', NULLIFY)
|
||||
self.assertEqual(self.Person._meta['delete_rules'],
|
||||
{(Job, 'employee'): NULLIFY})
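The register_delete_rule call tested above is the low-level spelling; day-to-day code usually declares the rule on the field. A hedged sketch of the same NULLIFY behavior, assuming a reachable test database:

# Hypothetical sketch (not from the diff): declarative delete rule; deleting
# a Person nullifies Job.employee instead of leaving a dangling reference.
from mongoengine import Document, StringField, ReferenceField, NULLIFY, connect

connect(db="mongoenginetest")

class Person(Document):
    name = StringField()

class Job(Document):
    title = StringField()
    employee = ReferenceField(Person, reverse_delete_rule=NULLIFY)

Person.drop_collection()
Job.drop_collection()

person = Person(name="Ross").save()
Job(title="Paleontologist", employee=person).save()
person.delete()  # fires the NULLIFY rule registered by the field above
assert Job.objects.first().employee is None
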
|
||||
|
||||
def test_compare_indexes(self):
|
||||
""" Ensure that the indexes are properly created and that
|
||||
compare_indexes identifies the missing/extra indexes
|
||||
"""
|
||||
|
||||
class BlogPost(Document):
|
||||
author = StringField()
|
||||
title = StringField()
|
||||
description = StringField()
|
||||
tags = StringField()
|
||||
|
||||
meta = {
|
||||
'indexes': [('author', 'title')]
|
||||
}
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
BlogPost.ensure_indexes()
|
||||
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
|
||||
|
||||
BlogPost.ensure_index(['author', 'description'])
|
||||
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [[('author', 1), ('description', 1)]] })
|
||||
|
||||
BlogPost._get_collection().drop_index('author_1_description_1')
|
||||
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
|
||||
|
||||
BlogPost._get_collection().drop_index('author_1_title_1')
|
||||
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [[('author', 1), ('title', 1)]], 'extra': [] })
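Beyond tests, the compare_indexes output asserted above makes a useful startup guard. A hedged sketch, assuming documents are registered and a connection exists:

# Hypothetical deployment check (not from the diff): refuse to start when the
# database indexes have drifted from the document's declared indexes.
def assert_indexes_in_sync(document_cls):
    diff = document_cls.compare_indexes()  # {"missing": [...], "extra": [...]}
    if diff["missing"] or diff["extra"]:
        raise RuntimeError("%s index drift: %r" % (document_cls.__name__, diff))
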
|
||||
|
||||
def test_compare_indexes_inheritance(self):
|
||||
""" Ensure that the indexes are properly created and that
|
||||
compare_indexes identifies the missing/extra indexes for subclassed
|
||||
documents (_cls included)
|
||||
"""
|
||||
|
||||
class BlogPost(Document):
|
||||
author = StringField()
|
||||
title = StringField()
|
||||
description = StringField()
|
||||
|
||||
meta = {
|
||||
'allow_inheritance': True
|
||||
}
|
||||
|
||||
class BlogPostWithTags(BlogPost):
|
||||
tags = StringField()
|
||||
tag_list = ListField(StringField())
|
||||
|
||||
meta = {
|
||||
'indexes': [('author', 'tags')]
|
||||
}
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
BlogPost.ensure_indexes()
|
||||
BlogPostWithTags.ensure_indexes()
|
||||
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
|
||||
|
||||
BlogPostWithTags.ensure_index(['author', 'tag_list'])
|
||||
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [[('_cls', 1), ('author', 1), ('tag_list', 1)]] })
|
||||
|
||||
BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tag_list_1')
|
||||
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
|
||||
|
||||
BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tags_1')
|
||||
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [[('_cls', 1), ('author', 1), ('tags', 1)]], 'extra': [] })
|
||||
|
||||
def test_compare_indexes_multiple_subclasses(self):
|
||||
""" Ensure that compare_indexes behaves correctly if called from a
|
||||
class, which base class has multiple subclasses
|
||||
"""
|
||||
|
||||
class BlogPost(Document):
|
||||
author = StringField()
|
||||
title = StringField()
|
||||
description = StringField()
|
||||
|
||||
meta = {
|
||||
'allow_inheritance': True
|
||||
}
|
||||
|
||||
class BlogPostWithTags(BlogPost):
|
||||
tags = StringField()
|
||||
tag_list = ListField(StringField())
|
||||
|
||||
meta = {
|
||||
'indexes': [('author', 'tags')]
|
||||
}
|
||||
|
||||
class BlogPostWithCustomField(BlogPost):
|
||||
custom = DictField()
|
||||
|
||||
meta = {
|
||||
'indexes': [('author', 'custom')]
|
||||
}
|
||||
|
||||
BlogPost.ensure_indexes()
|
||||
BlogPostWithTags.ensure_indexes()
|
||||
BlogPostWithCustomField.ensure_indexes()
|
||||
|
||||
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
|
||||
self.assertEqual(BlogPostWithTags.compare_indexes(), { 'missing': [], 'extra': [] })
|
||||
self.assertEqual(BlogPostWithCustomField.compare_indexes(), { 'missing': [], 'extra': [] })
|
||||
|
||||
@needs_mongodb_v26
|
||||
def test_compare_indexes_for_text_indexes(self):
|
||||
""" Ensure that compare_indexes behaves correctly for text indexes """
|
||||
|
||||
class Doc(Document):
|
||||
a = StringField()
|
||||
b = StringField()
|
||||
meta = {'indexes': [
|
||||
{'fields': ['$a', "$b"],
|
||||
'default_language': 'english',
|
||||
'weights': {'a': 10, 'b': 2}
|
||||
}
|
||||
]}
|
||||
|
||||
Doc.drop_collection()
|
||||
Doc.ensure_indexes()
|
||||
actual = Doc.compare_indexes()
|
||||
expected = {'missing': [], 'extra': []}
|
||||
self.assertEqual(actual, expected)
|
||||
|
||||
def test_list_indexes_inheritance(self):
|
||||
""" ensure that all of the indexes are listed regardless of the super-
|
||||
or sub-class that we call it from
|
||||
"""
|
||||
|
||||
class BlogPost(Document):
|
||||
author = StringField()
|
||||
title = StringField()
|
||||
description = StringField()
|
||||
|
||||
meta = {
|
||||
'allow_inheritance': True
|
||||
}
|
||||
|
||||
class BlogPostWithTags(BlogPost):
|
||||
tags = StringField()
|
||||
|
||||
meta = {
|
||||
'indexes': [('author', 'tags')]
|
||||
}
|
||||
|
||||
class BlogPostWithTagsAndExtraText(BlogPostWithTags):
|
||||
extra_text = StringField()
|
||||
|
||||
meta = {
|
||||
'indexes': [('author', 'tags', 'extra_text')]
|
||||
}
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
BlogPost.ensure_indexes()
|
||||
BlogPostWithTags.ensure_indexes()
|
||||
BlogPostWithTagsAndExtraText.ensure_indexes()
|
||||
|
||||
self.assertEqual(BlogPost.list_indexes(),
|
||||
BlogPostWithTags.list_indexes())
|
||||
self.assertEqual(BlogPost.list_indexes(),
|
||||
BlogPostWithTagsAndExtraText.list_indexes())
|
||||
self.assertEqual(BlogPost.list_indexes(),
|
||||
[[('_cls', 1), ('author', 1), ('tags', 1)],
|
||||
[('_cls', 1), ('author', 1), ('tags', 1), ('extra_text', 1)],
|
||||
[(u'_id', 1)], [('_cls', 1)]])
|
||||
|
||||
def test_register_delete_rule_inherited(self):
|
||||
|
||||
class Vaccine(Document):
|
||||
name = StringField(required=True)
|
||||
|
||||
meta = {"indexes": ["name"]}
|
||||
|
||||
class Animal(Document):
|
||||
family = StringField(required=True)
|
||||
vaccine_made = ListField(ReferenceField("Vaccine", reverse_delete_rule=PULL))
|
||||
|
||||
meta = {"allow_inheritance": True, "indexes": ["family"]}
|
||||
|
||||
class Cat(Animal):
|
||||
name = StringField(required=True)
|
||||
|
||||
self.assertEqual(Vaccine._meta['delete_rules'][(Animal, 'vaccine_made')], PULL)
|
||||
self.assertEqual(Vaccine._meta['delete_rules'][(Cat, 'vaccine_made')], PULL)
|
||||
|
||||
def test_collection_naming(self):
|
||||
"""Ensure that a collection with a specified name may be used.
|
||||
"""
|
||||
|
||||
class DefaultNamingTest(Document):
|
||||
pass
|
||||
self.assertEqual('default_naming_test',
|
||||
DefaultNamingTest._get_collection_name())
|
||||
|
||||
class CustomNamingTest(Document):
|
||||
meta = {'collection': 'pimp_my_collection'}
|
||||
|
||||
self.assertEqual('pimp_my_collection',
|
||||
CustomNamingTest._get_collection_name())
|
||||
|
||||
class DynamicNamingTest(Document):
|
||||
meta = {'collection': lambda c: "DYNAMO"}
|
||||
self.assertEqual('DYNAMO', DynamicNamingTest._get_collection_name())
|
||||
|
||||
# Use Abstract class to handle backwards compatibility
|
||||
class BaseDocument(Document):
|
||||
meta = {
|
||||
'abstract': True,
|
||||
'collection': lambda c: c.__name__.lower()
|
||||
}
|
||||
|
||||
class OldNamingConvention(BaseDocument):
|
||||
pass
|
||||
self.assertEqual('oldnamingconvention',
|
||||
OldNamingConvention._get_collection_name())
|
||||
|
||||
class InheritedAbstractNamingTest(BaseDocument):
|
||||
meta = {'collection': 'wibble'}
|
||||
self.assertEqual('wibble',
|
||||
InheritedAbstractNamingTest._get_collection_name())
|
||||
|
||||
# Mixin tests
|
||||
class BaseMixin(object):
|
||||
meta = {
|
||||
'collection': lambda c: c.__name__.lower()
|
||||
}
|
||||
|
||||
class OldMixinNamingConvention(Document, BaseMixin):
|
||||
pass
|
||||
self.assertEqual('oldmixinnamingconvention',
|
||||
OldMixinNamingConvention._get_collection_name())
|
||||
|
||||
class BaseMixin(object):
|
||||
meta = {
|
||||
'collection': lambda c: c.__name__.lower()
|
||||
}
|
||||
|
||||
class BaseDocument(Document, BaseMixin):
|
||||
meta = {'allow_inheritance': True}
|
||||
|
||||
class MyDocument(BaseDocument):
|
||||
pass
|
||||
|
||||
self.assertEqual('basedocument', MyDocument._get_collection_name())
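The naming rules exercised in this test reduce to three declarative options. A hedged summary sketch, assuming the callable-collection form is still supported by the mongoengine version in use (the test covering it is being deleted in this compare):

# Hypothetical sketch (not from the diff): the three collection-naming modes.
from mongoengine import Document, StringField

class Article(Document):
    title = StringField()  # default name: snake_cased class -> "article"

class LegacyArticle(Document):
    title = StringField()
    meta = {"collection": "articles_v1"}  # explicit name wins

class DynamicArticle(Document):
    title = StringField()
    meta = {"collection": lambda c: c.__name__.lower()}  # callable receives the class
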
|
||||
|
||||
def test_custom_collection_name_operations(self):
|
||||
"""Ensure that a collection with a specified name is used as expected.
|
||||
"""
|
||||
collection_name = 'personCollTest'
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
meta = {'collection': collection_name}
|
||||
|
||||
Person(name="Test User").save()
|
||||
self.assertTrue(collection_name in self.db.collection_names())
|
||||
|
||||
user_obj = self.db[collection_name].find_one()
|
||||
self.assertEqual(user_obj['name'], "Test User")
|
||||
|
||||
user_obj = Person.objects[0]
|
||||
self.assertEqual(user_obj.name, "Test User")
|
||||
|
||||
Person.drop_collection()
|
||||
self.assertFalse(collection_name in self.db.collection_names())
|
||||
|
||||
def test_collection_name_and_primary(self):
|
||||
"""Ensure that a collection with a specified name may be used.
|
||||
"""
|
||||
|
||||
class Person(Document):
|
||||
name = StringField(primary_key=True)
|
||||
meta = {'collection': 'app'}
|
||||
|
||||
Person(name="Test User").save()
|
||||
|
||||
user_obj = Person.objects.first()
|
||||
self.assertEqual(user_obj.name, "Test User")
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
@@ -1,867 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import unittest
|
||||
|
||||
from bson import SON
|
||||
from mongoengine import *
|
||||
from mongoengine.connection import get_db
|
||||
|
||||
__all__ = ("DeltaTest",)
|
||||
|
||||
|
||||
class DeltaTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
connect(db='mongoenginetest')
|
||||
self.db = get_db()
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
|
||||
non_field = True
|
||||
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
self.Person = Person
|
||||
|
||||
def tearDown(self):
|
||||
for collection in self.db.collection_names():
|
||||
if 'system.' in collection:
|
||||
continue
|
||||
self.db.drop_collection(collection)
|
||||
|
||||
def test_delta(self):
|
||||
self.delta(Document)
|
||||
self.delta(DynamicDocument)
|
||||
|
||||
def delta(self, DocClass):
|
||||
|
||||
class Doc(DocClass):
|
||||
string_field = StringField()
|
||||
int_field = IntField()
|
||||
dict_field = DictField()
|
||||
list_field = ListField()
|
||||
|
||||
Doc.drop_collection()
|
||||
doc = Doc()
|
||||
doc.save()
|
||||
|
||||
doc = Doc.objects.first()
|
||||
self.assertEqual(doc._get_changed_fields(), [])
|
||||
self.assertEqual(doc._delta(), ({}, {}))
|
||||
|
||||
doc.string_field = 'hello'
|
||||
self.assertEqual(doc._get_changed_fields(), ['string_field'])
|
||||
self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {}))
|
||||
|
||||
doc._changed_fields = []
|
||||
doc.int_field = 1
|
||||
self.assertEqual(doc._get_changed_fields(), ['int_field'])
|
||||
self.assertEqual(doc._delta(), ({'int_field': 1}, {}))
|
||||
|
||||
doc._changed_fields = []
|
||||
dict_value = {'hello': 'world', 'ping': 'pong'}
|
||||
doc.dict_field = dict_value
|
||||
self.assertEqual(doc._get_changed_fields(), ['dict_field'])
|
||||
self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {}))
|
||||
|
||||
doc._changed_fields = []
|
||||
list_value = ['1', 2, {'hello': 'world'}]
|
||||
doc.list_field = list_value
|
||||
self.assertEqual(doc._get_changed_fields(), ['list_field'])
|
||||
self.assertEqual(doc._delta(), ({'list_field': list_value}, {}))
|
||||
|
||||
# Test unsetting
|
||||
doc._changed_fields = []
|
||||
doc.dict_field = {}
|
||||
self.assertEqual(doc._get_changed_fields(), ['dict_field'])
|
||||
self.assertEqual(doc._delta(), ({}, {'dict_field': 1}))
|
||||
|
||||
doc._changed_fields = []
|
||||
doc.list_field = []
|
||||
self.assertEqual(doc._get_changed_fields(), ['list_field'])
|
||||
self.assertEqual(doc._delta(), ({}, {'list_field': 1}))
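For orientation: the (updates, removals) tuples asserted throughout this file correspond to MongoDB's $set and $unset operators, which is what save() ultimately sends. A hedged illustration of that mapping, under the assumption that the internal behavior matches these assertions:

# Hypothetical illustration (not from the diff): turning a _delta() pair into
# the update document MongoDB would receive.
updates = {"string_field": "hello"}  # first element of _delta()
removals = {"dict_field": 1}         # second element of _delta()

update_doc = {}
if updates:
    update_doc["$set"] = updates
if removals:
    update_doc["$unset"] = removals
assert update_doc == {"$set": {"string_field": "hello"}, "$unset": {"dict_field": 1}}
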
|
||||
|
||||
def test_delta_recursive(self):
|
||||
self.delta_recursive(Document, EmbeddedDocument)
|
||||
self.delta_recursive(DynamicDocument, EmbeddedDocument)
|
||||
self.delta_recursive(Document, DynamicEmbeddedDocument)
|
||||
self.delta_recursive(DynamicDocument, DynamicEmbeddedDocument)
|
||||
|
||||
def delta_recursive(self, DocClass, EmbeddedClass):
|
||||
|
||||
class Embedded(EmbeddedClass):
|
||||
id = StringField()
|
||||
string_field = StringField()
|
||||
int_field = IntField()
|
||||
dict_field = DictField()
|
||||
list_field = ListField()
|
||||
|
||||
class Doc(DocClass):
|
||||
string_field = StringField()
|
||||
int_field = IntField()
|
||||
dict_field = DictField()
|
||||
list_field = ListField()
|
||||
embedded_field = EmbeddedDocumentField(Embedded)
|
||||
|
||||
Doc.drop_collection()
|
||||
doc = Doc()
|
||||
doc.save()
|
||||
|
||||
doc = Doc.objects.first()
|
||||
self.assertEqual(doc._get_changed_fields(), [])
|
||||
self.assertEqual(doc._delta(), ({}, {}))
|
||||
|
||||
embedded_1 = Embedded()
|
||||
embedded_1.id = "010101"
|
||||
embedded_1.string_field = 'hello'
|
||||
embedded_1.int_field = 1
|
||||
embedded_1.dict_field = {'hello': 'world'}
|
||||
embedded_1.list_field = ['1', 2, {'hello': 'world'}]
|
||||
doc.embedded_field = embedded_1
|
||||
|
||||
self.assertEqual(doc._get_changed_fields(), ['embedded_field'])
|
||||
|
||||
embedded_delta = {
|
||||
'id': "010101",
|
||||
'string_field': 'hello',
|
||||
'int_field': 1,
|
||||
'dict_field': {'hello': 'world'},
|
||||
'list_field': ['1', 2, {'hello': 'world'}]
|
||||
}
|
||||
self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
|
||||
self.assertEqual(doc._delta(),
|
||||
({'embedded_field': embedded_delta}, {}))
|
||||
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
doc.embedded_field.dict_field = {}
|
||||
self.assertEqual(doc._get_changed_fields(),
|
||||
['embedded_field.dict_field'])
|
||||
self.assertEqual(doc.embedded_field._delta(), ({}, {'dict_field': 1}))
|
||||
self.assertEqual(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
self.assertEqual(doc.embedded_field.dict_field, {})
|
||||
|
||||
doc.embedded_field.list_field = []
|
||||
self.assertEqual(doc._get_changed_fields(),
|
||||
['embedded_field.list_field'])
|
||||
self.assertEqual(doc.embedded_field._delta(), ({}, {'list_field': 1}))
|
||||
self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field': 1}))
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
self.assertEqual(doc.embedded_field.list_field, [])
|
||||
|
||||
embedded_2 = Embedded()
|
||||
embedded_2.string_field = 'hello'
|
||||
embedded_2.int_field = 1
|
||||
embedded_2.dict_field = {'hello': 'world'}
|
||||
embedded_2.list_field = ['1', 2, {'hello': 'world'}]
|
||||
|
||||
doc.embedded_field.list_field = ['1', 2, embedded_2]
|
||||
self.assertEqual(doc._get_changed_fields(),
|
||||
['embedded_field.list_field'])
|
||||
|
||||
self.assertEqual(doc.embedded_field._delta(), ({
|
||||
'list_field': ['1', 2, {
|
||||
'_cls': 'Embedded',
|
||||
'string_field': 'hello',
|
||||
'dict_field': {'hello': 'world'},
|
||||
'int_field': 1,
|
||||
'list_field': ['1', 2, {'hello': 'world'}],
|
||||
}]
|
||||
}, {}))
|
||||
|
||||
self.assertEqual(doc._delta(), ({
|
||||
'embedded_field.list_field': ['1', 2, {
|
||||
'_cls': 'Embedded',
|
||||
'string_field': 'hello',
|
||||
'dict_field': {'hello': 'world'},
|
||||
'int_field': 1,
|
||||
'list_field': ['1', 2, {'hello': 'world'}],
|
||||
}]
|
||||
}, {}))
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
self.assertEqual(doc.embedded_field.list_field[0], '1')
|
||||
self.assertEqual(doc.embedded_field.list_field[1], 2)
|
||||
for k in doc.embedded_field.list_field[2]._fields:
|
||||
self.assertEqual(doc.embedded_field.list_field[2][k],
|
||||
embedded_2[k])
|
||||
|
||||
doc.embedded_field.list_field[2].string_field = 'world'
|
||||
self.assertEqual(doc._get_changed_fields(),
|
||||
['embedded_field.list_field.2.string_field'])
|
||||
self.assertEqual(doc.embedded_field._delta(),
|
||||
({'list_field.2.string_field': 'world'}, {}))
|
||||
self.assertEqual(doc._delta(),
|
||||
({'embedded_field.list_field.2.string_field': 'world'}, {}))
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
self.assertEqual(doc.embedded_field.list_field[2].string_field,
|
||||
'world')
|
||||
|
||||
# Test multiple assignments
|
||||
doc.embedded_field.list_field[2].string_field = 'hello world'
|
||||
doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
|
||||
self.assertEqual(doc._get_changed_fields(),
|
||||
['embedded_field.list_field.2'])
|
||||
self.assertEqual(doc.embedded_field._delta(), ({'list_field.2': {
|
||||
'_cls': 'Embedded',
|
||||
'string_field': 'hello world',
|
||||
'int_field': 1,
|
||||
'list_field': ['1', 2, {'hello': 'world'}],
|
||||
'dict_field': {'hello': 'world'}}
|
||||
}, {}))
|
||||
self.assertEqual(doc._delta(), ({'embedded_field.list_field.2': {
|
||||
'_cls': 'Embedded',
|
||||
'string_field': 'hello world',
|
||||
'int_field': 1,
|
||||
'list_field': ['1', 2, {'hello': 'world'}],
|
||||
'dict_field': {'hello': 'world'}}
|
||||
}, {}))
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
self.assertEqual(doc.embedded_field.list_field[2].string_field,
|
||||
'hello world')
|
||||
|
||||
# Test list native methods
|
||||
doc.embedded_field.list_field[2].list_field.pop(0)
|
||||
self.assertEqual(doc._delta(),
|
||||
({'embedded_field.list_field.2.list_field':
|
||||
[2, {'hello': 'world'}]}, {}))
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
doc.embedded_field.list_field[2].list_field.append(1)
|
||||
self.assertEqual(doc._delta(),
|
||||
({'embedded_field.list_field.2.list_field':
|
||||
[2, {'hello': 'world'}, 1]}, {}))
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
self.assertEqual(doc.embedded_field.list_field[2].list_field,
|
||||
[2, {'hello': 'world'}, 1])
|
||||
|
||||
doc.embedded_field.list_field[2].list_field.sort(key=str)
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
self.assertEqual(doc.embedded_field.list_field[2].list_field,
|
||||
[1, 2, {'hello': 'world'}])
|
||||
|
||||
del doc.embedded_field.list_field[2].list_field[2]['hello']
|
||||
self.assertEqual(doc._delta(),
|
||||
({}, {'embedded_field.list_field.2.list_field.2.hello': 1}))
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
del doc.embedded_field.list_field[2].list_field
|
||||
self.assertEqual(doc._delta(),
|
||||
({}, {'embedded_field.list_field.2.list_field': 1}))
|
||||
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
doc.dict_field['Embedded'] = embedded_1
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
doc.dict_field['Embedded'].string_field = 'Hello World'
|
||||
self.assertEqual(doc._get_changed_fields(),
|
||||
['dict_field.Embedded.string_field'])
|
||||
self.assertEqual(doc._delta(),
|
||||
({'dict_field.Embedded.string_field': 'Hello World'}, {}))
|
||||
|
||||
def test_circular_reference_deltas(self):
|
||||
self.circular_reference_deltas(Document, Document)
|
||||
self.circular_reference_deltas(Document, DynamicDocument)
|
||||
self.circular_reference_deltas(DynamicDocument, Document)
|
||||
self.circular_reference_deltas(DynamicDocument, DynamicDocument)
|
||||
|
||||
def circular_reference_deltas(self, DocClass1, DocClass2):
|
||||
|
||||
class Person(DocClass1):
|
||||
name = StringField()
|
||||
owns = ListField(ReferenceField('Organization'))
|
||||
|
||||
class Organization(DocClass2):
|
||||
name = StringField()
|
||||
owner = ReferenceField('Person')
|
||||
|
||||
Person.drop_collection()
|
||||
Organization.drop_collection()
|
||||
|
||||
person = Person(name="owner").save()
|
||||
organization = Organization(name="company").save()
|
||||
|
||||
person.owns.append(organization)
|
||||
organization.owner = person
|
||||
|
||||
person.save()
|
||||
organization.save()
|
||||
|
||||
p = Person.objects[0].select_related()
|
||||
o = Organization.objects.first()
|
||||
self.assertEqual(p.owns[0], o)
|
||||
self.assertEqual(o.owner, p)
|
||||
|
||||
def test_circular_reference_deltas_2(self):
|
||||
self.circular_reference_deltas_2(Document, Document)
|
||||
self.circular_reference_deltas_2(Document, DynamicDocument)
|
||||
self.circular_reference_deltas_2(DynamicDocument, Document)
|
||||
self.circular_reference_deltas_2(DynamicDocument, DynamicDocument)
|
||||
|
||||
def circular_reference_deltas_2(self, DocClass1, DocClass2, dbref=True):
|
||||
|
||||
class Person(DocClass1):
|
||||
name = StringField()
|
||||
owns = ListField(ReferenceField('Organization', dbref=dbref))
|
||||
employer = ReferenceField('Organization', dbref=dbref)
|
||||
|
||||
class Organization(DocClass2):
|
||||
name = StringField()
|
||||
owner = ReferenceField('Person', dbref=dbref)
|
||||
employees = ListField(ReferenceField('Person', dbref=dbref))
|
||||
|
||||
Person.drop_collection()
|
||||
Organization.drop_collection()
|
||||
|
||||
person = Person(name="owner").save()
|
||||
employee = Person(name="employee").save()
|
||||
organization = Organization(name="company").save()
|
||||
|
||||
person.owns.append(organization)
|
||||
organization.owner = person
|
||||
|
||||
organization.employees.append(employee)
|
||||
employee.employer = organization
|
||||
|
||||
person.save()
|
||||
organization.save()
|
||||
employee.save()
|
||||
|
||||
p = Person.objects.get(name="owner")
|
||||
e = Person.objects.get(name="employee")
|
||||
o = Organization.objects.first()
|
||||
|
||||
self.assertEqual(p.owns[0], o)
|
||||
self.assertEqual(o.owner, p)
|
||||
self.assertEqual(e.employer, o)
|
||||
|
||||
return person, organization, employee
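The dbref flag that circular_reference_deltas_2 parametrizes controls the wire format of a stored reference. A hedged illustration using the bson types directly:

# Hypothetical illustration (not from the diff): dbref=True stores a DBRef
# ({"$ref": collection, "$id": id}); dbref=False stores the bare ObjectId.
from bson.dbref import DBRef
from bson.objectid import ObjectId

oid = ObjectId()
as_dbref = DBRef("organization", oid)
assert as_dbref.collection == "organization" and as_dbref.id == oid
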
|
||||
|
||||
def test_delta_db_field(self):
|
||||
self.delta_db_field(Document)
|
||||
self.delta_db_field(DynamicDocument)
|
||||
|
||||
def delta_db_field(self, DocClass):
|
||||
|
||||
class Doc(DocClass):
|
||||
string_field = StringField(db_field='db_string_field')
|
||||
int_field = IntField(db_field='db_int_field')
|
||||
dict_field = DictField(db_field='db_dict_field')
|
||||
list_field = ListField(db_field='db_list_field')
|
||||
|
||||
Doc.drop_collection()
|
||||
doc = Doc()
|
||||
doc.save()
|
||||
|
||||
doc = Doc.objects.first()
|
||||
self.assertEqual(doc._get_changed_fields(), [])
|
||||
self.assertEqual(doc._delta(), ({}, {}))
|
||||
|
||||
doc.string_field = 'hello'
|
||||
self.assertEqual(doc._get_changed_fields(), ['db_string_field'])
|
||||
self.assertEqual(doc._delta(), ({'db_string_field': 'hello'}, {}))
|
||||
|
||||
doc._changed_fields = []
|
||||
doc.int_field = 1
|
||||
self.assertEqual(doc._get_changed_fields(), ['db_int_field'])
|
||||
self.assertEqual(doc._delta(), ({'db_int_field': 1}, {}))
|
||||
|
||||
doc._changed_fields = []
|
||||
dict_value = {'hello': 'world', 'ping': 'pong'}
|
||||
doc.dict_field = dict_value
|
||||
self.assertEqual(doc._get_changed_fields(), ['db_dict_field'])
|
||||
self.assertEqual(doc._delta(), ({'db_dict_field': dict_value}, {}))
|
||||
|
||||
doc._changed_fields = []
|
||||
list_value = ['1', 2, {'hello': 'world'}]
|
||||
doc.list_field = list_value
|
||||
self.assertEqual(doc._get_changed_fields(), ['db_list_field'])
|
||||
self.assertEqual(doc._delta(), ({'db_list_field': list_value}, {}))
|
||||
|
||||
# Test unsetting
|
||||
doc._changed_fields = []
|
||||
doc.dict_field = {}
|
||||
self.assertEqual(doc._get_changed_fields(), ['db_dict_field'])
|
||||
self.assertEqual(doc._delta(), ({}, {'db_dict_field': 1}))
|
||||
|
||||
doc._changed_fields = []
|
||||
doc.list_field = []
|
||||
self.assertEqual(doc._get_changed_fields(), ['db_list_field'])
|
||||
self.assertEqual(doc._delta(), ({}, {'db_list_field': 1}))
|
||||
|
||||
# Test it saves that data
|
||||
doc = Doc()
|
||||
doc.save()
|
||||
|
||||
doc.string_field = 'hello'
|
||||
doc.int_field = 1
|
||||
doc.dict_field = {'hello': 'world'}
|
||||
doc.list_field = ['1', 2, {'hello': 'world'}]
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
self.assertEqual(doc.string_field, 'hello')
|
||||
self.assertEqual(doc.int_field, 1)
|
||||
self.assertEqual(doc.dict_field, {'hello': 'world'})
|
||||
self.assertEqual(doc.list_field, ['1', 2, {'hello': 'world'}])
|
||||
|
||||
def test_delta_recursive_db_field(self):
|
||||
self.delta_recursive_db_field(Document, EmbeddedDocument)
|
||||
self.delta_recursive_db_field(Document, DynamicEmbeddedDocument)
|
||||
self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument)
|
||||
self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument)
|
||||
|
||||
def delta_recursive_db_field(self, DocClass, EmbeddedClass):
|
||||
|
||||
class Embedded(EmbeddedClass):
|
||||
string_field = StringField(db_field='db_string_field')
|
||||
int_field = IntField(db_field='db_int_field')
|
||||
dict_field = DictField(db_field='db_dict_field')
|
||||
list_field = ListField(db_field='db_list_field')
|
||||
|
||||
class Doc(DocClass):
|
||||
string_field = StringField(db_field='db_string_field')
|
||||
int_field = IntField(db_field='db_int_field')
|
||||
dict_field = DictField(db_field='db_dict_field')
|
||||
list_field = ListField(db_field='db_list_field')
|
||||
embedded_field = EmbeddedDocumentField(Embedded,
|
||||
db_field='db_embedded_field')
|
||||
|
||||
Doc.drop_collection()
|
||||
doc = Doc()
|
||||
doc.save()
|
||||
|
||||
doc = Doc.objects.first()
|
||||
self.assertEqual(doc._get_changed_fields(), [])
|
||||
self.assertEqual(doc._delta(), ({}, {}))
|
||||
|
||||
embedded_1 = Embedded()
|
||||
embedded_1.string_field = 'hello'
|
||||
embedded_1.int_field = 1
|
||||
embedded_1.dict_field = {'hello': 'world'}
|
||||
embedded_1.list_field = ['1', 2, {'hello': 'world'}]
|
||||
doc.embedded_field = embedded_1
|
||||
|
||||
self.assertEqual(doc._get_changed_fields(), ['db_embedded_field'])
|
||||
|
||||
embedded_delta = {
|
||||
'db_string_field': 'hello',
|
||||
'db_int_field': 1,
|
||||
'db_dict_field': {'hello': 'world'},
|
||||
'db_list_field': ['1', 2, {'hello': 'world'}]
|
||||
}
|
||||
self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
|
||||
self.assertEqual(doc._delta(),
|
||||
({'db_embedded_field': embedded_delta}, {}))
|
||||
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
doc.embedded_field.dict_field = {}
|
||||
self.assertEqual(doc._get_changed_fields(),
|
||||
['db_embedded_field.db_dict_field'])
|
||||
self.assertEqual(doc.embedded_field._delta(),
|
||||
({}, {'db_dict_field': 1}))
|
||||
self.assertEqual(doc._delta(),
|
||||
({}, {'db_embedded_field.db_dict_field': 1}))
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
self.assertEqual(doc.embedded_field.dict_field, {})
|
||||
|
||||
doc.embedded_field.list_field = []
|
||||
self.assertEqual(doc._get_changed_fields(),
|
||||
['db_embedded_field.db_list_field'])
|
||||
self.assertEqual(doc.embedded_field._delta(),
|
||||
({}, {'db_list_field': 1}))
|
||||
self.assertEqual(doc._delta(),
|
||||
({}, {'db_embedded_field.db_list_field': 1}))
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
self.assertEqual(doc.embedded_field.list_field, [])
|
||||
|
||||
embedded_2 = Embedded()
|
||||
embedded_2.string_field = 'hello'
|
||||
embedded_2.int_field = 1
|
||||
embedded_2.dict_field = {'hello': 'world'}
|
||||
embedded_2.list_field = ['1', 2, {'hello': 'world'}]
|
||||
|
||||
doc.embedded_field.list_field = ['1', 2, embedded_2]
|
||||
self.assertEqual(doc._get_changed_fields(),
|
||||
['db_embedded_field.db_list_field'])
|
||||
self.assertEqual(doc.embedded_field._delta(), ({
|
||||
'db_list_field': ['1', 2, {
|
||||
'_cls': 'Embedded',
|
||||
'db_string_field': 'hello',
|
||||
'db_dict_field': {'hello': 'world'},
|
||||
'db_int_field': 1,
|
||||
'db_list_field': ['1', 2, {'hello': 'world'}],
|
||||
}]
|
||||
}, {}))
|
||||
|
||||
self.assertEqual(doc._delta(), ({
|
||||
'db_embedded_field.db_list_field': ['1', 2, {
|
||||
'_cls': 'Embedded',
|
||||
'db_string_field': 'hello',
|
||||
'db_dict_field': {'hello': 'world'},
|
||||
'db_int_field': 1,
|
||||
'db_list_field': ['1', 2, {'hello': 'world'}],
|
||||
}]
|
||||
}, {}))
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
self.assertEqual(doc.embedded_field.list_field[0], '1')
|
||||
self.assertEqual(doc.embedded_field.list_field[1], 2)
|
||||
for k in doc.embedded_field.list_field[2]._fields:
|
||||
self.assertEqual(doc.embedded_field.list_field[2][k],
|
||||
embedded_2[k])
|
||||
|
||||
doc.embedded_field.list_field[2].string_field = 'world'
|
||||
self.assertEqual(doc._get_changed_fields(),
|
||||
['db_embedded_field.db_list_field.2.db_string_field'])
|
||||
self.assertEqual(doc.embedded_field._delta(),
|
||||
({'db_list_field.2.db_string_field': 'world'}, {}))
|
||||
self.assertEqual(doc._delta(),
|
||||
({'db_embedded_field.db_list_field.2.db_string_field': 'world'},
|
||||
{}))
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
self.assertEqual(doc.embedded_field.list_field[2].string_field,
|
||||
'world')
|
||||
|
||||
# Test multiple assignments
|
||||
doc.embedded_field.list_field[2].string_field = 'hello world'
|
||||
doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
|
||||
self.assertEqual(doc._get_changed_fields(),
|
||||
['db_embedded_field.db_list_field.2'])
|
||||
self.assertEqual(doc.embedded_field._delta(), ({'db_list_field.2': {
|
||||
'_cls': 'Embedded',
|
||||
'db_string_field': 'hello world',
|
||||
'db_int_field': 1,
|
||||
'db_list_field': ['1', 2, {'hello': 'world'}],
|
||||
'db_dict_field': {'hello': 'world'}}}, {}))
|
||||
self.assertEqual(doc._delta(), ({
|
||||
'db_embedded_field.db_list_field.2': {
|
||||
'_cls': 'Embedded',
|
||||
'db_string_field': 'hello world',
|
||||
'db_int_field': 1,
|
||||
'db_list_field': ['1', 2, {'hello': 'world'}],
|
||||
'db_dict_field': {'hello': 'world'}}
|
||||
}, {}))
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
self.assertEqual(doc.embedded_field.list_field[2].string_field,
|
||||
'hello world')
|
||||
|
||||
# Test list native methods
|
||||
doc.embedded_field.list_field[2].list_field.pop(0)
|
||||
self.assertEqual(doc._delta(),
|
||||
({'db_embedded_field.db_list_field.2.db_list_field':
|
||||
[2, {'hello': 'world'}]}, {}))
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
doc.embedded_field.list_field[2].list_field.append(1)
|
||||
self.assertEqual(doc._delta(),
|
||||
({'db_embedded_field.db_list_field.2.db_list_field':
|
||||
[2, {'hello': 'world'}, 1]}, {}))
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
self.assertEqual(doc.embedded_field.list_field[2].list_field,
|
||||
[2, {'hello': 'world'}, 1])
|
||||
|
||||
doc.embedded_field.list_field[2].list_field.sort(key=str)
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
self.assertEqual(doc.embedded_field.list_field[2].list_field,
|
||||
[1, 2, {'hello': 'world'}])
|
||||
|
||||
del doc.embedded_field.list_field[2].list_field[2]['hello']
|
||||
self.assertEqual(doc._delta(),
|
||||
({}, {'db_embedded_field.db_list_field.2.db_list_field.2.hello': 1}))
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
del doc.embedded_field.list_field[2].list_field
|
||||
self.assertEqual(doc._delta(), ({},
|
||||
{'db_embedded_field.db_list_field.2.db_list_field': 1}))
|
||||
|
||||
def test_delta_for_dynamic_documents(self):
|
||||
class Person(DynamicDocument):
|
||||
name = StringField()
|
||||
meta = {'allow_inheritance': True}
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
p = Person(name="James", age=34)
|
||||
self.assertEqual(p._delta(), (
|
||||
SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))
|
||||
|
||||
p.doc = 123
|
||||
del p.doc
|
||||
self.assertEqual(p._delta(), (
|
||||
SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))
|
||||
|
||||
p = Person()
|
||||
p.name = "Dean"
|
||||
p.age = 22
|
||||
p.save()
|
||||
|
||||
p.age = 24
|
||||
self.assertEqual(p.age, 24)
|
||||
self.assertEqual(p._get_changed_fields(), ['age'])
|
||||
self.assertEqual(p._delta(), ({'age': 24}, {}))
|
||||
|
||||
p = Person.objects(age=22).get()
|
||||
p.age = 24
|
||||
self.assertEqual(p.age, 24)
|
||||
self.assertEqual(p._get_changed_fields(), ['age'])
|
||||
self.assertEqual(p._delta(), ({'age': 24}, {}))
|
||||
|
||||
p.save()
|
||||
self.assertEqual(1, Person.objects(age=24).count())
|
||||
|
||||
def test_dynamic_delta(self):
|
||||
|
||||
class Doc(DynamicDocument):
|
||||
pass
|
||||
|
||||
Doc.drop_collection()
|
||||
doc = Doc()
|
||||
doc.save()
|
||||
|
||||
doc = Doc.objects.first()
|
||||
self.assertEqual(doc._get_changed_fields(), [])
|
||||
self.assertEqual(doc._delta(), ({}, {}))
|
||||
|
||||
doc.string_field = 'hello'
|
||||
self.assertEqual(doc._get_changed_fields(), ['string_field'])
|
||||
self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {}))
|
||||
|
||||
doc._changed_fields = []
|
||||
doc.int_field = 1
|
||||
self.assertEqual(doc._get_changed_fields(), ['int_field'])
|
||||
self.assertEqual(doc._delta(), ({'int_field': 1}, {}))
|
||||
|
||||
doc._changed_fields = []
|
||||
dict_value = {'hello': 'world', 'ping': 'pong'}
|
||||
doc.dict_field = dict_value
|
||||
self.assertEqual(doc._get_changed_fields(), ['dict_field'])
|
||||
self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {}))
|
||||
|
||||
doc._changed_fields = []
|
||||
list_value = ['1', 2, {'hello': 'world'}]
|
||||
doc.list_field = list_value
|
||||
self.assertEqual(doc._get_changed_fields(), ['list_field'])
|
||||
self.assertEqual(doc._delta(), ({'list_field': list_value}, {}))
|
||||
|
||||
# Test unsetting
|
||||
doc._changed_fields = []
|
||||
doc.dict_field = {}
|
||||
self.assertEqual(doc._get_changed_fields(), ['dict_field'])
|
||||
self.assertEqual(doc._delta(), ({}, {'dict_field': 1}))
|
||||
|
||||
doc._changed_fields = []
|
||||
doc.list_field = []
|
||||
self.assertEqual(doc._get_changed_fields(), ['list_field'])
|
||||
self.assertEqual(doc._delta(), ({}, {'list_field': 1}))
|
||||
|
||||
def test_delta_with_dbref_true(self):
|
||||
person, organization, employee = self.circular_reference_deltas_2(Document, Document, True)
|
||||
employee.name = 'test'
|
||||
|
||||
self.assertEqual(organization._get_changed_fields(), [])
|
||||
|
||||
updates, removals = organization._delta()
|
||||
self.assertEqual({}, removals)
|
||||
self.assertEqual({}, updates)
|
||||
|
||||
organization.employees.append(person)
|
||||
updates, removals = organization._delta()
|
||||
self.assertEqual({}, removals)
|
||||
self.assertTrue('employees' in updates)
|
||||
|
||||
def test_delta_with_dbref_false(self):
|
||||
person, organization, employee = self.circular_reference_deltas_2(Document, Document, False)
|
||||
employee.name = 'test'
|
||||
|
||||
self.assertEqual(organization._get_changed_fields(), [])
|
||||
|
||||
updates, removals = organization._delta()
|
||||
self.assertEqual({}, removals)
|
||||
self.assertEqual({}, updates)
|
||||
|
||||
organization.employees.append(person)
|
||||
updates, removals = organization._delta()
|
||||
self.assertEqual({}, removals)
|
||||
self.assertTrue('employees' in updates)
|
||||
|
||||
def test_nested_nested_fields_mark_as_changed(self):
|
||||
class EmbeddedDoc(EmbeddedDocument):
|
||||
name = StringField()
|
||||
|
||||
class MyDoc(Document):
|
||||
subs = MapField(MapField(EmbeddedDocumentField(EmbeddedDoc)))
|
||||
name = StringField()
|
||||
|
||||
MyDoc.drop_collection()
|
||||
|
||||
mydoc = MyDoc(name='testcase1', subs={'a': {'b': EmbeddedDoc(name='foo')}}).save()
|
||||
|
||||
mydoc = MyDoc.objects.first()
|
||||
subdoc = mydoc.subs['a']['b']
|
||||
subdoc.name = 'bar'
|
||||
|
||||
self.assertEqual(["name"], subdoc._get_changed_fields())
|
||||
self.assertEqual(["subs.a.b.name"], mydoc._get_changed_fields())
|
||||
|
||||
mydoc._clear_changed_fields()
|
||||
self.assertEqual([], mydoc._get_changed_fields())
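The dotted paths asserted above are the point of the change tracker: a nested edit produces one targeted path rather than rewriting the whole map. A hedged sketch of the same structure, assuming a reachable test database and that the internal _get_changed_fields API behaves as in this test:

# Hypothetical sketch (not from the diff): a nested MapField edit is tracked
# as a single dotted path, so save() can issue a targeted $set.
from mongoengine import (Document, EmbeddedDocument, EmbeddedDocumentField,
                         MapField, StringField, connect)

connect(db="mongoenginetest")

class Setting(EmbeddedDocument):
    value = StringField()

class Config(Document):
    sections = MapField(MapField(EmbeddedDocumentField(Setting)))

Config.drop_collection()
Config(sections={"ui": {"theme": Setting(value="dark")}}).save()

cfg = Config.objects.first()
cfg.sections["ui"]["theme"].value = "light"
assert cfg._get_changed_fields() == ["sections.ui.theme.value"]
cfg.save()  # roughly {"$set": {"sections.ui.theme.value": "light"}}
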
|
||||
|
||||
def test_lower_level_mark_as_changed(self):
|
||||
class EmbeddedDoc(EmbeddedDocument):
|
||||
name = StringField()
|
||||
|
||||
class MyDoc(Document):
|
||||
subs = MapField(EmbeddedDocumentField(EmbeddedDoc))
|
||||
|
||||
MyDoc.drop_collection()
|
||||
|
||||
MyDoc().save()
|
||||
|
||||
mydoc = MyDoc.objects.first()
|
||||
mydoc.subs['a'] = EmbeddedDoc()
|
||||
self.assertEqual(["subs.a"], mydoc._get_changed_fields())
|
||||
|
||||
subdoc = mydoc.subs['a']
|
||||
subdoc.name = 'bar'
|
||||
|
||||
self.assertEqual(["name"], subdoc._get_changed_fields())
|
||||
self.assertEqual(["subs.a"], mydoc._get_changed_fields())
|
||||
mydoc.save()
|
||||
|
||||
mydoc._clear_changed_fields()
|
||||
self.assertEqual([], mydoc._get_changed_fields())
|
||||
|
||||
def test_upper_level_mark_as_changed(self):
|
||||
class EmbeddedDoc(EmbeddedDocument):
|
||||
name = StringField()
|
||||
|
||||
class MyDoc(Document):
|
||||
subs = MapField(EmbeddedDocumentField(EmbeddedDoc))
|
||||
|
||||
MyDoc.drop_collection()
|
||||
|
||||
MyDoc(subs={'a': EmbeddedDoc(name='foo')}).save()
|
||||
|
||||
mydoc = MyDoc.objects.first()
|
||||
subdoc = mydoc.subs['a']
|
||||
subdoc.name = 'bar'
|
||||
|
||||
self.assertEqual(["name"], subdoc._get_changed_fields())
|
||||
self.assertEqual(["subs.a.name"], mydoc._get_changed_fields())
|
||||
|
||||
mydoc.subs['a'] = EmbeddedDoc()
|
||||
self.assertEqual(["subs.a"], mydoc._get_changed_fields())
|
||||
mydoc.save()
|
||||
|
||||
mydoc._clear_changed_fields()
|
||||
self.assertEqual([], mydoc._get_changed_fields())
|
||||
|
||||
def test_referenced_object_changed_attributes(self):
|
||||
"""Ensures that when you save a new reference to a field, the referenced object isn't altered"""
|
||||
|
||||
class Organization(Document):
|
||||
name = StringField()
|
||||
|
||||
class User(Document):
|
||||
name = StringField()
|
||||
org = ReferenceField('Organization', required=True)
|
||||
|
||||
Organization.drop_collection()
|
||||
User.drop_collection()
|
||||
|
||||
org1 = Organization(name='Org 1')
|
||||
org1.save()
|
||||
|
||||
org2 = Organization(name='Org 2')
|
||||
org2.save()
|
||||
|
||||
user = User(name='Fred', org=org1)
|
||||
user.save()
|
||||
|
||||
org1.reload()
|
||||
org2.reload()
|
||||
user.reload()
|
||||
self.assertEqual(org1.name, 'Org 1')
|
||||
self.assertEqual(org2.name, 'Org 2')
|
||||
self.assertEqual(user.name, 'Fred')
|
||||
|
||||
user.name = 'Harold'
|
||||
user.org = org2
|
||||
|
||||
org2.name = 'New Org 2'
|
||||
self.assertEqual(org2.name, 'New Org 2')
|
||||
|
||||
user.save()
|
||||
org2.save()
|
||||
|
||||
self.assertEqual(org2.name, 'New Org 2')
|
||||
org2.reload()
|
||||
self.assertEqual(org2.name, 'New Org 2')
|
||||
|
||||
def test_delta_for_nested_map_fields(self):
|
||||
class UInfoDocument(Document):
|
||||
phone = StringField()
|
||||
|
||||
class EmbeddedRole(EmbeddedDocument):
|
||||
type = StringField()
|
||||
|
||||
class EmbeddedUser(EmbeddedDocument):
|
||||
name = StringField()
|
||||
roles = MapField(field=EmbeddedDocumentField(EmbeddedRole))
|
||||
rolist = ListField(field=EmbeddedDocumentField(EmbeddedRole))
|
||||
info = ReferenceField(UInfoDocument)
|
||||
|
||||
class Doc(Document):
|
||||
users = MapField(field=EmbeddedDocumentField(EmbeddedUser))
|
||||
num = IntField(default=-1)
|
||||
|
||||
Doc.drop_collection()
|
||||
|
||||
doc = Doc(num=1)
|
||||
doc.users["007"] = EmbeddedUser(name="Agent007")
|
||||
doc.save()
|
||||
|
||||
uinfo = UInfoDocument(phone="79089269066")
|
||||
uinfo.save()
|
||||
|
||||
d = Doc.objects(num=1).first()
|
||||
d.users["007"]["roles"]["666"] = EmbeddedRole(type="superadmin")
|
||||
d.users["007"]["rolist"].append(EmbeddedRole(type="oops"))
|
||||
d.users["007"]["info"] = uinfo
|
||||
delta = d._delta()
|
||||
self.assertEqual(True, "users.007.roles.666" in delta[0])
|
||||
self.assertEqual(True, "users.007.rolist" in delta[0])
|
||||
self.assertEqual(True, "users.007.info" in delta[0])
|
||||
self.assertEqual('superadmin', delta[0]["users.007.roles.666"]["type"])
|
||||
self.assertEqual('oops', delta[0]["users.007.rolist"][0]["type"])
|
||||
self.assertEqual(uinfo.id, delta[0]["users.007.info"])
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
File diff suppressed because it is too large
@@ -1,512 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import unittest
|
||||
import warnings
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from tests.fixtures import Base
|
||||
|
||||
from mongoengine import Document, EmbeddedDocument, connect
|
||||
from mongoengine.connection import get_db
|
||||
from mongoengine.fields import (BooleanField, GenericReferenceField,
|
||||
IntField, StringField)
|
||||
|
||||
__all__ = ('InheritanceTest', )
|
||||
|
||||
|
||||
class InheritanceTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
connect(db='mongoenginetest')
|
||||
self.db = get_db()
|
||||
|
||||
def tearDown(self):
|
||||
for collection in self.db.collection_names():
|
||||
if 'system.' in collection:
|
||||
continue
|
||||
self.db.drop_collection(collection)
|
||||
|
||||
def test_superclasses(self):
|
||||
"""Ensure that the correct list of superclasses is assembled.
|
||||
"""
|
||||
class Animal(Document):
|
||||
meta = {'allow_inheritance': True}
|
||||
class Fish(Animal): pass
|
||||
class Guppy(Fish): pass
|
||||
class Mammal(Animal): pass
|
||||
class Dog(Mammal): pass
|
||||
class Human(Mammal): pass
|
||||
|
||||
self.assertEqual(Animal._superclasses, ())
|
||||
self.assertEqual(Fish._superclasses, ('Animal',))
|
||||
self.assertEqual(Guppy._superclasses, ('Animal', 'Animal.Fish'))
|
||||
self.assertEqual(Mammal._superclasses, ('Animal',))
|
||||
self.assertEqual(Dog._superclasses, ('Animal', 'Animal.Mammal'))
|
||||
self.assertEqual(Human._superclasses, ('Animal', 'Animal.Mammal'))
|
||||
|
||||
def test_external_superclasses(self):
|
||||
"""Ensure that the correct list of super classes is assembled when
|
||||
importing part of the model.
|
||||
"""
|
||||
class Animal(Base): pass
|
||||
class Fish(Animal): pass
|
||||
class Guppy(Fish): pass
|
||||
class Mammal(Animal): pass
|
||||
class Dog(Mammal): pass
|
||||
class Human(Mammal): pass
|
||||
|
||||
self.assertEqual(Animal._superclasses, ('Base', ))
|
||||
self.assertEqual(Fish._superclasses, ('Base', 'Base.Animal',))
|
||||
self.assertEqual(Guppy._superclasses, ('Base', 'Base.Animal',
|
||||
'Base.Animal.Fish'))
|
||||
self.assertEqual(Mammal._superclasses, ('Base', 'Base.Animal',))
|
||||
self.assertEqual(Dog._superclasses, ('Base', 'Base.Animal',
|
||||
'Base.Animal.Mammal'))
|
||||
self.assertEqual(Human._superclasses, ('Base', 'Base.Animal',
|
||||
'Base.Animal.Mammal'))
|
||||
|
||||
def test_subclasses(self):
|
||||
"""Ensure that the correct list of _subclasses (subclasses) is
|
||||
assembled.
|
||||
"""
|
||||
class Animal(Document):
|
||||
meta = {'allow_inheritance': True}
|
||||
class Fish(Animal): pass
|
||||
class Guppy(Fish): pass
|
||||
class Mammal(Animal): pass
|
||||
class Dog(Mammal): pass
|
||||
class Human(Mammal): pass
|
||||
|
||||
self.assertEqual(Animal._subclasses, ('Animal',
|
||||
'Animal.Fish',
|
||||
'Animal.Fish.Guppy',
|
||||
'Animal.Mammal',
|
||||
'Animal.Mammal.Dog',
|
||||
'Animal.Mammal.Human'))
|
||||
self.assertEqual(Fish._subclasses, ('Animal.Fish',
|
||||
'Animal.Fish.Guppy',))
|
||||
self.assertEqual(Guppy._subclasses, ('Animal.Fish.Guppy',))
|
||||
self.assertEqual(Mammal._subclasses, ('Animal.Mammal',
|
||||
'Animal.Mammal.Dog',
|
||||
'Animal.Mammal.Human'))
|
||||
self.assertEqual(Human._subclasses, ('Animal.Mammal.Human',))
|
||||
|
||||
def test_external_subclasses(self):
|
||||
"""Ensure that the correct list of _subclasses (subclasses) is
|
||||
assembled when importing part of the model.
|
||||
"""
|
||||
class Animal(Base): pass
|
||||
class Fish(Animal): pass
|
||||
class Guppy(Fish): pass
|
||||
class Mammal(Animal): pass
|
||||
class Dog(Mammal): pass
|
||||
class Human(Mammal): pass
|
||||
|
||||
self.assertEqual(Animal._subclasses, ('Base.Animal',
|
||||
'Base.Animal.Fish',
|
||||
'Base.Animal.Fish.Guppy',
|
||||
'Base.Animal.Mammal',
|
||||
'Base.Animal.Mammal.Dog',
|
||||
'Base.Animal.Mammal.Human'))
|
||||
self.assertEqual(Fish._subclasses, ('Base.Animal.Fish',
|
||||
'Base.Animal.Fish.Guppy',))
|
||||
self.assertEqual(Guppy._subclasses, ('Base.Animal.Fish.Guppy',))
|
||||
self.assertEqual(Mammal._subclasses, ('Base.Animal.Mammal',
|
||||
'Base.Animal.Mammal.Dog',
|
||||
'Base.Animal.Mammal.Human'))
|
||||
self.assertEqual(Human._subclasses, ('Base.Animal.Mammal.Human',))
|
||||
|
||||
def test_dynamic_declarations(self):
|
||||
"""Test that declaring an extra class updates meta data"""
|
||||
|
||||
class Animal(Document):
|
||||
meta = {'allow_inheritance': True}
|
||||
|
||||
self.assertEqual(Animal._superclasses, ())
|
||||
self.assertEqual(Animal._subclasses, ('Animal',))
|
||||
|
||||
# Test dynamically adding a class changes the meta data
|
||||
class Fish(Animal):
|
||||
pass
|
||||
|
||||
self.assertEqual(Animal._superclasses, ())
|
||||
self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish'))
|
||||
|
||||
self.assertEqual(Fish._superclasses, ('Animal', ))
|
||||
self.assertEqual(Fish._subclasses, ('Animal.Fish',))
|
||||
|
||||
# Test dynamically adding an inherited class changes the meta data
|
||||
class Pike(Fish):
|
||||
pass
|
||||
|
||||
self.assertEqual(Animal._superclasses, ())
|
||||
self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish',
|
||||
'Animal.Fish.Pike'))
|
||||
|
||||
self.assertEqual(Fish._superclasses, ('Animal', ))
|
||||
self.assertEqual(Fish._subclasses, ('Animal.Fish', 'Animal.Fish.Pike'))
|
||||
|
||||
self.assertEqual(Pike._superclasses, ('Animal', 'Animal.Fish'))
|
||||
self.assertEqual(Pike._subclasses, ('Animal.Fish.Pike',))
|
||||
|
||||
def test_inheritance_meta_data(self):
|
||||
"""Ensure that document may inherit fields from a superclass document.
|
||||
"""
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
|
||||
meta = {'allow_inheritance': True}
|
||||
|
||||
class Employee(Person):
|
||||
salary = IntField()
|
||||
|
||||
self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'],
|
||||
sorted(Employee._fields.keys()))
|
||||
self.assertEqual(Employee._get_collection_name(),
|
||||
Person._get_collection_name())
|
||||
|
||||
def test_inheritance_to_mongo_keys(self):
|
||||
"""Ensure that document may inherit fields from a superclass document.
|
||||
"""
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
|
||||
meta = {'allow_inheritance': True}
|
||||
|
||||
class Employee(Person):
|
||||
salary = IntField()
|
||||
|
||||
self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'],
|
||||
sorted(Employee._fields.keys()))
|
||||
self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(),
|
||||
['_cls', 'name', 'age'])
|
||||
self.assertEqual(Employee(name="Bob", age=35, salary=0).to_mongo().keys(),
|
||||
['_cls', 'name', 'age', 'salary'])
|
||||
self.assertEqual(Employee._get_collection_name(),
|
||||
Person._get_collection_name())
|
||||
|
||||
    def test_indexes_and_multiple_inheritance(self):
        """Ensure that all of the indexes are created for a document with
        multiple inheritance.
        """
        class A(Document):
            a = StringField()

            meta = {
                'allow_inheritance': True,
                'indexes': ['a']
            }

        class B(Document):
            b = StringField()

            meta = {
                'allow_inheritance': True,
                'indexes': ['b']
            }

        class C(A, B):
            pass

        A.drop_collection()
        B.drop_collection()
        C.drop_collection()

        C.ensure_indexes()

        self.assertEqual(
            sorted([idx['key'] for idx in C._get_collection().index_information().values()]),
            sorted([[(u'_cls', 1), (u'b', 1)], [(u'_id', 1)], [(u'_cls', 1), (u'a', 1)]])
        )
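        # The _cls prefix in the expected keys is expected behaviour: with
        # allow_inheritance enabled, declared indexes are compounded with
        # _cls so that subclass queries (which filter on _cls) can use them.
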
    def test_polymorphic_queries(self):
        """Ensure that the correct subclasses are returned from a query
        """
        class Animal(Document):
            meta = {'allow_inheritance': True}

        class Fish(Animal): pass
        class Mammal(Animal): pass
        class Dog(Mammal): pass
        class Human(Mammal): pass

        Animal.drop_collection()

        Animal().save()
        Fish().save()
        Mammal().save()
        Dog().save()
        Human().save()

        classes = [obj.__class__ for obj in Animal.objects]
        self.assertEqual(classes, [Animal, Fish, Mammal, Dog, Human])

        classes = [obj.__class__ for obj in Mammal.objects]
        self.assertEqual(classes, [Mammal, Dog, Human])

        classes = [obj.__class__ for obj in Human.objects]
        self.assertEqual(classes, [Human])
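        # Each query above implicitly filters on _cls, roughly
        # {'_cls': {'$in': cls._subclasses}} (approximate shape, shown
        # for illustration only), which is what scopes results per class.
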
    def test_allow_inheritance(self):
        """Ensure that inheritance is disabled by default on simple
        classes and that _cls will not be used.
        """
        class Animal(Document):
            name = StringField()

        # can't inherit because Animal didn't explicitly allow inheritance
        with self.assertRaises(ValueError):
            class Dog(Animal):
                pass

        # Check that _cls etc aren't present on simple documents
        dog = Animal(name='dog').save()
        self.assertEqual(dog.to_mongo().keys(), ['_id', 'name'])

        collection = self.db[Animal._get_collection_name()]
        obj = collection.find_one()
        self.assertFalse('_cls' in obj)

    def test_cant_turn_off_inheritance_on_subclass(self):
        """Ensure that if inheritance is enabled on a class, a subclass
        can't turn it off.
        """
        class Animal(Document):
            name = StringField()
            meta = {'allow_inheritance': True}

        with self.assertRaises(ValueError):
            class Mammal(Animal):
                meta = {'allow_inheritance': False}

    def test_allow_inheritance_abstract_document(self):
        """Ensure that abstract documents can set inheritance rules and that
        _cls will not be used.
        """
        class FinalDocument(Document):
            meta = {'abstract': True,
                    'allow_inheritance': False}

        class Animal(FinalDocument):
            name = StringField()

        with self.assertRaises(ValueError):
            class Mammal(Animal):
                pass

        # Check that _cls isn't present in simple documents
        doc = Animal(name='dog')
        self.assertFalse('_cls' in doc.to_mongo())

    def test_abstract_handle_ids_in_metaclass_properly(self):

        class City(Document):
            continent = StringField()
            meta = {'abstract': True,
                    'allow_inheritance': False}

        class EuropeanCity(City):
            name = StringField()

        berlin = EuropeanCity(name='Berlin', continent='Europe')
        self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
        self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
        self.assertEqual(len(berlin._fields_ordered), 3)
        self.assertEqual(berlin._fields_ordered[0], 'id')
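        # The three ordered fields are the auto-generated 'id' plus
        # 'continent' and 'name'; the auto id is always placed first.
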
    def test_auto_id_not_set_if_specific_in_parent_class(self):

        class City(Document):
            continent = StringField()
            city_id = IntField(primary_key=True)
            meta = {'abstract': True,
                    'allow_inheritance': False}

        class EuropeanCity(City):
            name = StringField()

        berlin = EuropeanCity(name='Berlin', continent='Europe')
        self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
        self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
        self.assertEqual(len(berlin._fields_ordered), 3)
        self.assertEqual(berlin._fields_ordered[0], 'city_id')

    def test_auto_id_vs_non_pk_id_field(self):

        class City(Document):
            continent = StringField()
            id = IntField()
            meta = {'abstract': True,
                    'allow_inheritance': False}

        class EuropeanCity(City):
            name = StringField()

        berlin = EuropeanCity(name='Berlin', continent='Europe')
        self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
        self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
        self.assertEqual(len(berlin._fields_ordered), 4)
        self.assertEqual(berlin._fields_ordered[0], 'auto_id_0')
        berlin.save()
        self.assertEqual(berlin.pk, berlin.auto_id_0)
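        # Because the user-defined 'id' is not a primary key, a primary key
        # is still auto-generated under the name 'auto_id_0' (the suffix is
        # assumed to increment if the name is taken), and .pk maps to it.
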
    def test_abstract_document_creation_does_not_fail(self):
        class City(Document):
            continent = StringField()
            meta = {'abstract': True,
                    'allow_inheritance': False}

        bkk = City(continent='asia')
        self.assertEqual(None, bkk.pk)
        # TODO: expected error? Shouldn't we create a new error type?
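        # An abstract document defines no id field, so there is no backing
        # field for pk to assign to -- hence the KeyError below.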
        with self.assertRaises(KeyError):
            setattr(bkk, 'pk', 1)

    def test_allow_inheritance_embedded_document(self):
        """Ensure embedded documents respect inheritance."""
        class Comment(EmbeddedDocument):
            content = StringField()

        with self.assertRaises(ValueError):
            class SpecialComment(Comment):
                pass

        doc = Comment(content='test')
        self.assertFalse('_cls' in doc.to_mongo())

        class Comment(EmbeddedDocument):
            content = StringField()
            meta = {'allow_inheritance': True}

        doc = Comment(content='test')
        self.assertTrue('_cls' in doc.to_mongo())

    def test_document_inheritance(self):
        """Ensure multiple inheritance of abstract documents works
        """
        class DateCreatedDocument(Document):
            meta = {
                'allow_inheritance': True,
                'abstract': True,
            }

        class DateUpdatedDocument(Document):
            meta = {
                'allow_inheritance': True,
                'abstract': True,
            }

        try:
            class MyDocument(DateCreatedDocument, DateUpdatedDocument):
                pass
        except Exception:
            self.fail("Couldn't create MyDocument class")

    def test_abstract_documents(self):
        """Ensure that a document superclass can be marked as abstract
        thereby not using it as the name for the collection."""

        defaults = {'index_background': True,
                    'index_drop_dups': True,
                    'index_opts': {'hello': 'world'},
                    'allow_inheritance': True,
                    'queryset_class': 'QuerySet',
                    'db_alias': 'myDB',
                    'shard_key': ('hello', 'world')}

        meta_settings = {'abstract': True}
        meta_settings.update(defaults)

        class Animal(Document):
            name = StringField()
            meta = meta_settings

        class Fish(Animal): pass
        class Guppy(Fish): pass

        class Mammal(Animal):
            meta = {'abstract': True}
        class Human(Mammal): pass

        for k, v in defaults.items():
            for cls in [Animal, Fish, Guppy]:
                self.assertEqual(cls._meta[k], v)

        self.assertFalse('collection' in Animal._meta)
        self.assertFalse('collection' in Mammal._meta)

        self.assertEqual(Animal._get_collection_name(), None)
        self.assertEqual(Mammal._get_collection_name(), None)

        self.assertEqual(Fish._get_collection_name(), 'fish')
        self.assertEqual(Guppy._get_collection_name(), 'fish')
        self.assertEqual(Human._get_collection_name(), 'human')

        # ensure that a subclass of a non-abstract class can't be abstract
        with self.assertRaises(ValueError):
            class EvilHuman(Human):
                evil = BooleanField(default=True)
                meta = {'abstract': True}

    def test_abstract_embedded_documents(self):
        # 789: EmbeddedDocument shouldn't inherit abstract
        class A(EmbeddedDocument):
            meta = {"abstract": True}

        class B(A):
            pass

        self.assertFalse(B._meta["abstract"])

    def test_inherited_collections(self):
        """Ensure that subclassed documents don't override parents'
        collections
        """
        class Drink(Document):
            name = StringField()
            meta = {'allow_inheritance': True}

        class Drinker(Document):
            drink = GenericReferenceField()
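
        # Redefining 'collection' in a subclass of a non-abstract Drink
        # triggers a SyntaxWarning; escalating warnings to errors lets the
        # test assert that the warning is actually raised.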
        try:
            warnings.simplefilter("error")

            class AlcoholicDrink(Drink):
                meta = {'collection': 'booze'}

        except SyntaxWarning:
            warnings.simplefilter("ignore")

            class AlcoholicDrink(Drink):
                meta = {'collection': 'booze'}

        else:
            raise AssertionError("SyntaxWarning should be triggered")

        warnings.resetwarnings()

        Drink.drop_collection()
        AlcoholicDrink.drop_collection()
        Drinker.drop_collection()

        red_bull = Drink(name='Red Bull')
        red_bull.save()

        programmer = Drinker(drink=red_bull)
        programmer.save()

        beer = AlcoholicDrink(name='Beer')
        beer.save()
        real_person = Drinker(drink=beer)
        real_person.save()

        self.assertEqual(Drinker.objects[0].drink.name, red_bull.name)
        self.assertEqual(Drinker.objects[1].drink.name, beer.name)


if __name__ == '__main__':
    unittest.main()

tests/document/test_class_methods.py: 349 additions (new file)
@@ -0,0 +1,349 @@
import unittest

from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.pymongo_support import list_collection_names
from mongoengine.queryset import NULLIFY, PULL


class TestClassMethods(unittest.TestCase):
    def setUp(self):
        connect(db="mongoenginetest")
        self.db = get_db()

        class Person(Document):
            name = StringField()
            age = IntField()

            non_field = True

            meta = {"allow_inheritance": True}

        self.Person = Person

    def tearDown(self):
        for collection in list_collection_names(self.db):
            self.db.drop_collection(collection)

    def test_definition(self):
        """Ensure that a document may be defined using fields."""
        assert ["_cls", "age", "id", "name"] == sorted(self.Person._fields.keys())
        assert ["IntField", "ObjectIdField", "StringField", "StringField"] == sorted(
            x.__class__.__name__ for x in self.Person._fields.values()
        )

    def test_get_db(self):
        """Ensure that get_db returns the expected db."""
        db = self.Person._get_db()
        assert self.db == db

    def test_get_collection_name(self):
        """Ensure that get_collection_name returns the expected collection
        name.
        """
        collection_name = "person"
        assert collection_name == self.Person._get_collection_name()

    def test_get_collection(self):
        """Ensure that get_collection returns the expected collection."""
        collection_name = "person"
        collection = self.Person._get_collection()
        assert self.db[collection_name] == collection

    def test_drop_collection(self):
        """Ensure that the collection may be dropped from the database."""
        collection_name = "person"
        self.Person(name="Test").save()
        assert collection_name in list_collection_names(self.db)

        self.Person.drop_collection()
        assert collection_name not in list_collection_names(self.db)

    def test_register_delete_rule(self):
        """Ensure that register delete rule adds a delete rule to the document
        meta.
        """

        class Job(Document):
            employee = ReferenceField(self.Person)

        assert self.Person._meta.get("delete_rules") is None

        self.Person.register_delete_rule(Job, "employee", NULLIFY)
        assert self.Person._meta["delete_rules"] == {(Job, "employee"): NULLIFY}

    def test_compare_indexes(self):
        """Ensure that the indexes are properly created and that
        compare_indexes identifies the missing/extra indexes
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()
            tags = StringField()

            meta = {"indexes": [("author", "title")]}

        BlogPost.drop_collection()

        BlogPost.ensure_indexes()
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

        BlogPost.ensure_index(["author", "description"])
        assert BlogPost.compare_indexes() == {
            "missing": [],
            "extra": [[("author", 1), ("description", 1)]],
        }

        BlogPost._get_collection().drop_index("author_1_description_1")
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

        BlogPost._get_collection().drop_index("author_1_title_1")
        assert BlogPost.compare_indexes() == {
            "missing": [[("author", 1), ("title", 1)]],
            "extra": [],
        }
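        # MongoDB's default index names join each key with its direction
        # ("author_1_title_1"), which is the name drop_index is given above.
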
    def test_compare_indexes_inheritance(self):
        """Ensure that the indexes are properly created and that
        compare_indexes identifies the missing/extra indexes for subclassed
        documents (_cls included)
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()

            meta = {"allow_inheritance": True}

        class BlogPostWithTags(BlogPost):
            tags = StringField()
            tag_list = ListField(StringField())

            meta = {"indexes": [("author", "tags")]}

        BlogPost.drop_collection()

        BlogPost.ensure_indexes()
        BlogPostWithTags.ensure_indexes()
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

        BlogPostWithTags.ensure_index(["author", "tag_list"])
        assert BlogPost.compare_indexes() == {
            "missing": [],
            "extra": [[("_cls", 1), ("author", 1), ("tag_list", 1)]],
        }

        BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tag_list_1")
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

        BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tags_1")
        assert BlogPost.compare_indexes() == {
            "missing": [[("_cls", 1), ("author", 1), ("tags", 1)]],
            "extra": [],
        }

    def test_compare_indexes_multiple_subclasses(self):
        """Ensure that compare_indexes behaves correctly when called on a
        class whose base class has multiple subclasses
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()

            meta = {"allow_inheritance": True}

        class BlogPostWithTags(BlogPost):
            tags = StringField()
            tag_list = ListField(StringField())

            meta = {"indexes": [("author", "tags")]}

        class BlogPostWithCustomField(BlogPost):
            custom = DictField()

            meta = {"indexes": [("author", "custom")]}

        BlogPost.ensure_indexes()
        BlogPostWithTags.ensure_indexes()
        BlogPostWithCustomField.ensure_indexes()

        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}
        assert BlogPostWithTags.compare_indexes() == {"missing": [], "extra": []}
        assert BlogPostWithCustomField.compare_indexes() == {"missing": [], "extra": []}

    def test_compare_indexes_for_text_indexes(self):
        """Ensure that compare_indexes behaves correctly for text indexes"""

        class Doc(Document):
            a = StringField()
            b = StringField()
            meta = {
                "indexes": [
                    {
                        "fields": ["$a", "$b"],
                        "default_language": "english",
                        "weights": {"a": 10, "b": 2},
                    }
                ]
            }

        Doc.drop_collection()
        Doc.ensure_indexes()
        actual = Doc.compare_indexes()
        expected = {"missing": [], "extra": []}
        assert actual == expected

    def test_list_indexes_inheritance(self):
        """ensure that all of the indexes are listed regardless of the super-
        or sub-class that we call it from
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()

            meta = {"allow_inheritance": True}

        class BlogPostWithTags(BlogPost):
            tags = StringField()

            meta = {"indexes": [("author", "tags")]}

        class BlogPostWithTagsAndExtraText(BlogPostWithTags):
            extra_text = StringField()

            meta = {"indexes": [("author", "tags", "extra_text")]}

        BlogPost.drop_collection()

        BlogPost.ensure_indexes()
        BlogPostWithTags.ensure_indexes()
        BlogPostWithTagsAndExtraText.ensure_indexes()

        assert BlogPost.list_indexes() == BlogPostWithTags.list_indexes()
        assert BlogPost.list_indexes() == BlogPostWithTagsAndExtraText.list_indexes()
        assert BlogPost.list_indexes() == [
            [("_cls", 1), ("author", 1), ("tags", 1)],
            [("_cls", 1), ("author", 1), ("tags", 1), ("extra_text", 1)],
            [("_id", 1)],
            [("_cls", 1)],
        ]

    def test_register_delete_rule_inherited(self):
        class Vaccine(Document):
            name = StringField(required=True)

            meta = {"indexes": ["name"]}

        class Animal(Document):
            family = StringField(required=True)
            vaccine_made = ListField(
                ReferenceField("Vaccine", reverse_delete_rule=PULL)
            )

            meta = {"allow_inheritance": True, "indexes": ["family"]}

        class Cat(Animal):
            name = StringField(required=True)

        assert Vaccine._meta["delete_rules"][(Animal, "vaccine_made")] == PULL
        assert Vaccine._meta["delete_rules"][(Cat, "vaccine_made")] == PULL

    def test_collection_naming(self):
        """Ensure that a collection with a specified name may be used."""

        class DefaultNamingTest(Document):
            pass

        assert "default_naming_test" == DefaultNamingTest._get_collection_name()

        class CustomNamingTest(Document):
            meta = {"collection": "pimp_my_collection"}

        assert "pimp_my_collection" == CustomNamingTest._get_collection_name()

        class DynamicNamingTest(Document):
            meta = {"collection": lambda c: "DYNAMO"}

        assert "DYNAMO" == DynamicNamingTest._get_collection_name()

        # Use Abstract class to handle backwards compatibility
        class BaseDocument(Document):
            meta = {"abstract": True, "collection": lambda c: c.__name__.lower()}

        class OldNamingConvention(BaseDocument):
            pass

        assert "oldnamingconvention" == OldNamingConvention._get_collection_name()

        class InheritedAbstractNamingTest(BaseDocument):
            meta = {"collection": "wibble"}

        assert "wibble" == InheritedAbstractNamingTest._get_collection_name()

        # Mixin tests
        class BaseMixin:
            meta = {"collection": lambda c: c.__name__.lower()}

        class OldMixinNamingConvention(Document, BaseMixin):
            pass

        assert (
            "oldmixinnamingconvention"
            == OldMixinNamingConvention._get_collection_name()
        )

        class BaseMixin:
            meta = {"collection": lambda c: c.__name__.lower()}

        class BaseDocument(Document, BaseMixin):
            meta = {"allow_inheritance": True}

        class MyDocument(BaseDocument):
            pass

        assert "basedocument" == MyDocument._get_collection_name()

    def test_custom_collection_name_operations(self):
        """Ensure that a collection with a specified name is used as expected."""
        collection_name = "personCollTest"

        class Person(Document):
            name = StringField()
            meta = {"collection": collection_name}

        Person(name="Test User").save()
        assert collection_name in list_collection_names(self.db)

        user_obj = self.db[collection_name].find_one()
        assert user_obj["name"] == "Test User"

        user_obj = Person.objects[0]
        assert user_obj.name == "Test User"

        Person.drop_collection()
        assert collection_name not in list_collection_names(self.db)

    def test_collection_name_and_primary(self):
        """Ensure that a collection with a specified name may be used."""

        class Person(Document):
            name = StringField(primary_key=True)
            meta = {"collection": "app"}

        Person(name="Test User").save()

        user_obj = Person.objects.first()
        assert user_obj.name == "Test User"

        Person.drop_collection()


if __name__ == "__main__":
    unittest.main()

tests/document/test_delta.py: 957 additions (new file)
@@ -0,0 +1,957 @@
import unittest

from bson import SON

from mongoengine import *
from mongoengine.pymongo_support import list_collection_names
from tests.utils import MongoDBTestCase


class TestDelta(MongoDBTestCase):
    def setUp(self):
        super().setUp()

        class Person(Document):
            name = StringField()
            age = IntField()

            non_field = True

            meta = {"allow_inheritance": True}

        self.Person = Person

    def tearDown(self):
        for collection in list_collection_names(self.db):
            self.db.drop_collection(collection)

    def test_delta(self):
        self.delta(Document)
        self.delta(DynamicDocument)

    @staticmethod
    def delta(DocClass):
        class Doc(DocClass):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        assert doc._get_changed_fields() == []
        assert doc._delta() == ({}, {})

        doc.string_field = "hello"
        assert doc._get_changed_fields() == ["string_field"]
        assert doc._delta() == ({"string_field": "hello"}, {})

        doc._changed_fields = []
        doc.int_field = 1
        assert doc._get_changed_fields() == ["int_field"]
        assert doc._delta() == ({"int_field": 1}, {})

        doc._changed_fields = []
        dict_value = {"hello": "world", "ping": "pong"}
        doc.dict_field = dict_value
        assert doc._get_changed_fields() == ["dict_field"]
        assert doc._delta() == ({"dict_field": dict_value}, {})

        doc._changed_fields = []
        list_value = ["1", 2, {"hello": "world"}]
        doc.list_field = list_value
        assert doc._get_changed_fields() == ["list_field"]
        assert doc._delta() == ({"list_field": list_value}, {})

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        assert doc._get_changed_fields() == ["dict_field"]
        assert doc._delta() == ({}, {"dict_field": 1})

        doc._changed_fields = []
        doc.list_field = []
        assert doc._get_changed_fields() == ["list_field"]
        assert doc._delta() == ({}, {"list_field": 1})
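        # _delta() returns a (sets, unsets) pair; on save these feed the
        # update, roughly {'$set': sets, '$unset': unsets} (approximate
        # shape, shown for illustration only).
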
    def test_delta_recursive(self):
        self.delta_recursive(Document, EmbeddedDocument)
        self.delta_recursive(DynamicDocument, EmbeddedDocument)
        self.delta_recursive(Document, DynamicEmbeddedDocument)
        self.delta_recursive(DynamicDocument, DynamicEmbeddedDocument)

    def delta_recursive(self, DocClass, EmbeddedClass):
        class Embedded(EmbeddedClass):
            id = StringField()
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()

        class Doc(DocClass):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()
            embedded_field = EmbeddedDocumentField(Embedded)

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        assert doc._get_changed_fields() == []
        assert doc._delta() == ({}, {})

        embedded_1 = Embedded()
        embedded_1.id = "010101"
        embedded_1.string_field = "hello"
        embedded_1.int_field = 1
        embedded_1.dict_field = {"hello": "world"}
        embedded_1.list_field = ["1", 2, {"hello": "world"}]
        doc.embedded_field = embedded_1

        assert doc._get_changed_fields() == ["embedded_field"]

        embedded_delta = {
            "id": "010101",
            "string_field": "hello",
            "int_field": 1,
            "dict_field": {"hello": "world"},
            "list_field": ["1", 2, {"hello": "world"}],
        }
        assert doc.embedded_field._delta() == (embedded_delta, {})
        assert doc._delta() == ({"embedded_field": embedded_delta}, {})

        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.dict_field = {}
        assert doc._get_changed_fields() == ["embedded_field.dict_field"]
        assert doc.embedded_field._delta() == ({}, {"dict_field": 1})
        assert doc._delta() == ({}, {"embedded_field.dict_field": 1})
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.dict_field == {}

        doc.embedded_field.list_field = []
        assert doc._get_changed_fields() == ["embedded_field.list_field"]
        assert doc.embedded_field._delta() == ({}, {"list_field": 1})
        assert doc._delta() == ({}, {"embedded_field.list_field": 1})
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field == []

        embedded_2 = Embedded()
        embedded_2.string_field = "hello"
        embedded_2.int_field = 1
        embedded_2.dict_field = {"hello": "world"}
        embedded_2.list_field = ["1", 2, {"hello": "world"}]

        doc.embedded_field.list_field = ["1", 2, embedded_2]
        assert doc._get_changed_fields() == ["embedded_field.list_field"]

        assert doc.embedded_field._delta() == (
            {
                "list_field": [
                    "1",
                    2,
                    {
                        "_cls": "Embedded",
                        "string_field": "hello",
                        "dict_field": {"hello": "world"},
                        "int_field": 1,
                        "list_field": ["1", 2, {"hello": "world"}],
                    },
                ]
            },
            {},
        )

        assert doc._delta() == (
            {
                "embedded_field.list_field": [
                    "1",
                    2,
                    {
                        "_cls": "Embedded",
                        "string_field": "hello",
                        "dict_field": {"hello": "world"},
                        "int_field": 1,
                        "list_field": ["1", 2, {"hello": "world"}],
                    },
                ]
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)

        assert doc.embedded_field.list_field[0] == "1"
        assert doc.embedded_field.list_field[1] == 2
        for k in doc.embedded_field.list_field[2]._fields:
            assert doc.embedded_field.list_field[2][k] == embedded_2[k]

        doc.embedded_field.list_field[2].string_field = "world"
        assert doc._get_changed_fields() == ["embedded_field.list_field.2.string_field"]
        assert doc.embedded_field._delta() == (
            {"list_field.2.string_field": "world"},
            {},
        )
        assert doc._delta() == (
            {"embedded_field.list_field.2.string_field": "world"},
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].string_field == "world"

        # Test multiple assignments
        doc.embedded_field.list_field[2].string_field = "hello world"
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        assert doc._get_changed_fields() == ["embedded_field.list_field.2"]
        assert doc.embedded_field._delta() == (
            {
                "list_field.2": {
                    "_cls": "Embedded",
                    "string_field": "hello world",
                    "int_field": 1,
                    "list_field": ["1", 2, {"hello": "world"}],
                    "dict_field": {"hello": "world"},
                }
            },
            {},
        )
        assert doc._delta() == (
            {
                "embedded_field.list_field.2": {
                    "_cls": "Embedded",
                    "string_field": "hello world",
                    "int_field": 1,
                    "list_field": ["1", 2, {"hello": "world"}],
                    "dict_field": {"hello": "world"},
                }
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].string_field == "hello world"

        # Test list native methods
        doc.embedded_field.list_field[2].list_field.pop(0)
        assert doc._delta() == (
            {"embedded_field.list_field.2.list_field": [2, {"hello": "world"}]},
            {},
        )
        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        assert doc._delta() == (
            {"embedded_field.list_field.2.list_field": [2, {"hello": "world"}, 1]},
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].list_field == [2, {"hello": "world"}, 1]

        doc.embedded_field.list_field[2].list_field.sort(key=str)
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].list_field == [1, 2, {"hello": "world"}]

        del doc.embedded_field.list_field[2].list_field[2]["hello"]
        assert doc._delta() == (
            {},
            {"embedded_field.list_field.2.list_field.2.hello": 1},
        )
        doc.save()
        doc = doc.reload(10)

        del doc.embedded_field.list_field[2].list_field
        assert doc._delta() == ({}, {"embedded_field.list_field.2.list_field": 1})

        doc.save()
        doc = doc.reload(10)

        doc.dict_field["Embedded"] = embedded_1
        doc.save()
        doc = doc.reload(10)

        doc.dict_field["Embedded"].string_field = "Hello World"
        assert doc._get_changed_fields() == ["dict_field.Embedded.string_field"]
        assert doc._delta() == ({"dict_field.Embedded.string_field": "Hello World"}, {})

    def test_circular_reference_deltas(self):
        self.circular_reference_deltas(Document, Document)
        self.circular_reference_deltas(Document, DynamicDocument)
        self.circular_reference_deltas(DynamicDocument, Document)
        self.circular_reference_deltas(DynamicDocument, DynamicDocument)

    def circular_reference_deltas(self, DocClass1, DocClass2):
        class Person(DocClass1):
            name = StringField()
            owns = ListField(ReferenceField("Organization"))

        class Organization(DocClass2):
            name = StringField()
            owner = ReferenceField("Person")

        Person.drop_collection()
        Organization.drop_collection()

        person = Person(name="owner").save()
        organization = Organization(name="company").save()

        person.owns.append(organization)
        organization.owner = person

        person.save()
        organization.save()

        p = Person.objects[0].select_related()
        o = Organization.objects.first()
        assert p.owns[0] == o
        assert o.owner == p

    def test_circular_reference_deltas_2(self):
        self.circular_reference_deltas_2(Document, Document)
        self.circular_reference_deltas_2(Document, DynamicDocument)
        self.circular_reference_deltas_2(DynamicDocument, Document)
        self.circular_reference_deltas_2(DynamicDocument, DynamicDocument)

    def circular_reference_deltas_2(self, DocClass1, DocClass2, dbref=True):
        class Person(DocClass1):
            name = StringField()
            owns = ListField(ReferenceField("Organization", dbref=dbref))
            employer = ReferenceField("Organization", dbref=dbref)

        class Organization(DocClass2):
            name = StringField()
            owner = ReferenceField("Person", dbref=dbref)
            employees = ListField(ReferenceField("Person", dbref=dbref))

        Person.drop_collection()
        Organization.drop_collection()

        person = Person(name="owner").save()
        employee = Person(name="employee").save()
        organization = Organization(name="company").save()

        person.owns.append(organization)
        organization.owner = person

        organization.employees.append(employee)
        employee.employer = organization

        person.save()
        organization.save()
        employee.save()

        p = Person.objects.get(name="owner")
        e = Person.objects.get(name="employee")
        o = Organization.objects.first()

        assert p.owns[0] == o
        assert o.owner == p
        assert e.employer == o

        return person, organization, employee

    def test_delta_db_field(self):
        self.delta_db_field(Document)
        self.delta_db_field(DynamicDocument)

    def delta_db_field(self, DocClass):
        class Doc(DocClass):
            string_field = StringField(db_field="db_string_field")
            int_field = IntField(db_field="db_int_field")
            dict_field = DictField(db_field="db_dict_field")
            list_field = ListField(db_field="db_list_field")

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        assert doc._get_changed_fields() == []
        assert doc._delta() == ({}, {})

        doc.string_field = "hello"
        assert doc._get_changed_fields() == ["db_string_field"]
        assert doc._delta() == ({"db_string_field": "hello"}, {})

        doc._changed_fields = []
        doc.int_field = 1
        assert doc._get_changed_fields() == ["db_int_field"]
        assert doc._delta() == ({"db_int_field": 1}, {})

        doc._changed_fields = []
        dict_value = {"hello": "world", "ping": "pong"}
        doc.dict_field = dict_value
        assert doc._get_changed_fields() == ["db_dict_field"]
        assert doc._delta() == ({"db_dict_field": dict_value}, {})

        doc._changed_fields = []
        list_value = ["1", 2, {"hello": "world"}]
        doc.list_field = list_value
        assert doc._get_changed_fields() == ["db_list_field"]
        assert doc._delta() == ({"db_list_field": list_value}, {})

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        assert doc._get_changed_fields() == ["db_dict_field"]
        assert doc._delta() == ({}, {"db_dict_field": 1})

        doc._changed_fields = []
        doc.list_field = []
        assert doc._get_changed_fields() == ["db_list_field"]
        assert doc._delta() == ({}, {"db_list_field": 1})

        # Test it saves that data
        doc = Doc()
        doc.save()

        doc.string_field = "hello"
        doc.int_field = 1
        doc.dict_field = {"hello": "world"}
        doc.list_field = ["1", 2, {"hello": "world"}]
        doc.save()
        doc = doc.reload(10)

        assert doc.string_field == "hello"
        assert doc.int_field == 1
        assert doc.dict_field == {"hello": "world"}
        assert doc.list_field == ["1", 2, {"hello": "world"}]
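        # Changed-field names and delta keys use the db_field names rather
        # than the Python attribute names, since the delta is applied
        # directly to the stored MongoDB document.
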
    def test_delta_recursive_db_field_on_doc_and_embeddeddoc(self):
        self.delta_recursive_db_field(Document, EmbeddedDocument)

    def test_delta_recursive_db_field_on_doc_and_dynamicembeddeddoc(self):
        self.delta_recursive_db_field(Document, DynamicEmbeddedDocument)

    def test_delta_recursive_db_field_on_dynamicdoc_and_embeddeddoc(self):
        self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument)

    def test_delta_recursive_db_field_on_dynamicdoc_and_dynamicembeddeddoc(self):
        self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument)

    @staticmethod
    def delta_recursive_db_field(DocClass, EmbeddedClass):
        class Embedded(EmbeddedClass):
            string_field = StringField(db_field="db_string_field")
            int_field = IntField(db_field="db_int_field")
            dict_field = DictField(db_field="db_dict_field")
            list_field = ListField(db_field="db_list_field")

        class Doc(DocClass):
            string_field = StringField(db_field="db_string_field")
            int_field = IntField(db_field="db_int_field")
            dict_field = DictField(db_field="db_dict_field")
            list_field = ListField(db_field="db_list_field")
            embedded_field = EmbeddedDocumentField(
                Embedded, db_field="db_embedded_field"
            )

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        assert doc._get_changed_fields() == []
        assert doc._delta() == ({}, {})

        embedded_1 = Embedded()
        embedded_1.string_field = "hello"
        embedded_1.int_field = 1
        embedded_1.dict_field = {"hello": "world"}
        embedded_1.list_field = ["1", 2, {"hello": "world"}]
        doc.embedded_field = embedded_1

        assert doc._get_changed_fields() == ["db_embedded_field"]

        embedded_delta = {
            "db_string_field": "hello",
            "db_int_field": 1,
            "db_dict_field": {"hello": "world"},
            "db_list_field": ["1", 2, {"hello": "world"}],
        }
        assert doc.embedded_field._delta() == (embedded_delta, {})
        assert doc._delta() == ({"db_embedded_field": embedded_delta}, {})

        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.dict_field = {}
        assert doc._get_changed_fields() == ["db_embedded_field.db_dict_field"]
        assert doc.embedded_field._delta() == ({}, {"db_dict_field": 1})
        assert doc._delta() == ({}, {"db_embedded_field.db_dict_field": 1})
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.dict_field == {}

        assert doc._get_changed_fields() == []
        doc.embedded_field.list_field = []
        assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"]
        assert doc.embedded_field._delta() == ({}, {"db_list_field": 1})
        assert doc._delta() == ({}, {"db_embedded_field.db_list_field": 1})
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field == []

        embedded_2 = Embedded()
        embedded_2.string_field = "hello"
        embedded_2.int_field = 1
        embedded_2.dict_field = {"hello": "world"}
        embedded_2.list_field = ["1", 2, {"hello": "world"}]

        doc.embedded_field.list_field = ["1", 2, embedded_2]
        assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"]
        assert doc.embedded_field._delta() == (
            {
                "db_list_field": [
                    "1",
                    2,
                    {
                        "_cls": "Embedded",
                        "db_string_field": "hello",
                        "db_dict_field": {"hello": "world"},
                        "db_int_field": 1,
                        "db_list_field": ["1", 2, {"hello": "world"}],
                    },
                ]
            },
            {},
        )

        assert doc._delta() == (
            {
                "db_embedded_field.db_list_field": [
                    "1",
                    2,
                    {
                        "_cls": "Embedded",
                        "db_string_field": "hello",
                        "db_dict_field": {"hello": "world"},
                        "db_int_field": 1,
                        "db_list_field": ["1", 2, {"hello": "world"}],
                    },
                ]
            },
            {},
        )
        doc.save()
        assert doc._get_changed_fields() == []
        doc = doc.reload(10)

        assert doc.embedded_field.list_field[0] == "1"
        assert doc.embedded_field.list_field[1] == 2
        for k in doc.embedded_field.list_field[2]._fields:
            assert doc.embedded_field.list_field[2][k] == embedded_2[k]

        doc.embedded_field.list_field[2].string_field = "world"
        assert doc._get_changed_fields() == [
            "db_embedded_field.db_list_field.2.db_string_field"
        ]
        assert doc.embedded_field._delta() == (
            {"db_list_field.2.db_string_field": "world"},
            {},
        )
        assert doc._delta() == (
            {"db_embedded_field.db_list_field.2.db_string_field": "world"},
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].string_field == "world"

        # Test multiple assignments
        doc.embedded_field.list_field[2].string_field = "hello world"
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        assert doc._get_changed_fields() == ["db_embedded_field.db_list_field.2"]
        assert doc.embedded_field._delta() == (
            {
                "db_list_field.2": {
                    "_cls": "Embedded",
                    "db_string_field": "hello world",
                    "db_int_field": 1,
                    "db_list_field": ["1", 2, {"hello": "world"}],
                    "db_dict_field": {"hello": "world"},
                }
            },
            {},
        )
        assert doc._delta() == (
            {
                "db_embedded_field.db_list_field.2": {
                    "_cls": "Embedded",
                    "db_string_field": "hello world",
                    "db_int_field": 1,
                    "db_list_field": ["1", 2, {"hello": "world"}],
                    "db_dict_field": {"hello": "world"},
                }
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].string_field == "hello world"

        # Test list native methods
        doc.embedded_field.list_field[2].list_field.pop(0)
        assert doc._delta() == (
            {
                "db_embedded_field.db_list_field.2.db_list_field": [
                    2,
                    {"hello": "world"},
                ]
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        assert doc._delta() == (
            {
                "db_embedded_field.db_list_field.2.db_list_field": [
                    2,
                    {"hello": "world"},
                    1,
                ]
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].list_field == [2, {"hello": "world"}, 1]

        doc.embedded_field.list_field[2].list_field.sort(key=str)
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].list_field == [1, 2, {"hello": "world"}]

        del doc.embedded_field.list_field[2].list_field[2]["hello"]
        assert doc._delta() == (
            {},
            {"db_embedded_field.db_list_field.2.db_list_field.2.hello": 1},
        )
        doc.save()
        doc = doc.reload(10)

        assert doc._delta() == (
            {},
            {},
        )
        del doc.embedded_field.list_field[2].list_field
        assert doc._delta() == (
            {},
            {"db_embedded_field.db_list_field.2.db_list_field": 1},
        )

    def test_delta_for_dynamic_documents(self):
        class Person(DynamicDocument):
            name = StringField()
            meta = {"allow_inheritance": True}

        Person.drop_collection()

        p = Person(name="James", age=34)
        assert p._delta() == (
            SON([("_cls", "Person"), ("name", "James"), ("age", 34)]),
            {},
        )

        p.doc = 123
        del p.doc
        assert p._delta() == (
            SON([("_cls", "Person"), ("name", "James"), ("age", 34)]),
            {},
        )

        p = Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        p.age = 24
        assert p.age == 24
        assert p._get_changed_fields() == ["age"]
        assert p._delta() == ({"age": 24}, {})

        p = Person.objects(age=22).get()
        p.age = 24
        assert p.age == 24
        assert p._get_changed_fields() == ["age"]
        assert p._delta() == ({"age": 24}, {})

        p.save()
        assert 1 == Person.objects(age=24).count()

    def test_dynamic_delta(self):
        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        assert doc._get_changed_fields() == []
        assert doc._delta() == ({}, {})

        doc.string_field = "hello"
        assert doc._get_changed_fields() == ["string_field"]
        assert doc._delta() == ({"string_field": "hello"}, {})

        doc._changed_fields = []
        doc.int_field = 1
        assert doc._get_changed_fields() == ["int_field"]
        assert doc._delta() == ({"int_field": 1}, {})

        doc._changed_fields = []
        dict_value = {"hello": "world", "ping": "pong"}
        doc.dict_field = dict_value
        assert doc._get_changed_fields() == ["dict_field"]
        assert doc._delta() == ({"dict_field": dict_value}, {})

        doc._changed_fields = []
        list_value = ["1", 2, {"hello": "world"}]
        doc.list_field = list_value
        assert doc._get_changed_fields() == ["list_field"]
        assert doc._delta() == ({"list_field": list_value}, {})

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        assert doc._get_changed_fields() == ["dict_field"]
        assert doc._delta() == ({}, {"dict_field": 1})

        doc._changed_fields = []
        doc.list_field = []
        assert doc._get_changed_fields() == ["list_field"]
        assert doc._delta() == ({}, {"list_field": 1})

    def test_delta_with_dbref_true(self):
        person, organization, employee = self.circular_reference_deltas_2(
            Document, Document, True
        )
        employee.name = "test"

        assert organization._get_changed_fields() == []

        updates, removals = organization._delta()
        assert removals == {}
        assert updates == {}

        organization.employees.append(person)
        updates, removals = organization._delta()
        assert removals == {}
        assert "employees" in updates

    def test_delta_with_dbref_false(self):
        person, organization, employee = self.circular_reference_deltas_2(
            Document, Document, False
        )
        employee.name = "test"

        assert organization._get_changed_fields() == []

        updates, removals = organization._delta()
        assert removals == {}
        assert updates == {}

        organization.employees.append(person)
        updates, removals = organization._delta()
        assert removals == {}
        assert "employees" in updates

    def test_nested_nested_fields_mark_as_changed(self):
        class EmbeddedDoc(EmbeddedDocument):
            name = StringField()

        class MyDoc(Document):
            subs = MapField(MapField(EmbeddedDocumentField(EmbeddedDoc)))
            name = StringField()

        MyDoc.drop_collection()

        MyDoc(name="testcase1", subs={"a": {"b": EmbeddedDoc(name="foo")}}).save()

        mydoc = MyDoc.objects.first()
        subdoc = mydoc.subs["a"]["b"]
        subdoc.name = "bar"

        assert subdoc._get_changed_fields() == ["name"]
        assert mydoc._get_changed_fields() == ["subs.a.b.name"]

        mydoc._clear_changed_fields()
        assert mydoc._get_changed_fields() == []

    def test_nested_nested_fields_db_field_set__gets_mark_as_changed_and_cleaned(self):
        class EmbeddedDoc(EmbeddedDocument):
            name = StringField(db_field="db_name")

        class MyDoc(Document):
            embed = EmbeddedDocumentField(EmbeddedDoc, db_field="db_embed")
            name = StringField(db_field="db_name")

        MyDoc.drop_collection()

        MyDoc(name="testcase1", embed=EmbeddedDoc(name="foo")).save()

        mydoc = MyDoc.objects.first()
        mydoc.embed.name = "foo1"

        assert mydoc.embed._get_changed_fields() == ["db_name"]
        assert mydoc._get_changed_fields() == ["db_embed.db_name"]

        mydoc = MyDoc.objects.first()
        embed = EmbeddedDoc(name="foo2")
        embed.name = "bar"
        mydoc.embed = embed

        assert embed._get_changed_fields() == ["db_name"]
        assert mydoc._get_changed_fields() == ["db_embed"]

        mydoc._clear_changed_fields()
        assert mydoc._get_changed_fields() == []

    def test_lower_level_mark_as_changed(self):
        class EmbeddedDoc(EmbeddedDocument):
            name = StringField()

        class MyDoc(Document):
            subs = MapField(EmbeddedDocumentField(EmbeddedDoc))

        MyDoc.drop_collection()

        MyDoc().save()

        mydoc = MyDoc.objects.first()
        mydoc.subs["a"] = EmbeddedDoc()
        assert mydoc._get_changed_fields() == ["subs.a"]

        subdoc = mydoc.subs["a"]
        subdoc.name = "bar"

        assert subdoc._get_changed_fields() == ["name"]
        assert mydoc._get_changed_fields() == ["subs.a"]
        mydoc.save()

        mydoc._clear_changed_fields()
        assert mydoc._get_changed_fields() == []

    def test_upper_level_mark_as_changed(self):
        class EmbeddedDoc(EmbeddedDocument):
            name = StringField()

        class MyDoc(Document):
            subs = MapField(EmbeddedDocumentField(EmbeddedDoc))

        MyDoc.drop_collection()

        MyDoc(subs={"a": EmbeddedDoc(name="foo")}).save()

        mydoc = MyDoc.objects.first()
        subdoc = mydoc.subs["a"]
        subdoc.name = "bar"

        assert subdoc._get_changed_fields() == ["name"]
        assert mydoc._get_changed_fields() == ["subs.a.name"]

        mydoc.subs["a"] = EmbeddedDoc()
        assert mydoc._get_changed_fields() == ["subs.a"]
        mydoc.save()

        mydoc._clear_changed_fields()
        assert mydoc._get_changed_fields() == []

    def test_referenced_object_changed_attributes(self):
        """Ensures that when you save a new reference to a field, the
        referenced object isn't altered"""

        class Organization(Document):
            name = StringField()

        class User(Document):
            name = StringField()
            org = ReferenceField("Organization", required=True)

        Organization.drop_collection()
        User.drop_collection()

        org1 = Organization(name="Org 1")
        org1.save()

        org2 = Organization(name="Org 2")
        org2.save()

        user = User(name="Fred", org=org1)
        user.save()

        org1.reload()
        org2.reload()
        user.reload()
        assert org1.name == "Org 1"
        assert org2.name == "Org 2"
        assert user.name == "Fred"

        user.name = "Harold"
        user.org = org2

        org2.name = "New Org 2"
        assert org2.name == "New Org 2"

        user.save()
        org2.save()

        assert org2.name == "New Org 2"
        org2.reload()
        assert org2.name == "New Org 2"

    def test_delta_for_nested_map_fields(self):
        class UInfoDocument(Document):
            phone = StringField()

        class EmbeddedRole(EmbeddedDocument):
            type = StringField()

        class EmbeddedUser(EmbeddedDocument):
            name = StringField()
            roles = MapField(field=EmbeddedDocumentField(EmbeddedRole))
            rolist = ListField(field=EmbeddedDocumentField(EmbeddedRole))
            info = ReferenceField(UInfoDocument)

        class Doc(Document):
            users = MapField(field=EmbeddedDocumentField(EmbeddedUser))
            num = IntField(default=-1)

        Doc.drop_collection()

        doc = Doc(num=1)
        doc.users["007"] = EmbeddedUser(name="Agent007")
        doc.save()

        uinfo = UInfoDocument(phone="79089269066")
        uinfo.save()

        d = Doc.objects(num=1).first()
        d.users["007"]["roles"]["666"] = EmbeddedRole(type="superadmin")
        d.users["007"]["rolist"].append(EmbeddedRole(type="oops"))
        d.users["007"]["info"] = uinfo
        delta = d._delta()
        assert "users.007.roles.666" in delta[0]
        assert "users.007.rolist" in delta[0]
        assert "users.007.info" in delta[0]
        assert "superadmin" == delta[0]["users.007.roles.666"]["type"]
        assert "oops" == delta[0]["users.007.rolist"][0]["type"]
        assert uinfo.id == delta[0]["users.007.info"]


if __name__ == "__main__":
    unittest.main()

@@ -1,20 +1,20 @@
 import unittest
 
+import pytest
+
 from mongoengine import *
 from mongoengine.connection import get_db
+from tests.utils import MongoDBTestCase
 
-__all__ = ("DynamicTest", )
+__all__ = ("TestDynamicDocument",)
 
 
-class DynamicTest(unittest.TestCase):
-
+class TestDynamicDocument(MongoDBTestCase):
     def setUp(self):
-        connect(db='mongoenginetest')
-        self.db = get_db()
+        super().setUp()
 
         class Person(DynamicDocument):
             name = StringField()
-            meta = {'allow_inheritance': True}
+            meta = {"allow_inheritance": True}
 
         Person.drop_collection()
@@ -27,16 +27,28 @@ class DynamicTest(unittest.TestCase):
         p.name = "James"
         p.age = 34
 
-        self.assertEqual(p.to_mongo(), {"_cls": "Person", "name": "James",
-                                        "age": 34})
-        self.assertEqual(p.to_mongo().keys(), ["_cls", "name", "age"])
+        assert p.to_mongo() == {"_cls": "Person", "name": "James", "age": 34}
+        assert p.to_mongo().keys() == ["_cls", "name", "age"]
         p.save()
-        self.assertEqual(p.to_mongo().keys(), ["_id", "_cls", "name", "age"])
+        assert p.to_mongo().keys() == ["_id", "_cls", "name", "age"]
 
-        self.assertEqual(self.Person.objects.first().age, 34)
+        assert self.Person.objects.first().age == 34
 
         # Confirm no changes to self.Person
-        self.assertFalse(hasattr(self.Person, 'age'))
+        assert not hasattr(self.Person, "age")
+
+    def test_dynamic_document_parse_values_in_constructor_like_document_do(self):
+        class ProductDynamicDocument(DynamicDocument):
+            title = StringField()
+            price = FloatField()
+
+        class ProductDocument(Document):
+            title = StringField()
+            price = FloatField()
+
+        product = ProductDocument(title="Blabla", price="12.5")
+        dyn_product = ProductDynamicDocument(title="Blabla", price="12.5")
+        assert product.price == dyn_product.price == 12.5
 
     def test_change_scope_of_variable(self):
         """Test changing the scope of a dynamic field has no adverse effects"""
@@ -46,11 +58,11 @@ class DynamicTest(unittest.TestCase):
         p.save()
 
         p = self.Person.objects.get()
-        p.misc = {'hello': 'world'}
+        p.misc = {"hello": "world"}
         p.save()
 
         p = self.Person.objects.get()
-        self.assertEqual(p.misc, {'hello': 'world'})
+        assert p.misc == {"hello": "world"}
 
     def test_delete_dynamic_field(self):
         """Test deleting a dynamic field works"""
@@ -61,23 +73,23 @@ class DynamicTest(unittest.TestCase):
         p.save()
 
         p = self.Person.objects.get()
-        p.misc = {'hello': 'world'}
+        p.misc = {"hello": "world"}
         p.save()
 
         p = self.Person.objects.get()
-        self.assertEqual(p.misc, {'hello': 'world'})
+        assert p.misc == {"hello": "world"}
         collection = self.db[self.Person._get_collection_name()]
         obj = collection.find_one()
-        self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name'])
+        assert sorted(obj.keys()) == ["_cls", "_id", "misc", "name"]
 
         del p.misc
         p.save()
 
         p = self.Person.objects.get()
-        self.assertFalse(hasattr(p, 'misc'))
+        assert not hasattr(p, "misc")
 
         obj = collection.find_one()
-        self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'name'])
+        assert sorted(obj.keys()) == ["_cls", "_id", "name"]
 
     def test_reload_after_unsetting(self):
         p = self.Person()
@@ -91,12 +103,55 @@ class DynamicTest(unittest.TestCase):
         p = self.Person.objects.create()
         p.update(age=1)
 
-        self.assertEqual(len(p._data), 3)
-        self.assertEqual(sorted(p._data.keys()), ['_cls', 'id', 'name'])
+        assert len(p._data) == 3
+        assert sorted(p._data.keys()) == ["_cls", "id", "name"]
 
         p.reload()
-        self.assertEqual(len(p._data), 4)
-        self.assertEqual(sorted(p._data.keys()), ['_cls', 'age', 'id', 'name'])
+        assert len(p._data) == 4
+        assert sorted(p._data.keys()) == ["_cls", "age", "id", "name"]
+
+    def test_fields_without_underscore(self):
+        """Ensure we can query dynamic fields"""
+        Person = self.Person
+
+        p = self.Person(name="Dean")
+        p.save()
+
+        raw_p = Person.objects.as_pymongo().get(id=p.id)
+        assert raw_p == {"_cls": "Person", "_id": p.id, "name": "Dean"}
+
+        p.name = "OldDean"
+        p.newattr = "garbage"
+        p.save()
+        raw_p = Person.objects.as_pymongo().get(id=p.id)
+        assert raw_p == {
+            "_cls": "Person",
+            "_id": p.id,
+            "name": "OldDean",
+            "newattr": "garbage",
+        }
+
+    def test_fields_containing_underscore(self):
+        """Ensure we can query dynamic fields"""
+
+        class WeirdPerson(DynamicDocument):
+            name = StringField()
+            _name = StringField()
+
+        WeirdPerson.drop_collection()
+
+        p = WeirdPerson(name="Dean", _name="Dean")
+        p.save()
+
+        raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id)
+        assert raw_p == {"_id": p.id, "_name": "Dean", "name": "Dean"}
+
+        p.name = "OldDean"
+        p._name = "NewDean"
+        p._newattr1 = "garbage"  # Unknown fields won't be added
+        p.save()
+        raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id)
+        assert raw_p == {"_id": p.id, "_name": "NewDean", "name": "OldDean"}
 
     def test_dynamic_document_queries(self):
         """Ensure we can query dynamic fields"""
@@ -105,10 +160,10 @@ class DynamicTest(unittest.TestCase):
|
||||
p.age = 22
|
||||
p.save()
|
||||
|
||||
self.assertEqual(1, self.Person.objects(age=22).count())
|
||||
assert 1 == self.Person.objects(age=22).count()
|
||||
p = self.Person.objects(age=22)
|
||||
p = p.get()
|
||||
self.assertEqual(22, p.age)
|
||||
assert 22 == p.age
|
||||
|
||||
def test_complex_dynamic_document_queries(self):
|
||||
class Person(DynamicDocument):
|
||||
@@ -128,26 +183,25 @@ class DynamicTest(unittest.TestCase):
|
||||
p2.age = 10
|
||||
p2.save()
|
||||
|
||||
self.assertEqual(Person.objects(age__icontains='ten').count(), 2)
|
||||
self.assertEqual(Person.objects(age__gte=10).count(), 1)
|
||||
assert Person.objects(age__icontains="ten").count() == 2
|
||||
assert Person.objects(age__gte=10).count() == 1
|
||||
|
||||
def test_complex_data_lookups(self):
|
||||
"""Ensure you can query dynamic document dynamic fields"""
|
||||
p = self.Person()
|
||||
p.misc = {'hello': 'world'}
|
||||
p.misc = {"hello": "world"}
|
||||
p.save()
|
||||
|
||||
self.assertEqual(1, self.Person.objects(misc__hello='world').count())
|
||||
assert 1 == self.Person.objects(misc__hello="world").count()
|
||||
|
||||
def test_three_level_complex_data_lookups(self):
|
||||
"""Ensure you can query three level document dynamic fields"""
|
||||
p = self.Person.objects.create(
|
||||
misc={'hello': {'hello2': 'world'}}
|
||||
)
|
||||
self.assertEqual(1, self.Person.objects(misc__hello__hello2='world').count())
|
||||
self.Person.objects.create(misc={"hello": {"hello2": "world"}})
|
||||
assert 1 == self.Person.objects(misc__hello__hello2="world").count()
|
||||
|
||||
def test_complex_embedded_document_validation(self):
|
||||
"""Ensure embedded dynamic documents may be validated"""
|
||||
|
||||
class Embedded(DynamicEmbeddedDocument):
|
||||
content = URLField()
|
||||
|
||||
@@ -157,27 +211,29 @@ class DynamicTest(unittest.TestCase):
|
||||
Doc.drop_collection()
|
||||
doc = Doc()
|
||||
|
||||
embedded_doc_1 = Embedded(content='http://mongoengine.org')
|
||||
embedded_doc_1 = Embedded(content="http://mongoengine.org")
|
||||
embedded_doc_1.validate()
|
||||
|
||||
embedded_doc_2 = Embedded(content='this is not a url')
|
||||
self.assertRaises(ValidationError, embedded_doc_2.validate)
|
||||
embedded_doc_2 = Embedded(content="this is not a url")
|
||||
with pytest.raises(ValidationError):
|
||||
embedded_doc_2.validate()
|
||||
|
||||
doc.embedded_field_1 = embedded_doc_1
|
||||
doc.embedded_field_2 = embedded_doc_2
|
||||
self.assertRaises(ValidationError, doc.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
doc.validate()
|
||||
|
||||
def test_inheritance(self):
|
||||
"""Ensure that dynamic document plays nice with inheritance"""
|
||||
|
||||
class Employee(self.Person):
|
||||
salary = IntField()
|
||||
|
||||
Employee.drop_collection()
|
||||
|
||||
self.assertTrue('name' in Employee._fields)
|
||||
self.assertTrue('salary' in Employee._fields)
|
||||
self.assertEqual(Employee._get_collection_name(),
|
||||
self.Person._get_collection_name())
|
||||
assert "name" in Employee._fields
|
||||
assert "salary" in Employee._fields
|
||||
assert Employee._get_collection_name() == self.Person._get_collection_name()
|
||||
|
||||
joe_bloggs = Employee()
|
||||
joe_bloggs.name = "Joe Bloggs"
|
||||
@@ -185,14 +241,15 @@ class DynamicTest(unittest.TestCase):
|
||||
joe_bloggs.age = 20
|
||||
joe_bloggs.save()
|
||||
|
||||
self.assertEqual(1, self.Person.objects(age=20).count())
|
||||
self.assertEqual(1, Employee.objects(age=20).count())
|
||||
assert 1 == self.Person.objects(age=20).count()
|
||||
assert 1 == Employee.objects(age=20).count()
|
||||
|
||||
joe_bloggs = self.Person.objects.first()
|
||||
self.assertTrue(isinstance(joe_bloggs, Employee))
|
||||
assert isinstance(joe_bloggs, Employee)
|
||||
|
||||
def test_embedded_dynamic_document(self):
|
||||
"""Test dynamic embedded documents"""
|
||||
|
||||
class Embedded(DynamicEmbeddedDocument):
|
||||
pass
|
||||
|
||||
@@ -203,33 +260,33 @@ class DynamicTest(unittest.TestCase):
|
||||
doc = Doc()
|
||||
|
||||
embedded_1 = Embedded()
|
||||
embedded_1.string_field = 'hello'
|
||||
embedded_1.string_field = "hello"
|
||||
embedded_1.int_field = 1
|
||||
embedded_1.dict_field = {'hello': 'world'}
|
||||
embedded_1.list_field = ['1', 2, {'hello': 'world'}]
|
||||
embedded_1.dict_field = {"hello": "world"}
|
||||
embedded_1.list_field = ["1", 2, {"hello": "world"}]
|
||||
doc.embedded_field = embedded_1
|
||||
|
||||
self.assertEqual(doc.to_mongo(), {
|
||||
assert doc.to_mongo() == {
|
||||
"embedded_field": {
|
||||
"_cls": "Embedded",
|
||||
"string_field": "hello",
|
||||
"int_field": 1,
|
||||
"dict_field": {"hello": "world"},
|
||||
"list_field": ['1', 2, {'hello': 'world'}]
|
||||
"list_field": ["1", 2, {"hello": "world"}],
|
||||
}
|
||||
}
|
||||
})
|
||||
doc.save()
|
||||
|
||||
doc = Doc.objects.first()
|
||||
self.assertEqual(doc.embedded_field.__class__, Embedded)
|
||||
self.assertEqual(doc.embedded_field.string_field, "hello")
|
||||
self.assertEqual(doc.embedded_field.int_field, 1)
|
||||
self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
|
||||
self.assertEqual(doc.embedded_field.list_field,
|
||||
['1', 2, {'hello': 'world'}])
|
||||
assert doc.embedded_field.__class__ == Embedded
|
||||
assert doc.embedded_field.string_field == "hello"
|
||||
assert doc.embedded_field.int_field == 1
|
||||
assert doc.embedded_field.dict_field == {"hello": "world"}
|
||||
assert doc.embedded_field.list_field == ["1", 2, {"hello": "world"}]
|
||||
|
||||
def test_complex_embedded_documents(self):
|
||||
"""Test complex dynamic embedded documents setups"""
|
||||
|
||||
class Embedded(DynamicEmbeddedDocument):
|
||||
pass
|
||||
|
||||
@@ -240,51 +297,54 @@ class DynamicTest(unittest.TestCase):
|
||||
doc = Doc()
|
||||
|
||||
embedded_1 = Embedded()
|
||||
embedded_1.string_field = 'hello'
|
||||
embedded_1.string_field = "hello"
|
||||
embedded_1.int_field = 1
|
||||
embedded_1.dict_field = {'hello': 'world'}
|
||||
embedded_1.dict_field = {"hello": "world"}
|
||||
|
||||
embedded_2 = Embedded()
|
||||
embedded_2.string_field = 'hello'
|
||||
embedded_2.string_field = "hello"
|
||||
embedded_2.int_field = 1
|
||||
embedded_2.dict_field = {'hello': 'world'}
|
||||
embedded_2.list_field = ['1', 2, {'hello': 'world'}]
|
||||
embedded_2.dict_field = {"hello": "world"}
|
||||
embedded_2.list_field = ["1", 2, {"hello": "world"}]
|
||||
|
||||
embedded_1.list_field = ['1', 2, embedded_2]
|
||||
embedded_1.list_field = ["1", 2, embedded_2]
|
||||
doc.embedded_field = embedded_1
|
||||
|
||||
self.assertEqual(doc.to_mongo(), {
|
||||
assert doc.to_mongo() == {
|
||||
"embedded_field": {
|
||||
"_cls": "Embedded",
|
||||
"string_field": "hello",
|
||||
"int_field": 1,
|
||||
"dict_field": {"hello": "world"},
|
||||
"list_field": ['1', 2,
|
||||
{"_cls": "Embedded",
|
||||
"list_field": [
|
||||
"1",
|
||||
2,
|
||||
{
|
||||
"_cls": "Embedded",
|
||||
"string_field": "hello",
|
||||
"int_field": 1,
|
||||
"dict_field": {"hello": "world"},
|
||||
"list_field": ['1', 2, {'hello': 'world'}]}
|
||||
]
|
||||
"list_field": ["1", 2, {"hello": "world"}],
|
||||
},
|
||||
],
|
||||
}
|
||||
}
|
||||
})
|
||||
doc.save()
|
||||
doc = Doc.objects.first()
|
||||
self.assertEqual(doc.embedded_field.__class__, Embedded)
|
||||
self.assertEqual(doc.embedded_field.string_field, "hello")
|
||||
self.assertEqual(doc.embedded_field.int_field, 1)
|
||||
self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
|
||||
self.assertEqual(doc.embedded_field.list_field[0], '1')
|
||||
self.assertEqual(doc.embedded_field.list_field[1], 2)
|
||||
assert doc.embedded_field.__class__ == Embedded
|
||||
assert doc.embedded_field.string_field == "hello"
|
||||
assert doc.embedded_field.int_field == 1
|
||||
assert doc.embedded_field.dict_field == {"hello": "world"}
|
||||
assert doc.embedded_field.list_field[0] == "1"
|
||||
assert doc.embedded_field.list_field[1] == 2
|
||||
|
||||
embedded_field = doc.embedded_field.list_field[2]
|
||||
|
||||
self.assertEqual(embedded_field.__class__, Embedded)
|
||||
self.assertEqual(embedded_field.string_field, "hello")
|
||||
self.assertEqual(embedded_field.int_field, 1)
|
||||
self.assertEqual(embedded_field.dict_field, {'hello': 'world'})
|
||||
self.assertEqual(embedded_field.list_field, ['1', 2,
|
||||
{'hello': 'world'}])
|
||||
assert embedded_field.__class__ == Embedded
|
||||
assert embedded_field.string_field == "hello"
|
||||
assert embedded_field.int_field == 1
|
||||
assert embedded_field.dict_field == {"hello": "world"}
|
||||
assert embedded_field.list_field == ["1", 2, {"hello": "world"}]
|
||||
|
||||
def test_dynamic_and_embedded(self):
|
||||
"""Ensure embedded documents play nicely"""
|
||||
@@ -303,18 +363,18 @@ class DynamicTest(unittest.TestCase):
|
||||
person.address.city = "Lundenne"
|
||||
person.save()
|
||||
|
||||
self.assertEqual(Person.objects.first().address.city, "Lundenne")
|
||||
assert Person.objects.first().address.city == "Lundenne"
|
||||
|
||||
person = Person.objects.first()
|
||||
person.address = Address(city="Londinium")
|
||||
person.save()
|
||||
|
||||
self.assertEqual(Person.objects.first().address.city, "Londinium")
|
||||
assert Person.objects.first().address.city == "Londinium"
|
||||
|
||||
person = Person.objects.first()
|
||||
person.age = 35
|
||||
person.save()
|
||||
self.assertEqual(Person.objects.first().age, 35)
|
||||
assert Person.objects.first().age == 35
|
||||
|
||||
def test_dynamic_embedded_works_with_only(self):
|
||||
"""Ensure custom fieldnames on a dynamic embedded document are found by qs.only()"""
|
||||
@@ -327,10 +387,15 @@ class DynamicTest(unittest.TestCase):
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
Person(name="Eric", address=Address(city="San Francisco", street_number="1337")).save()
|
||||
Person(
|
||||
name="Eric", address=Address(city="San Francisco", street_number="1337")
|
||||
).save()
|
||||
|
||||
self.assertEqual(Person.objects.first().address.street_number, '1337')
|
||||
self.assertEqual(Person.objects.only('address__street_number').first().address.street_number, '1337')
|
||||
assert Person.objects.first().address.street_number == "1337"
|
||||
assert (
|
||||
Person.objects.only("address__street_number").first().address.street_number
|
||||
== "1337"
|
||||
)
|
||||
|
||||
def test_dynamic_and_embedded_dict_access(self):
|
||||
"""Ensure embedded dynamic documents work with dict[] style access"""
|
||||
@@ -354,20 +419,21 @@ class DynamicTest(unittest.TestCase):
|
||||
person["address"]["city"] = "Lundenne"
|
||||
person.save()
|
||||
|
||||
self.assertEqual(Person.objects.first().address.city, "Lundenne")
|
||||
assert Person.objects.first().address.city == "Lundenne"
|
||||
|
||||
self.assertEqual(Person.objects.first().phone, "555-1212")
|
||||
assert Person.objects.first().phone == "555-1212"
|
||||
|
||||
person = Person.objects.first()
|
||||
person.address = Address(city="Londinium")
|
||||
person.save()
|
||||
|
||||
self.assertEqual(Person.objects.first().address.city, "Londinium")
|
||||
assert Person.objects.first().address.city == "Londinium"
|
||||
|
||||
person = Person.objects.first()
|
||||
person["age"] = 35
|
||||
person.save()
|
||||
self.assertEqual(Person.objects.first().age, 35)
|
||||
assert Person.objects.first().age == 35
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
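Aside: a minimal pytest-style sketch of the conversion pattern applied throughout the hunks above (illustrative only, not part of the changeset; the Person document here is hypothetical):

import pytest
from mongoengine import DynamicDocument, StringField, ValidationError

class Person(DynamicDocument):
    name = StringField(required=True)

def test_assertion_styles():
    p = Person(name="James")
    # Before: self.assertEqual(p.name, "James")
    assert p.name == "James"
    # Before: self.assertRaises(ValidationError, Person().validate)
    with pytest.raises(ValidationError):
        Person().validate()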
tests/document/test_indexes.py (1075 lines, new file): file diff suppressed because it is too large.
tests/document/test_inheritance.py (617 lines, new file):
@@ -0,0 +1,617 @@
import unittest
import warnings

import pytest

from mongoengine import (
BooleanField,
Document,
EmbeddedDocument,
EmbeddedDocumentField,
GenericReferenceField,
IntField,
ReferenceField,
StringField,
)
from mongoengine.pymongo_support import list_collection_names
from tests.fixtures import Base
from tests.utils import MongoDBTestCase


class TestInheritance(MongoDBTestCase):
def tearDown(self):
for collection in list_collection_names(self.db):
self.db.drop_collection(collection)

def test_constructor_cls(self):
# Ensures _cls is properly set during construction
# and when object gets reloaded (prevent regression of #1950)
class EmbedData(EmbeddedDocument):
data = StringField()
meta = {"allow_inheritance": True}

class DataDoc(Document):
name = StringField()
embed = EmbeddedDocumentField(EmbedData)
meta = {"allow_inheritance": True}

test_doc = DataDoc(name="test", embed=EmbedData(data="data"))
assert test_doc._cls == "DataDoc"
assert test_doc.embed._cls == "EmbedData"
test_doc.save()
saved_doc = DataDoc.objects.with_id(test_doc.id)
assert test_doc._cls == saved_doc._cls
assert test_doc.embed._cls == saved_doc.embed._cls
test_doc.delete()

def test_superclasses(self):
"""Ensure that the correct list of superclasses is assembled."""

class Animal(Document):
meta = {"allow_inheritance": True}

class Fish(Animal):
pass

class Guppy(Fish):
pass

class Mammal(Animal):
pass

class Dog(Mammal):
pass

class Human(Mammal):
pass

assert Animal._superclasses == ()
assert Fish._superclasses == ("Animal",)
assert Guppy._superclasses == ("Animal", "Animal.Fish")
assert Mammal._superclasses == ("Animal",)
assert Dog._superclasses == ("Animal", "Animal.Mammal")
assert Human._superclasses == ("Animal", "Animal.Mammal")

def test_external_superclasses(self):
"""Ensure that the correct list of super classes is assembled when
importing part of the model.
"""

class Animal(Base):
pass

class Fish(Animal):
pass

class Guppy(Fish):
pass

class Mammal(Animal):
pass

class Dog(Mammal):
pass

class Human(Mammal):
pass

assert Animal._superclasses == ("Base",)
assert Fish._superclasses == ("Base", "Base.Animal")
assert Guppy._superclasses == ("Base", "Base.Animal", "Base.Animal.Fish")
assert Mammal._superclasses == ("Base", "Base.Animal")
assert Dog._superclasses == ("Base", "Base.Animal", "Base.Animal.Mammal")
assert Human._superclasses == ("Base", "Base.Animal", "Base.Animal.Mammal")

def test_subclasses(self):
"""Ensure that the correct list of _subclasses (subclasses) is
assembled.
"""

class Animal(Document):
meta = {"allow_inheritance": True}

class Fish(Animal):
pass

class Guppy(Fish):
pass

class Mammal(Animal):
pass

class Dog(Mammal):
pass

class Human(Mammal):
pass

assert Animal._subclasses == (
"Animal",
"Animal.Fish",
"Animal.Fish.Guppy",
"Animal.Mammal",
"Animal.Mammal.Dog",
"Animal.Mammal.Human",
)
assert Fish._subclasses == ("Animal.Fish", "Animal.Fish.Guppy")
assert Guppy._subclasses == ("Animal.Fish.Guppy",)
assert Mammal._subclasses == (
"Animal.Mammal",
"Animal.Mammal.Dog",
"Animal.Mammal.Human",
)
assert Human._subclasses == ("Animal.Mammal.Human",)

def test_external_subclasses(self):
"""Ensure that the correct list of _subclasses (subclasses) is
assembled when importing part of the model.
"""

class Animal(Base):
pass

class Fish(Animal):
pass

class Guppy(Fish):
pass

class Mammal(Animal):
pass

class Dog(Mammal):
pass

class Human(Mammal):
pass

assert Animal._subclasses == (
"Base.Animal",
"Base.Animal.Fish",
"Base.Animal.Fish.Guppy",
"Base.Animal.Mammal",
"Base.Animal.Mammal.Dog",
"Base.Animal.Mammal.Human",
)
assert Fish._subclasses == ("Base.Animal.Fish", "Base.Animal.Fish.Guppy")
assert Guppy._subclasses == ("Base.Animal.Fish.Guppy",)
assert Mammal._subclasses == (
"Base.Animal.Mammal",
"Base.Animal.Mammal.Dog",
"Base.Animal.Mammal.Human",
)
assert Human._subclasses == ("Base.Animal.Mammal.Human",)

def test_dynamic_declarations(self):
"""Test that declaring an extra class updates meta data"""

class Animal(Document):
meta = {"allow_inheritance": True}

assert Animal._superclasses == ()
assert Animal._subclasses == ("Animal",)

# Test dynamically adding a class changes the meta data
class Fish(Animal):
pass

assert Animal._superclasses == ()
assert Animal._subclasses == ("Animal", "Animal.Fish")

assert Fish._superclasses == ("Animal",)
assert Fish._subclasses == ("Animal.Fish",)

# Test dynamically adding an inherited class changes the meta data
class Pike(Fish):
pass

assert Animal._superclasses == ()
assert Animal._subclasses == ("Animal", "Animal.Fish", "Animal.Fish.Pike")

assert Fish._superclasses == ("Animal",)
assert Fish._subclasses == ("Animal.Fish", "Animal.Fish.Pike")

assert Pike._superclasses == ("Animal", "Animal.Fish")
assert Pike._subclasses == ("Animal.Fish.Pike",)

def test_inheritance_meta_data(self):
"""Ensure that document may inherit fields from a superclass document."""

class Person(Document):
name = StringField()
age = IntField()

meta = {"allow_inheritance": True}

class Employee(Person):
salary = IntField()

assert ["_cls", "age", "id", "name", "salary"] == sorted(
Employee._fields.keys()
)
assert Employee._get_collection_name() == Person._get_collection_name()

def test_inheritance_to_mongo_keys(self):
"""Ensure that document may inherit fields from a superclass document."""

class Person(Document):
name = StringField()
age = IntField()

meta = {"allow_inheritance": True}

class Employee(Person):
salary = IntField()

assert ["_cls", "age", "id", "name", "salary"] == sorted(
Employee._fields.keys()
)
assert Person(name="Bob", age=35).to_mongo().keys() == ["_cls", "name", "age"]
assert Employee(name="Bob", age=35, salary=0).to_mongo().keys() == [
"_cls",
"name",
"age",
"salary",
]
assert Employee._get_collection_name() == Person._get_collection_name()

def test_indexes_and_multiple_inheritance(self):
"""Ensure that all of the indexes are created for a document with
multiple inheritance.
"""

class A(Document):
a = StringField()

meta = {"allow_inheritance": True, "indexes": ["a"]}

class B(Document):
b = StringField()

meta = {"allow_inheritance": True, "indexes": ["b"]}

class C(A, B):
pass

A.drop_collection()
B.drop_collection()
C.drop_collection()

C.ensure_indexes()

assert sorted(
idx["key"] for idx in C._get_collection().index_information().values()
) == sorted([[("_cls", 1), ("b", 1)], [("_id", 1)], [("_cls", 1), ("a", 1)]])

def test_polymorphic_queries(self):
"""Ensure that the correct subclasses are returned from a query"""

class Animal(Document):
meta = {"allow_inheritance": True}

class Fish(Animal):
pass

class Mammal(Animal):
pass

class Dog(Mammal):
pass

class Human(Mammal):
pass

Animal.drop_collection()

Animal().save()
Fish().save()
Mammal().save()
Dog().save()
Human().save()

classes = [obj.__class__ for obj in Animal.objects]
assert classes == [Animal, Fish, Mammal, Dog, Human]

classes = [obj.__class__ for obj in Mammal.objects]
assert classes == [Mammal, Dog, Human]

classes = [obj.__class__ for obj in Human.objects]
assert classes == [Human]

def test_allow_inheritance(self):
"""Ensure that inheritance is disabled by default on simple
classes and that _cls will not be used.
"""

class Animal(Document):
name = StringField()

# can't inherit because Animal didn't explicitly allow inheritance
with pytest.raises(ValueError, match="Document Animal may not be subclassed"):

class Dog(Animal):
pass

# Check that _cls etc aren't present on simple documents
dog = Animal(name="dog").save()
assert dog.to_mongo().keys() == ["_id", "name"]

collection = self.db[Animal._get_collection_name()]
obj = collection.find_one()
assert "_cls" not in obj

def test_cant_turn_off_inheritance_on_subclass(self):
"""Ensure if inheritance is on in a subclass you cant turn it off."""

class Animal(Document):
name = StringField()
meta = {"allow_inheritance": True}

with pytest.raises(ValueError) as exc_info:

class Mammal(Animal):
meta = {"allow_inheritance": False}

assert (
str(exc_info.value)
== 'Only direct subclasses of Document may set "allow_inheritance" to False'
)

def test_allow_inheritance_abstract_document(self):
"""Ensure that abstract documents can set inheritance rules and that
_cls will not be used.
"""

class FinalDocument(Document):
meta = {"abstract": True, "allow_inheritance": False}

class Animal(FinalDocument):
name = StringField()

with pytest.raises(ValueError):

class Mammal(Animal):
pass

# Check that _cls isn't present in simple documents
doc = Animal(name="dog")
assert "_cls" not in doc.to_mongo()

def test_using_abstract_class_in_reference_field(self):
# Ensures no regression of #1920
class AbstractHuman(Document):
meta = {"abstract": True}

class Dad(AbstractHuman):
name = StringField()

class Home(Document):
dad = ReferenceField(AbstractHuman) # Referencing the abstract class
address = StringField()

dad = Dad(name="5").save()
Home(dad=dad, address="street").save()

home = Home.objects.first()
home.address = "garbage"
home.save() # Was failing with ValidationError

def test_abstract_class_referencing_self(self):
# Ensures no regression of #1920
class Human(Document):
meta = {"abstract": True}
creator = ReferenceField("self", dbref=True)

class User(Human):
name = StringField()

user = User(name="John").save()
user2 = User(name="Foo", creator=user).save()

user2 = User.objects.with_id(user2.id)
user2.name = "Bar"
user2.save() # Was failing with ValidationError

def test_abstract_handle_ids_in_metaclass_properly(self):
class City(Document):
continent = StringField()
meta = {"abstract": True, "allow_inheritance": False}

class EuropeanCity(City):
name = StringField()

berlin = EuropeanCity(name="Berlin", continent="Europe")
assert len(berlin._db_field_map) == len(berlin._fields_ordered)
assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered)
assert len(berlin._fields_ordered) == 3
assert berlin._fields_ordered[0] == "id"

def test_auto_id_not_set_if_specific_in_parent_class(self):
class City(Document):
continent = StringField()
city_id = IntField(primary_key=True)
meta = {"abstract": True, "allow_inheritance": False}

class EuropeanCity(City):
name = StringField()

berlin = EuropeanCity(name="Berlin", continent="Europe")
assert len(berlin._db_field_map) == len(berlin._fields_ordered)
assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered)
assert len(berlin._fields_ordered) == 3
assert berlin._fields_ordered[0] == "city_id"

def test_auto_id_vs_non_pk_id_field(self):
class City(Document):
continent = StringField()
id = IntField()
meta = {"abstract": True, "allow_inheritance": False}

class EuropeanCity(City):
name = StringField()

berlin = EuropeanCity(name="Berlin", continent="Europe")
assert len(berlin._db_field_map) == len(berlin._fields_ordered)
assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered)
assert len(berlin._fields_ordered) == 4
assert berlin._fields_ordered[0] == "auto_id_0"
berlin.save()
assert berlin.pk == berlin.auto_id_0

def test_abstract_document_creation_does_not_fail(self):
class City(Document):
continent = StringField()
meta = {"abstract": True, "allow_inheritance": False}

city = City(continent="asia")
assert city.pk is None
# TODO: expected error? Shouldn't we create a new error type?
with pytest.raises(KeyError):
city.pk = 1

def test_allow_inheritance_embedded_document(self):
"""Ensure embedded documents respect inheritance."""

class Comment(EmbeddedDocument):
content = StringField()

with pytest.raises(ValueError):

class SpecialComment(Comment):
pass

doc = Comment(content="test")
assert "_cls" not in doc.to_mongo()

class Comment(EmbeddedDocument):
content = StringField()
meta = {"allow_inheritance": True}

doc = Comment(content="test")
assert "_cls" in doc.to_mongo()

def test_document_inheritance(self):
"""Ensure mutliple inheritance of abstract documents"""

class DateCreatedDocument(Document):
meta = {"allow_inheritance": True, "abstract": True}

class DateUpdatedDocument(Document):
meta = {"allow_inheritance": True, "abstract": True}

class MyDocument(DateCreatedDocument, DateUpdatedDocument):
pass

def test_abstract_documents(self):
"""Ensure that a document superclass can be marked as abstract
thereby not using it as the name for the collection."""

defaults = {
"index_background": True,
"index_opts": {"hello": "world"},
"allow_inheritance": True,
"queryset_class": "QuerySet",
"db_alias": "myDB",
"shard_key": ("hello", "world"),
}

meta_settings = {"abstract": True}
meta_settings.update(defaults)

class Animal(Document):
name = StringField()
meta = meta_settings

class Fish(Animal):
pass

class Guppy(Fish):
pass

class Mammal(Animal):
meta = {"abstract": True}

class Human(Mammal):
pass

for k, v in defaults.items():
for cls in [Animal, Fish, Guppy]:
assert cls._meta[k] == v

assert "collection" not in Animal._meta
assert "collection" not in Mammal._meta

assert Animal._get_collection_name() is None
assert Mammal._get_collection_name() is None

assert Fish._get_collection_name() == "fish"
assert Guppy._get_collection_name() == "fish"
assert Human._get_collection_name() == "human"

# ensure that a subclass of a non-abstract class can't be abstract
with pytest.raises(ValueError):

class EvilHuman(Human):
evil = BooleanField(default=True)
meta = {"abstract": True}

def test_abstract_embedded_documents(self):
# 789: EmbeddedDocument shouldn't inherit abstract
class A(EmbeddedDocument):
meta = {"abstract": True}

class B(A):
pass

assert not B._meta["abstract"]

def test_inherited_collections(self):
"""Ensure that subclassed documents don't override parents'
collections
"""

class Drink(Document):
name = StringField()
meta = {"allow_inheritance": True}

class Drinker(Document):
drink = GenericReferenceField()

try:
warnings.simplefilter("error")

class AcloholicDrink(Drink):
meta = {"collection": "booze"}

except SyntaxWarning:
warnings.simplefilter("ignore")

class AlcoholicDrink(Drink):
meta = {"collection": "booze"}

else:
raise AssertionError("SyntaxWarning should be triggered")

warnings.resetwarnings()

Drink.drop_collection()
AlcoholicDrink.drop_collection()
Drinker.drop_collection()

red_bull = Drink(name="Red Bull")
red_bull.save()

programmer = Drinker(drink=red_bull)
programmer.save()

beer = AlcoholicDrink(name="Beer")
beer.save()
real_person = Drinker(drink=beer)
real_person.save()

assert Drinker.objects[0].drink.name == red_bull.name
assert Drinker.objects[1].drink.name == beer.name


if __name__ == "__main__":
unittest.main()
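Aside: a short sketch (not part of the changeset) of the _cls mechanics these inheritance tests rely on. Enabling allow_inheritance stores a "_cls" marker encoding the class hierarchy in each document, which is what makes the polymorphic queries above possible:

from mongoengine import Document, StringField

class Animal(Document):
    name = StringField()
    meta = {"allow_inheritance": True}

class Dog(Animal):
    pass

dog = Dog(name="Rex")
# The hierarchy is encoded dot-separated in the stored document:
assert dog.to_mongo()["_cls"] == "Animal.Dog"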
File diff suppressed because it is too large
@@ -1,22 +1,14 @@
import unittest
import uuid

from nose.plugins.skip import SkipTest
from datetime import datetime

from bson import ObjectId

import pymongo

from mongoengine import *

__all__ = ("TestJson",)
from tests.utils import MongoDBTestCase


class TestJson(unittest.TestCase):

def setUp(self):
connect(db='mongoenginetest')

class TestJson(MongoDBTestCase):
def test_json_names(self):
"""
Going to test reported issue:
@@ -25,22 +17,24 @@ class TestJson(unittest.TestCase):
a to_json with the original class names and not the abreviated
mongodb document keys
"""

class Embedded(EmbeddedDocument):
string = StringField(db_field='s')
string = StringField(db_field="s")

class Doc(Document):
string = StringField(db_field='s')
embedded = EmbeddedDocumentField(Embedded, db_field='e')
string = StringField(db_field="s")
embedded = EmbeddedDocumentField(Embedded, db_field="e")

doc = Doc(string="Hello", embedded=Embedded(string="Inner Hello"))
doc_json = doc.to_json(sort_keys=True, use_db_field=False,separators=(',', ':'))
doc_json = doc.to_json(
sort_keys=True, use_db_field=False, separators=(",", ":")
)

expected_json = """{"embedded":{"string":"Inner Hello"},"string":"Hello"}"""

self.assertEqual( doc_json, expected_json)
assert doc_json == expected_json

def test_json_simple(self):

class Embedded(EmbeddedDocument):
string = StringField()

@@ -49,22 +43,20 @@ class TestJson(unittest.TestCase):
embedded_field = EmbeddedDocumentField(Embedded)

def __eq__(self, other):
return (self.string == other.string and
self.embedded_field == other.embedded_field)
return (
self.string == other.string
and self.embedded_field == other.embedded_field
)

doc = Doc(string="Hi", embedded_field=Embedded(string="Hi"))

doc_json = doc.to_json(sort_keys=True, separators=(',', ':'))
doc_json = doc.to_json(sort_keys=True, separators=(",", ":"))
expected_json = """{"embedded_field":{"string":"Hi"},"string":"Hi"}"""
self.assertEqual(doc_json, expected_json)
assert doc_json == expected_json

self.assertEqual(doc, Doc.from_json(doc.to_json()))
assert doc == Doc.from_json(doc.to_json())

def test_json_complex(self):

if pymongo.version_tuple[0] <= 2 and pymongo.version_tuple[1] <= 3:
raise SkipTest("Need pymongo 2.4 as has a fix for DBRefs")

class EmbeddedDoc(EmbeddedDocument):
pass

@@ -72,41 +64,43 @@ class TestJson(unittest.TestCase):
pass

class Doc(Document):
string_field = StringField(default='1')
string_field = StringField(default="1")
int_field = IntField(default=1)
float_field = FloatField(default=1.1)
boolean_field = BooleanField(default=True)
datetime_field = DateTimeField(default=datetime.now)
embedded_document_field = EmbeddedDocumentField(EmbeddedDoc,
default=lambda: EmbeddedDoc())
embedded_document_field = EmbeddedDocumentField(
EmbeddedDoc, default=lambda: EmbeddedDoc()
)
list_field = ListField(default=lambda: [1, 2, 3])
dict_field = DictField(default=lambda: {"hello": "world"})
objectid_field = ObjectIdField(default=ObjectId)
reference_field = ReferenceField(Simple, default=lambda:
Simple().save())
reference_field = ReferenceField(Simple, default=lambda: Simple().save())
map_field = MapField(IntField(), default=lambda: {"simple": 1})
decimal_field = DecimalField(default=1.0)
complex_datetime_field = ComplexDateTimeField(default=datetime.now)
url_field = URLField(default="http://mongoengine.org")
dynamic_field = DynamicField(default=1)
generic_reference_field = GenericReferenceField(
default=lambda: Simple().save())
sorted_list_field = SortedListField(IntField(),
default=lambda: [1, 2, 3])
default=lambda: Simple().save()
)
sorted_list_field = SortedListField(IntField(), default=lambda: [1, 2, 3])
email_field = EmailField(default="ross@example.com")
geo_point_field = GeoPointField(default=lambda: [1, 2])
sequence_field = SequenceField()
uuid_field = UUIDField(default=uuid.uuid4)
generic_embedded_document_field = GenericEmbeddedDocumentField(
default=lambda: EmbeddedDoc())
default=lambda: EmbeddedDoc()
)

def __eq__(self, other):
import json

return json.loads(self.to_json()) == json.loads(other.to_json())

doc = Doc()
self.assertEqual(doc, Doc.from_json(doc.to_json()))
assert doc == Doc.from_json(doc.to_json())


if __name__ == '__main__':
if __name__ == "__main__":
unittest.main()
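Aside: a sketch of the db_field aliasing the json tests above exercise (illustrative, using an unsaved hypothetical document): with use_db_field=False the Python attribute names are serialized, otherwise the short db_field aliases are.

from mongoengine import Document, StringField

class Doc(Document):
    string = StringField(db_field="s")

doc = Doc(string="Hello")
# Attribute name vs. stored db_field alias in the JSON output:
assert doc.to_json(use_db_field=False, separators=(",", ":")) == '{"string":"Hello"}'
assert doc.to_json(separators=(",", ":")) == '{"s":"Hello"}'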
tests/document/test_validation.py (217 lines, new file):
@@ -0,0 +1,217 @@
import unittest
from datetime import datetime

import pytest

from mongoengine import *
from tests.utils import MongoDBTestCase


class TestValidatorError(MongoDBTestCase):
def test_to_dict(self):
"""Ensure a ValidationError handles error to_dict correctly."""
error = ValidationError("root")
assert error.to_dict() == {}

# 1st level error schema
error.errors = {"1st": ValidationError("bad 1st")}
assert "1st" in error.to_dict()
assert error.to_dict()["1st"] == "bad 1st"

# 2nd level error schema
error.errors = {
"1st": ValidationError(
"bad 1st", errors={"2nd": ValidationError("bad 2nd")}
)
}
assert "1st" in error.to_dict()
assert isinstance(error.to_dict()["1st"], dict)
assert "2nd" in error.to_dict()["1st"]
assert error.to_dict()["1st"]["2nd"] == "bad 2nd"

# moar levels
error.errors = {
"1st": ValidationError(
"bad 1st",
errors={
"2nd": ValidationError(
"bad 2nd",
errors={
"3rd": ValidationError(
"bad 3rd", errors={"4th": ValidationError("Inception")}
)
},
)
},
)
}
assert "1st" in error.to_dict()
assert "2nd" in error.to_dict()["1st"]
assert "3rd" in error.to_dict()["1st"]["2nd"]
assert "4th" in error.to_dict()["1st"]["2nd"]["3rd"]
assert error.to_dict()["1st"]["2nd"]["3rd"]["4th"] == "Inception"

assert error.message == "root(2nd.3rd.4th.Inception: ['1st'])"

def test_model_validation(self):
class User(Document):
username = StringField(primary_key=True)
name = StringField(required=True)

try:
User().validate()
except ValidationError as e:
assert "User:None" in e.message
assert e.to_dict() == {
"username": "Field is required",
"name": "Field is required",
}

user = User(username="RossC0", name="Ross").save()
user.name = None
try:
user.save()
except ValidationError as e:
assert "User:RossC0" in e.message
assert e.to_dict() == {"name": "Field is required"}

def test_fields_rewrite(self):
class BasePerson(Document):
name = StringField()
age = IntField()
meta = {"abstract": True}

class Person(BasePerson):
name = StringField(required=True)

p = Person(age=15)
with pytest.raises(ValidationError):
p.validate()

def test_embedded_document_validation(self):
"""Ensure that embedded documents may be validated."""

class Comment(EmbeddedDocument):
date = DateTimeField()
content = StringField(required=True)

comment = Comment()
with pytest.raises(ValidationError):
comment.validate()

comment.content = "test"
comment.validate()

comment.date = 4
with pytest.raises(ValidationError):
comment.validate()

comment.date = datetime.now()
comment.validate()
assert comment._instance is None

def test_embedded_db_field_validate(self):
class SubDoc(EmbeddedDocument):
val = IntField(required=True)

class Doc(Document):
id = StringField(primary_key=True)
e = EmbeddedDocumentField(SubDoc, db_field="eb")

try:
Doc(id="bad").validate()
except ValidationError as e:
assert "SubDoc:None" in e.message
assert e.to_dict() == {"e": {"val": "OK could not be converted to int"}}

Doc.drop_collection()

Doc(id="test", e=SubDoc(val=15)).save()

doc = Doc.objects.first()
keys = doc._data.keys()
assert 2 == len(keys)
assert "e" in keys
assert "id" in keys

doc.e.val = "OK"
try:
doc.save()
except ValidationError as e:
assert "Doc:test" in e.message
assert e.to_dict() == {"e": {"val": "OK could not be converted to int"}}

def test_embedded_weakref(self):
class SubDoc(EmbeddedDocument):
val = IntField(required=True)

class Doc(Document):
e = EmbeddedDocumentField(SubDoc, db_field="eb")

Doc.drop_collection()

d1 = Doc()
d2 = Doc()

s = SubDoc()

with pytest.raises(ValidationError):
s.validate()

d1.e = s
d2.e = s

del d1

with pytest.raises(ValidationError):
d2.validate()

def test_parent_reference_in_child_document(self):
"""
Test to ensure a ReferenceField can store a reference to a parent
class when inherited. Issue #954.
"""

class Parent(Document):
meta = {"allow_inheritance": True}
reference = ReferenceField("self")

class Child(Parent):
pass

parent = Parent()
parent.save()

child = Child(reference=parent)

# Saving child should not raise a ValidationError
try:
child.save()
except ValidationError as e:
self.fail("ValidationError raised: %s" % e.message)

def test_parent_reference_set_as_attribute_in_child_document(self):
"""
Test to ensure a ReferenceField can store a reference to a parent
class when inherited and when set via attribute. Issue #954.
"""

class Parent(Document):
meta = {"allow_inheritance": True}
reference = ReferenceField("self")

class Child(Parent):
pass

parent = Parent()
parent.save()

child = Child()
child.reference = parent

# Saving the child should not raise a ValidationError
child.save()


if __name__ == "__main__":
unittest.main()
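Aside: a sketch of the nested-error shape asserted in test_to_dict above (illustrative, not part of the changeset). ValidationError.to_dict flattens one level per nested errors dict, so a two-level failure maps to a two-level plain dict:

from mongoengine import ValidationError

error = ValidationError("root")
error.errors = {
    "outer": ValidationError("bad outer", errors={"inner": ValidationError("bad inner")})
}
# Leaf errors collapse to their message strings:
assert error.to_dict() == {"outer": {"inner": "bad inner"}}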
@@ -1,214 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
from datetime import datetime

from mongoengine import *

__all__ = ("ValidatorErrorTest",)


class ValidatorErrorTest(unittest.TestCase):

def setUp(self):
connect(db='mongoenginetest')

def test_to_dict(self):
"""Ensure a ValidationError handles error to_dict correctly.
"""
error = ValidationError('root')
self.assertEqual(error.to_dict(), {})

# 1st level error schema
error.errors = {'1st': ValidationError('bad 1st'), }
self.assertTrue('1st' in error.to_dict())
self.assertEqual(error.to_dict()['1st'], 'bad 1st')

# 2nd level error schema
error.errors = {'1st': ValidationError('bad 1st', errors={
'2nd': ValidationError('bad 2nd'),
})}
self.assertTrue('1st' in error.to_dict())
self.assertTrue(isinstance(error.to_dict()['1st'], dict))
self.assertTrue('2nd' in error.to_dict()['1st'])
self.assertEqual(error.to_dict()['1st']['2nd'], 'bad 2nd')

# moar levels
error.errors = {'1st': ValidationError('bad 1st', errors={
'2nd': ValidationError('bad 2nd', errors={
'3rd': ValidationError('bad 3rd', errors={
'4th': ValidationError('Inception'),
}),
}),
})}
self.assertTrue('1st' in error.to_dict())
self.assertTrue('2nd' in error.to_dict()['1st'])
self.assertTrue('3rd' in error.to_dict()['1st']['2nd'])
self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd'])
self.assertEqual(error.to_dict()['1st']['2nd']['3rd']['4th'],
'Inception')

self.assertEqual(error.message, "root(2nd.3rd.4th.Inception: ['1st'])")

def test_model_validation(self):

class User(Document):
username = StringField(primary_key=True)
name = StringField(required=True)

try:
User().validate()
except ValidationError as e:
self.assertTrue("User:None" in e.message)
self.assertEqual(e.to_dict(), {
'username': 'Field is required',
'name': 'Field is required'})

user = User(username="RossC0", name="Ross").save()
user.name = None
try:
user.save()
except ValidationError as e:
self.assertTrue("User:RossC0" in e.message)
self.assertEqual(e.to_dict(), {
'name': 'Field is required'})

def test_fields_rewrite(self):
class BasePerson(Document):
name = StringField()
age = IntField()
meta = {'abstract': True}

class Person(BasePerson):
name = StringField(required=True)

p = Person(age=15)
self.assertRaises(ValidationError, p.validate)

def test_embedded_document_validation(self):
"""Ensure that embedded documents may be validated.
"""
class Comment(EmbeddedDocument):
date = DateTimeField()
content = StringField(required=True)

comment = Comment()
self.assertRaises(ValidationError, comment.validate)

comment.content = 'test'
comment.validate()

comment.date = 4
self.assertRaises(ValidationError, comment.validate)

comment.date = datetime.now()
comment.validate()
self.assertEqual(comment._instance, None)

def test_embedded_db_field_validate(self):

class SubDoc(EmbeddedDocument):
val = IntField(required=True)

class Doc(Document):
id = StringField(primary_key=True)
e = EmbeddedDocumentField(SubDoc, db_field='eb')

try:
Doc(id="bad").validate()
except ValidationError as e:
self.assertTrue("SubDoc:None" in e.message)
self.assertEqual(e.to_dict(), {
"e": {'val': 'OK could not be converted to int'}})

Doc.drop_collection()

Doc(id="test", e=SubDoc(val=15)).save()

doc = Doc.objects.first()
keys = doc._data.keys()
self.assertEqual(2, len(keys))
self.assertTrue('e' in keys)
self.assertTrue('id' in keys)

doc.e.val = "OK"
try:
doc.save()
except ValidationError as e:
self.assertTrue("Doc:test" in e.message)
self.assertEqual(e.to_dict(), {
"e": {'val': 'OK could not be converted to int'}})

def test_embedded_weakref(self):

class SubDoc(EmbeddedDocument):
val = IntField(required=True)

class Doc(Document):
e = EmbeddedDocumentField(SubDoc, db_field='eb')

Doc.drop_collection()

d1 = Doc()
d2 = Doc()

s = SubDoc()

self.assertRaises(ValidationError, s.validate)

d1.e = s
d2.e = s

del d1

self.assertRaises(ValidationError, d2.validate)

def test_parent_reference_in_child_document(self):
"""
Test to ensure a ReferenceField can store a reference to a parent
class when inherited. Issue #954.
"""
class Parent(Document):
meta = {'allow_inheritance': True}
reference = ReferenceField('self')

class Child(Parent):
pass

parent = Parent()
parent.save()

child = Child(reference=parent)

# Saving child should not raise a ValidationError
try:
child.save()
except ValidationError as e:
self.fail("ValidationError raised: %s" % e.message)

def test_parent_reference_set_as_attribute_in_child_document(self):
"""
Test to ensure a ReferenceField can store a reference to a parent
class when inherited and when set via attribute. Issue #954.
"""
class Parent(Document):
meta = {'allow_inheritance': True}
reference = ReferenceField('self')

class Child(Parent):
pass

parent = Parent()
parent.save()

child = Child()
child.reference = parent

# Saving the child should not raise a ValidationError
try:
child.save()
except ValidationError as e:
self.fail("ValidationError raised: %s" % e.message)


if __name__ == '__main__':
unittest.main()
@@ -1,3 +0,0 @@
from fields import *
from file_tests import *
from geo import *

File diff suppressed because it is too large.
tests/fields/test_binary_field.py (140 lines, new file):
@@ -0,0 +1,140 @@
import uuid

import pytest
from bson import Binary

from mongoengine import *
from tests.utils import MongoDBTestCase

BIN_VALUE = "\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5".encode(
"latin-1"
)


class TestBinaryField(MongoDBTestCase):
def test_binary_fields(self):
"""Ensure that binary fields can be stored and retrieved."""

class Attachment(Document):
content_type = StringField()
blob = BinaryField()

BLOB = b"\xe6\x00\xc4\xff\x07"
MIME_TYPE = "application/octet-stream"

Attachment.drop_collection()

attachment = Attachment(content_type=MIME_TYPE, blob=BLOB)
attachment.save()

attachment_1 = Attachment.objects().first()
assert MIME_TYPE == attachment_1.content_type
assert BLOB == bytes(attachment_1.blob)

def test_validation_succeeds(self):
"""Ensure that valid values can be assigned to binary fields."""

class AttachmentRequired(Document):
blob = BinaryField(required=True)

class AttachmentSizeLimit(Document):
blob = BinaryField(max_bytes=4)

attachment_required = AttachmentRequired()
with pytest.raises(ValidationError):
attachment_required.validate()
attachment_required.blob = Binary(b"\xe6\x00\xc4\xff\x07")
attachment_required.validate()

_5_BYTES = b"\xe6\x00\xc4\xff\x07"
_4_BYTES = b"\xe6\x00\xc4\xff"
with pytest.raises(ValidationError):
AttachmentSizeLimit(blob=_5_BYTES).validate()
AttachmentSizeLimit(blob=_4_BYTES).validate()

def test_validation_fails(self):
"""Ensure that invalid values cannot be assigned to binary fields."""

class Attachment(Document):
blob = BinaryField()

for invalid_data in (2, "Im_a_unicode", ["some_str"]):
with pytest.raises(ValidationError):
Attachment(blob=invalid_data).validate()

def test__primary(self):
class Attachment(Document):
id = BinaryField(primary_key=True)

Attachment.drop_collection()
binary_id = uuid.uuid4().bytes
att = Attachment(id=binary_id).save()
assert 1 == Attachment.objects.count()
assert 1 == Attachment.objects.filter(id=att.id).count()
att.delete()
assert 0 == Attachment.objects.count()

def test_primary_filter_by_binary_pk_as_str(self):
class Attachment(Document):
id = BinaryField(primary_key=True)

Attachment.drop_collection()
binary_id = uuid.uuid4().bytes
att = Attachment(id=binary_id).save()
assert 1 == Attachment.objects.filter(id=binary_id).count()
att.delete()
assert 0 == Attachment.objects.count()

def test_match_querying_with_bytes(self):
class MyDocument(Document):
bin_field = BinaryField()

MyDocument.drop_collection()

doc = MyDocument(bin_field=BIN_VALUE).save()
matched_doc = MyDocument.objects(bin_field=BIN_VALUE).first()
assert matched_doc.id == doc.id

def test_match_querying_with_binary(self):
class MyDocument(Document):
bin_field = BinaryField()

MyDocument.drop_collection()

doc = MyDocument(bin_field=BIN_VALUE).save()

matched_doc = MyDocument.objects(bin_field=Binary(BIN_VALUE)).first()
assert matched_doc.id == doc.id

def test_modify_operation__set(self):
"""Ensures no regression of bug #1127"""

class MyDocument(Document):
some_field = StringField()
bin_field = BinaryField()

MyDocument.drop_collection()

doc = MyDocument.objects(some_field="test").modify(
upsert=True, new=True, set__bin_field=BIN_VALUE
)
assert doc.some_field == "test"
assert doc.bin_field == BIN_VALUE

def test_update_one(self):
"""Ensures no regression of bug #1127"""

class MyDocument(Document):
bin_field = BinaryField()

MyDocument.drop_collection()

bin_data = b"\xe6\x00\xc4\xff\x07"
doc = MyDocument(bin_field=bin_data).save()

n_updated = MyDocument.objects(bin_field=bin_data).update_one(
bin_field=BIN_VALUE
)
assert n_updated == 1
fetched = MyDocument.objects.with_id(doc.id)
assert fetched.bin_field == BIN_VALUE
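Aside: a minimal sketch of the round-trip the binary-field tests above exercise (illustrative; assumes a local MongoDB and a hypothetical database name). Raw bytes are wrapped in bson.Binary on the way into MongoDB and compare equal to the original bytes on the way out:

from mongoengine import Document, BinaryField, connect

connect("binary_demo")  # hypothetical database name

class Attachment(Document):
    blob = BinaryField()

att = Attachment(blob=b"\xe6\x00\xc4\xff\x07").save()
fetched = Attachment.objects.first()
# bytes() unwraps the stored bson.Binary back to the original payload:
assert bytes(fetched.blob) == b"\xe6\x00\xc4\xff\x07"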
tests/fields/test_boolean_field.py (62 lines, new file):
@@ -0,0 +1,62 @@
import pytest

from mongoengine import *
from tests.utils import MongoDBTestCase, get_as_pymongo


class TestBooleanField(MongoDBTestCase):
def test_storage(self):
class Person(Document):
admin = BooleanField()

person = Person(admin=True)
person.save()
assert get_as_pymongo(person) == {"_id": person.id, "admin": True}

def test_construction_does_not_fail_uncastable_value(self):
class BoolFail:
def __bool__(self):
return "bogus"

class Person(Document):
admin = BooleanField()

person = Person(admin=BoolFail())
person.admin == "bogus"

def test_validation(self):
"""Ensure that invalid values cannot be assigned to boolean
fields.
"""

class Person(Document):
admin = BooleanField()

person = Person()
person.admin = True
person.validate()

person.admin = 2
with pytest.raises(ValidationError):
person.validate()
person.admin = "Yes"
with pytest.raises(ValidationError):
person.validate()
person.admin = "False"
with pytest.raises(ValidationError):
person.validate()

def test_weirdness_constructor(self):
"""When attribute is set in contructor, it gets cast into a bool
which causes some weird behavior. We dont necessarily want to maintain this behavior
but its a known issue
"""

class Person(Document):
admin = BooleanField()

new_person = Person(admin="False")
assert new_person.admin

new_person = Person(admin="0")
assert new_person.admin
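Aside: a sketch of the constructor-coercion quirk documented in test_weirdness_constructor above (illustrative, not part of the changeset). Values passed to the constructor are cast with bool(), so any non-empty string becomes truthy, while plain attribute assignment leaves the raw value in place for validation to reject:

from mongoengine import Document, BooleanField

class Person(Document):
    admin = BooleanField()

# Constructor path: bool("False") is True, so the field ends up truthy.
assert Person(admin="False").admin

# Attribute path: the string is kept as-is and validate() would raise.
person = Person()
person.admin = "False"
assert person.admin == "False"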
tests/fields/test_cached_reference_field.py (393 lines, new file):
@@ -0,0 +1,393 @@
from decimal import Decimal

import pytest

from mongoengine import (
    CachedReferenceField,
    DecimalField,
    Document,
    EmbeddedDocument,
    EmbeddedDocumentField,
    InvalidDocumentError,
    ListField,
    ReferenceField,
    StringField,
    ValidationError,
)
from tests.utils import MongoDBTestCase


class TestCachedReferenceField(MongoDBTestCase):
    def test_constructor_fail_bad_document_type(self):
        with pytest.raises(
            ValidationError, match="must be a document class or a string"
        ):
            CachedReferenceField(document_type=0)

    def test_get_and_save(self):
        """
        Tests #1047: CachedReferenceField creates DBRefs on to_python,
        but can't save them on to_mongo.
        """

        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocorrence(Document):
            person = StringField()
            animal = CachedReferenceField(Animal)

        Animal.drop_collection()
        Ocorrence.drop_collection()

        Ocorrence(
            person="testte", animal=Animal(name="Leopard", tag="heavy").save()
        ).save()
        p = Ocorrence.objects.get()
        p.person = "new_testte"
        p.save()

    def test_general_things(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocorrence(Document):
            person = StringField()
            animal = CachedReferenceField(Animal, fields=["tag"])

        Animal.drop_collection()
        Ocorrence.drop_collection()

        a = Animal(name="Leopard", tag="heavy")
        a.save()

        assert Animal._cached_reference_fields == [Ocorrence.animal]
        o = Ocorrence(person="teste", animal=a)
        o.save()

        p = Ocorrence(person="Wilson")
        p.save()

        assert Ocorrence.objects(animal=None).count() == 1

        assert a.to_mongo(fields=["tag"]) == {"tag": "heavy", "_id": a.pk}

        assert o.to_mongo()["animal"]["tag"] == "heavy"

        # counts
        Ocorrence(person="teste 2").save()
        Ocorrence(person="teste 3").save()

        count = Ocorrence.objects(animal__tag="heavy").count()
        assert count == 1

        ocorrence = Ocorrence.objects(animal__tag="heavy").first()
        assert ocorrence.person == "teste"
        assert isinstance(ocorrence.animal, Animal)

    def test_with_decimal(self):
        class PersonAuto(Document):
            name = StringField()
            salary = DecimalField()

        class SocialTest(Document):
            group = StringField()
            person = CachedReferenceField(PersonAuto, fields=("salary",))

        PersonAuto.drop_collection()
        SocialTest.drop_collection()

        p = PersonAuto(name="Alberto", salary=Decimal("7000.00"))
        p.save()

        s = SocialTest(group="dev", person=p)
        s.save()

        assert SocialTest.objects._collection.find_one({"person.salary": 7000.00}) == {
            "_id": s.pk,
            "group": s.group,
            "person": {"_id": p.pk, "salary": 7000.00},
        }

    def test_cached_reference_field_reference(self):
        class Group(Document):
            name = StringField()

        class Person(Document):
            name = StringField()
            group = ReferenceField(Group)

        class SocialData(Document):
            obs = StringField()
            tags = ListField(StringField())
            person = CachedReferenceField(Person, fields=("group",))

        Group.drop_collection()
        Person.drop_collection()
        SocialData.drop_collection()

        g1 = Group(name="dev")
        g1.save()

        g2 = Group(name="designers")
        g2.save()

        p1 = Person(name="Alberto", group=g1)
        p1.save()

        p2 = Person(name="Andre", group=g1)
        p2.save()

        p3 = Person(name="Afro design", group=g2)
        p3.save()

        s1 = SocialData(obs="testing 123", person=p1, tags=["tag1", "tag2"])
        s1.save()

        s2 = SocialData(obs="testing 321", person=p3, tags=["tag3", "tag4"])
        s2.save()

        assert SocialData.objects._collection.find_one({"tags": "tag2"}) == {
            "_id": s1.pk,
            "obs": "testing 123",
            "tags": ["tag1", "tag2"],
            "person": {"_id": p1.pk, "group": g1.pk},
        }

        assert SocialData.objects(person__group=g2).count() == 1
        assert SocialData.objects(person__group=g2).first() == s2

    def test_cached_reference_field_push_with_fields(self):
        class Product(Document):
            name = StringField()

        Product.drop_collection()

        class Basket(Document):
            products = ListField(CachedReferenceField(Product, fields=["name"]))

        Basket.drop_collection()
        product1 = Product(name="abc").save()
        product2 = Product(name="def").save()
        basket = Basket(products=[product1]).save()
        assert Basket.objects._collection.find_one() == {
            "_id": basket.pk,
            "products": [{"_id": product1.pk, "name": product1.name}],
        }
        # push to list
        basket.update(push__products=product2)
        basket.reload()
        assert Basket.objects._collection.find_one() == {
            "_id": basket.pk,
            "products": [
                {"_id": product1.pk, "name": product1.name},
                {"_id": product2.pk, "name": product2.name},
            ],
        }

    def test_cached_reference_field_update_all(self):
        class Person(Document):
            TYPES = (("pf", "PF"), ("pj", "PJ"))
            name = StringField()
            tp = StringField(choices=TYPES)
            father = CachedReferenceField("self", fields=("tp",))

        Person.drop_collection()

        a1 = Person(name="Wilson Father", tp="pj")
        a1.save()

        a2 = Person(name="Wilson Junior", tp="pf", father=a1)
        a2.save()

        a2 = Person.objects.with_id(a2.id)
        assert a2.father.tp == a1.tp

        assert dict(a2.to_mongo()) == {
            "_id": a2.pk,
            "name": "Wilson Junior",
            "tp": "pf",
            "father": {"_id": a1.pk, "tp": "pj"},
        }

        assert Person.objects(father=a1)._query == {"father._id": a1.pk}
        assert Person.objects(father=a1).count() == 1

        Person.objects.update(set__tp="pf")
        Person.father.sync_all()

        a2.reload()
        assert dict(a2.to_mongo()) == {
            "_id": a2.pk,
            "name": "Wilson Junior",
            "tp": "pf",
            "father": {"_id": a1.pk, "tp": "pf"},
        }

    def test_cached_reference_fields_on_embedded_documents(self):
        with pytest.raises(InvalidDocumentError):

            class Test(Document):
                name = StringField()

            type(
                "WrongEmbeddedDocument",
                (EmbeddedDocument,),
                {"test": CachedReferenceField(Test)},
            )

    def test_cached_reference_auto_sync(self):
        class Person(Document):
            TYPES = (("pf", "PF"), ("pj", "PJ"))
            name = StringField()
            tp = StringField(choices=TYPES)

            father = CachedReferenceField("self", fields=("tp",))

        Person.drop_collection()

        a1 = Person(name="Wilson Father", tp="pj")
        a1.save()

        a2 = Person(name="Wilson Junior", tp="pf", father=a1)
        a2.save()

        a1.tp = "pf"
        a1.save()

        a2.reload()
        assert dict(a2.to_mongo()) == {
            "_id": a2.pk,
            "name": "Wilson Junior",
            "tp": "pf",
            "father": {"_id": a1.pk, "tp": "pf"},
        }

    def test_cached_reference_auto_sync_disabled(self):
        class Persone(Document):
            TYPES = (("pf", "PF"), ("pj", "PJ"))
            name = StringField()
            tp = StringField(choices=TYPES)

            father = CachedReferenceField("self", fields=("tp",), auto_sync=False)

        Persone.drop_collection()

        a1 = Persone(name="Wilson Father", tp="pj")
        a1.save()

        a2 = Persone(name="Wilson Junior", tp="pf", father=a1)
        a2.save()

        a1.tp = "pf"
        a1.save()

        assert Persone.objects._collection.find_one({"_id": a2.pk}) == {
            "_id": a2.pk,
            "name": "Wilson Junior",
            "tp": "pf",
            "father": {"_id": a1.pk, "tp": "pj"},
        }

    def test_cached_reference_embedded_fields(self):
        class Owner(EmbeddedDocument):
            TPS = (("n", "Normal"), ("u", "Urgent"))
            name = StringField()
            tp = StringField(verbose_name="Type", db_field="t", choices=TPS)

        class Animal(Document):
            name = StringField()
            tag = StringField()

            owner = EmbeddedDocumentField(Owner)

        class Ocorrence(Document):
            person = StringField()
            animal = CachedReferenceField(Animal, fields=["tag", "owner.tp"])

        Animal.drop_collection()
        Ocorrence.drop_collection()

        a = Animal(
            name="Leopard", tag="heavy", owner=Owner(tp="u", name="Wilson Júnior")
        )
        a.save()

        o = Ocorrence(person="teste", animal=a)
        o.save()
        assert dict(a.to_mongo(fields=["tag", "owner.tp"])) == {
            "_id": a.pk,
            "tag": "heavy",
            "owner": {"t": "u"},
        }
        assert o.to_mongo()["animal"]["tag"] == "heavy"
        assert o.to_mongo()["animal"]["owner"]["t"] == "u"

        # Check to_mongo with fields
        assert "animal" not in o.to_mongo(fields=["person"])

        # counts
        Ocorrence(person="teste 2").save()
        Ocorrence(person="teste 3").save()

        count = Ocorrence.objects(animal__tag="heavy", animal__owner__tp="u").count()
        assert count == 1

        ocorrence = Ocorrence.objects(
            animal__tag="heavy", animal__owner__tp="u"
        ).first()
        assert ocorrence.person == "teste"
        assert isinstance(ocorrence.animal, Animal)

    def test_cached_reference_embedded_list_fields(self):
        class Owner(EmbeddedDocument):
            name = StringField()
            tags = ListField(StringField())

        class Animal(Document):
            name = StringField()
            tag = StringField()

            owner = EmbeddedDocumentField(Owner)

        class Ocorrence(Document):
            person = StringField()
            animal = CachedReferenceField(Animal, fields=["tag", "owner.tags"])

        Animal.drop_collection()
        Ocorrence.drop_collection()

        a = Animal(
            name="Leopard",
            tag="heavy",
            owner=Owner(tags=["cool", "funny"], name="Wilson Júnior"),
        )
        a.save()

        o = Ocorrence(person="teste 2", animal=a)
        o.save()
        assert dict(a.to_mongo(fields=["tag", "owner.tags"])) == {
            "_id": a.pk,
            "tag": "heavy",
            "owner": {"tags": ["cool", "funny"]},
        }

        assert o.to_mongo()["animal"]["tag"] == "heavy"
        assert o.to_mongo()["animal"]["owner"]["tags"] == ["cool", "funny"]

        # counts
        Ocorrence(person="teste 2").save()
        Ocorrence(person="teste 3").save()

        query = Ocorrence.objects(
            animal__tag="heavy", animal__owner__tags="cool"
        )._query
        assert query == {"animal.owner.tags": "cool", "animal.tag": "heavy"}

        ocorrence = Ocorrence.objects(
            animal__tag="heavy", animal__owner__tags="cool"
        ).first()
        assert ocorrence.person == "teste 2"
        assert isinstance(ocorrence.animal, Animal)
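The find_one() assertions above all show the same denormalized shape: the referencing document stores "_id" plus only the whitelisted fields of the target. A rough plain-Python sketch of that projection (hypothetical helper for illustration, not MongoEngine API):

def cache_fields(doc_dict, fields):
    # Keep _id plus the whitelisted fields, mirroring the cached shape above.
    cached = {"_id": doc_dict["_id"]}
    cached.update({f: doc_dict[f] for f in fields if f in doc_dict})
    return cached

assert cache_fields({"_id": 1, "name": "Leopard", "tag": "heavy"}, ["tag"]) == {
    "_id": 1,
    "tag": "heavy",
}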
208 tests/fields/test_complex_datetime_field.py Normal file
@@ -0,0 +1,208 @@
import datetime
import itertools
import math
import re

import pytest

from mongoengine import *
from tests.utils import MongoDBTestCase


class ComplexDateTimeFieldTest(MongoDBTestCase):
    def test_complexdatetime_storage(self):
        """Tests for complex datetime fields - which can handle
        microseconds without rounding.
        """

        class LogEntry(Document):
            date = ComplexDateTimeField()
            date_with_dots = ComplexDateTimeField(separator=".")

        LogEntry.drop_collection()

        # Post UTC - with default DateTimeFields, microseconds are rounded
        # (down) to the nearest millisecond and dropped
        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999)
        log = LogEntry()
        log.date = d1
        log.save()
        log.reload()
        assert log.date == d1

        # Post UTC - with default DateTimeFields, microseconds are rounded
        # (down) to the nearest millisecond
        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999)
        log.date = d1
        log.save()
        log.reload()
        assert log.date == d1

        # Pre UTC dates: microseconds below 1000 are dropped - with default
        # DateTimeFields
        d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
        log.date = d1
        log.save()
        log.reload()
        assert log.date == d1

        # Pre UTC microseconds above 1000 are wonky with default DateTimeFields:
        # log.date would have an invalid microsecond value, so there is no
        # date to construct for comparison.
        for i in range(1001, 3113, 33):
            d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, i)
            log.date = d1
            log.save()
            log.reload()
            assert log.date == d1
            log1 = LogEntry.objects.get(date=d1)
            assert log == log1

        # Test string padding
        microsecond = map(int, (math.pow(10, x) for x in range(6)))
        mm = dd = hh = ii = ss = [1, 10]

        for values in itertools.product([2014], mm, dd, hh, ii, ss, microsecond):
            stored = LogEntry(date=datetime.datetime(*values)).to_mongo()["date"]
            assert (
                re.match(r"^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$", stored)
                is not None
            )

        # Test separator
        stored = LogEntry(date_with_dots=datetime.datetime(2014, 1, 1)).to_mongo()[
            "date_with_dots"
        ]
        assert (
            re.match(r"^\d{4}.\d{2}.\d{2}.\d{2}.\d{2}.\d{2}.\d{6}$", stored) is not None
        )

    def test_complexdatetime_usage(self):
        """Tests for complex datetime fields - which can handle
        microseconds without rounding.
        """

        class LogEntry(Document):
            date = ComplexDateTimeField()

        LogEntry.drop_collection()

        d1 = datetime.datetime(1950, 1, 1, 0, 0, 1, 999)
        log = LogEntry()
        log.date = d1
        log.save()

        log1 = LogEntry.objects.get(date=d1)
        assert log == log1

        # create extra 59 log entries for a total of 60
        for i in range(1951, 2010):
            d = datetime.datetime(i, 1, 1, 0, 0, 1, 999)
            LogEntry(date=d).save()

        assert LogEntry.objects.count() == 60

        # Test ordering
        logs = LogEntry.objects.order_by("date")
        i = 0
        while i < 59:
            assert logs[i].date <= logs[i + 1].date
            i += 1

        logs = LogEntry.objects.order_by("-date")
        i = 0
        while i < 59:
            assert logs[i].date >= logs[i + 1].date
            i += 1

        # Test searching
        logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
        assert logs.count() == 30

        logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1))
        assert logs.count() == 30

        logs = LogEntry.objects.filter(
            date__lte=datetime.datetime(2011, 1, 1),
            date__gte=datetime.datetime(2000, 1, 1),
        )
        assert logs.count() == 10

        LogEntry.drop_collection()

        # Test microsecond-level ordering/filtering
        for microsecond in (99, 999, 9999, 10000):
            LogEntry(date=datetime.datetime(2015, 1, 1, 0, 0, 0, microsecond)).save()

        logs = list(LogEntry.objects.order_by("date"))
        for next_idx, log in enumerate(logs[:-1], start=1):
            next_log = logs[next_idx]
            assert log.date < next_log.date

        logs = list(LogEntry.objects.order_by("-date"))
        for next_idx, log in enumerate(logs[:-1], start=1):
            next_log = logs[next_idx]
            assert log.date > next_log.date

        logs = LogEntry.objects.filter(
            date__lte=datetime.datetime(2015, 1, 1, 0, 0, 0, 10000)
        )
        assert logs.count() == 4

    def test_no_default_value(self):
        class Log(Document):
            timestamp = ComplexDateTimeField()

        Log.drop_collection()

        log = Log()
        assert log.timestamp is None
        log.save()

        fetched_log = Log.objects.with_id(log.id)
        assert fetched_log.timestamp is None

    def test_default_static_value(self):
        NOW = datetime.datetime.utcnow()

        class Log(Document):
            timestamp = ComplexDateTimeField(default=NOW)

        Log.drop_collection()

        log = Log()
        assert log.timestamp == NOW
        log.save()

        fetched_log = Log.objects.with_id(log.id)
        assert fetched_log.timestamp == NOW

    def test_default_callable(self):
        NOW = datetime.datetime.utcnow()

        class Log(Document):
            timestamp = ComplexDateTimeField(default=datetime.datetime.utcnow)

        Log.drop_collection()

        log = Log()
        assert log.timestamp >= NOW
        log.save()

        fetched_log = Log.objects.with_id(log.id)
        assert fetched_log.timestamp >= NOW

    def test_setting_bad_value_does_not_raise_unless_validate_is_called(self):
        # test regression of #2253

        class Log(Document):
            timestamp = ComplexDateTimeField()

        Log.drop_collection()

        log = Log(timestamp="garbage")
        with pytest.raises(ValidationError):
            log.validate()

        with pytest.raises(ValidationError):
            log.save()
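The padding regexes above imply the storage trick: the field persists a zero-padded, separator-joined string, so lexicographic order matches chronological order down to the microsecond. A minimal sketch of that format (an assumption drawn from the tests, not from ComplexDateTimeField internals):

import datetime

def to_complex_string(value, separator=","):
    # Zero-pad every component so string sort order equals time order.
    parts = (
        f"{value.year:04d}", f"{value.month:02d}", f"{value.day:02d}",
        f"{value.hour:02d}", f"{value.minute:02d}", f"{value.second:02d}",
        f"{value.microsecond:06d}",
    )
    return separator.join(parts)

assert to_complex_string(datetime.datetime(2014, 1, 1)) == "2014,01,01,00,00,00,000000"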
163 tests/fields/test_date_field.py Normal file
@@ -0,0 +1,163 @@
import datetime

import pytest

try:
    import dateutil
except ImportError:
    dateutil = None

from mongoengine import *
from tests.utils import MongoDBTestCase


class TestDateField(MongoDBTestCase):
    def test_date_from_empty_string(self):
        """
        Ensure an exception is raised when trying to
        cast an empty string to datetime.
        """

        class MyDoc(Document):
            dt = DateField()

        md = MyDoc(dt="")
        with pytest.raises(ValidationError):
            md.save()

    def test_date_from_whitespace_string(self):
        """
        Ensure an exception is raised when trying to
        cast a whitespace-only string to datetime.
        """

        class MyDoc(Document):
            dt = DateField()

        md = MyDoc(dt=" ")
        with pytest.raises(ValidationError):
            md.save()

    def test_default_values_today(self):
        """Ensure that default field values are used when creating
        a document.
        """

        class Person(Document):
            day = DateField(default=datetime.date.today)

        person = Person()
        person.validate()
        assert person.day == person.day
        assert person.day == datetime.date.today()
        assert person._data["day"] == person.day

    def test_date(self):
        """Tests showing pymongo date fields

        See: http://api.mongodb.org/python/current/api/bson/son.html#dt
        """

        class LogEntry(Document):
            date = DateField()

        LogEntry.drop_collection()

        # Test can save dates
        log = LogEntry()
        log.date = datetime.date.today()
        log.save()
        log.reload()
        assert log.date == datetime.date.today()

        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999)
        d2 = datetime.datetime(1970, 1, 1, 0, 0, 1)
        log = LogEntry()
        log.date = d1
        log.save()
        log.reload()
        assert log.date == d1.date()
        assert log.date == d2.date()

        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999)
        d2 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9000)
        log.date = d1
        log.save()
        log.reload()
        assert log.date == d1.date()
        assert log.date == d2.date()

    def test_regular_usage(self):
        """Tests for regular datetime fields"""

        class LogEntry(Document):
            date = DateField()

        LogEntry.drop_collection()

        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1)
        log = LogEntry()
        log.date = d1
        log.validate()
        log.save()

        for query in (d1, d1.isoformat(" ")):
            log1 = LogEntry.objects.get(date=query)
            assert log == log1

        if dateutil:
            log1 = LogEntry.objects.get(date=d1.isoformat("T"))
            assert log == log1

        # create additional 19 log entries for a total of 20
        for i in range(1971, 1990):
            d = datetime.datetime(i, 1, 1, 0, 0, 1)
            LogEntry(date=d).save()

        assert LogEntry.objects.count() == 20

        # Test ordering
        logs = LogEntry.objects.order_by("date")
        i = 0
        while i < 19:
            assert logs[i].date <= logs[i + 1].date
            i += 1

        logs = LogEntry.objects.order_by("-date")
        i = 0
        while i < 19:
            assert logs[i].date >= logs[i + 1].date
            i += 1

        # Test searching
        logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
        assert logs.count() == 10

    def test_validation(self):
        """Ensure that invalid values cannot be assigned to datetime
        fields.
        """

        class LogEntry(Document):
            time = DateField()

        log = LogEntry()
        log.time = datetime.datetime.now()
        log.validate()

        log.time = datetime.date.today()
        log.validate()

        log.time = datetime.datetime.now().isoformat(" ")
        log.validate()

        if dateutil:
            log.time = datetime.datetime.now().isoformat("T")
            log.validate()

        log.time = -1
        with pytest.raises(ValidationError):
            log.validate()
        log.time = "ABC"
        with pytest.raises(ValidationError):
            log.validate()
232 tests/fields/test_datetime_field.py Normal file
@@ -0,0 +1,232 @@
import datetime as dt

import pytest

try:
    import dateutil
except ImportError:
    dateutil = None

from mongoengine import *
from mongoengine import connection
from tests.utils import MongoDBTestCase


class TestDateTimeField(MongoDBTestCase):
    def test_datetime_from_empty_string(self):
        """
        Ensure an exception is raised when trying to
        cast an empty string to datetime.
        """

        class MyDoc(Document):
            dt = DateTimeField()

        md = MyDoc(dt="")
        with pytest.raises(ValidationError):
            md.save()

    def test_datetime_from_whitespace_string(self):
        """
        Ensure an exception is raised when trying to
        cast a whitespace-only string to datetime.
        """

        class MyDoc(Document):
            dt = DateTimeField()

        md = MyDoc(dt=" ")
        with pytest.raises(ValidationError):
            md.save()

    def test_default_value_utcnow(self):
        """Ensure that default field values are used when creating
        a document.
        """

        class Person(Document):
            created = DateTimeField(default=dt.datetime.utcnow)

        utcnow = dt.datetime.utcnow()
        person = Person()
        person.validate()
        person_created_t0 = person.created
        assert person.created - utcnow < dt.timedelta(seconds=1)
        assert person_created_t0 == person.created  # make sure it does not change
        assert person._data["created"] == person.created

    def test_handling_microseconds(self):
        """Tests showing pymongo datetime fields handling of microseconds.
        Microseconds are rounded to the nearest millisecond, and pre-UTC
        handling is wonky.

        See: http://api.mongodb.org/python/current/api/bson/son.html#dt
        """

        class LogEntry(Document):
            date = DateTimeField()

        LogEntry.drop_collection()

        # Test can save dates
        log = LogEntry()
        log.date = dt.date.today()
        log.save()
        log.reload()
        assert log.date.date() == dt.date.today()

        # Post UTC - microseconds are rounded (down) to the nearest millisecond
        # and dropped
        d1 = dt.datetime(1970, 1, 1, 0, 0, 1, 999)
        d2 = dt.datetime(1970, 1, 1, 0, 0, 1)
        log = LogEntry()
        log.date = d1
        log.save()
        log.reload()
        assert log.date != d1
        assert log.date == d2

        # Post UTC - microseconds are rounded (down) to the nearest millisecond
        d1 = dt.datetime(1970, 1, 1, 0, 0, 1, 9999)
        d2 = dt.datetime(1970, 1, 1, 0, 0, 1, 9000)
        log.date = d1
        log.save()
        log.reload()
        assert log.date != d1
        assert log.date == d2

    def test_regular_usage(self):
        """Tests for regular datetime fields"""

        class LogEntry(Document):
            date = DateTimeField()

        LogEntry.drop_collection()

        d1 = dt.datetime(1970, 1, 1, 0, 0, 1)
        log = LogEntry()
        log.date = d1
        log.validate()
        log.save()

        for query in (d1, d1.isoformat(" ")):
            log1 = LogEntry.objects.get(date=query)
            assert log == log1

        if dateutil:
            log1 = LogEntry.objects.get(date=d1.isoformat("T"))
            assert log == log1

        # create additional 19 log entries for a total of 20
        for i in range(1971, 1990):
            d = dt.datetime(i, 1, 1, 0, 0, 1)
            LogEntry(date=d).save()

        assert LogEntry.objects.count() == 20

        # Test ordering
        logs = LogEntry.objects.order_by("date")
        i = 0
        while i < 19:
            assert logs[i].date <= logs[i + 1].date
            i += 1

        logs = LogEntry.objects.order_by("-date")
        i = 0
        while i < 19:
            assert logs[i].date >= logs[i + 1].date
            i += 1

        # Test searching
        logs = LogEntry.objects.filter(date__gte=dt.datetime(1980, 1, 1))
        assert logs.count() == 10

        logs = LogEntry.objects.filter(date__lte=dt.datetime(1980, 1, 1))
        assert logs.count() == 10

        logs = LogEntry.objects.filter(
            date__lte=dt.datetime(1980, 1, 1), date__gte=dt.datetime(1975, 1, 1)
        )
        assert logs.count() == 5

    def test_datetime_validation(self):
        """Ensure that invalid values cannot be assigned to datetime
        fields.
        """

        class LogEntry(Document):
            time = DateTimeField()

        log = LogEntry()
        log.time = dt.datetime.now()
        log.validate()

        log.time = dt.date.today()
        log.validate()

        log.time = dt.datetime.now().isoformat(" ")
        log.validate()

        log.time = "2019-05-16 21:42:57.897847"
        log.validate()

        if dateutil:
            log.time = dt.datetime.now().isoformat("T")
            log.validate()

        log.time = -1
        with pytest.raises(ValidationError):
            log.validate()
        log.time = "ABC"
        with pytest.raises(ValidationError):
            log.validate()
        log.time = "2019-05-16 21:GARBAGE:12"
        with pytest.raises(ValidationError):
            log.validate()
        log.time = "2019-05-16 21:42:57.GARBAGE"
        with pytest.raises(ValidationError):
            log.validate()
        log.time = "2019-05-16 21:42:57.123.456"
        with pytest.raises(ValidationError):
            log.validate()

    def test_parse_datetime_as_str(self):
        class DTDoc(Document):
            date = DateTimeField()

        date_str = "2019-03-02 22:26:01"

        # make sure that passing a parsable datetime works
        dtd = DTDoc()
        dtd.date = date_str
        assert isinstance(dtd.date, str)
        dtd.save()
        dtd.reload()

        assert isinstance(dtd.date, dt.datetime)
        assert str(dtd.date) == date_str

        dtd.date = "January 1st, 9999999999"
        with pytest.raises(ValidationError):
            dtd.validate()


class TestDateTimeTzAware(MongoDBTestCase):
    def test_datetime_tz_aware_mark_as_changed(self):
        # Reset the connections
        connection._connection_settings = {}
        connection._connections = {}
        connection._dbs = {}

        connect(db="mongoenginetest", tz_aware=True)

        class LogEntry(Document):
            time = DateTimeField()

        LogEntry.drop_collection()

        LogEntry(time=dt.datetime(2013, 1, 1, 0, 0, 0)).save()

        log = LogEntry.objects.first()
        log.time = dt.datetime(2013, 1, 1, 0, 0, 0)
        assert ["time"] == log._changed_fields
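The d1/d2 pairs above encode the core constraint: BSON datetimes carry millisecond resolution, so sub-millisecond precision is lost on save. A hedged sketch of the truncation the assertions rely on (the exact rounding is the driver's business; this just mirrors what the tests assert):

import datetime as dt

def truncate_to_millis(value):
    # Floor microseconds to the nearest millisecond: 999 -> 0, 9999 -> 9000.
    return value.replace(microsecond=value.microsecond // 1000 * 1000)

assert truncate_to_millis(dt.datetime(1970, 1, 1, 0, 0, 1, 999)) == dt.datetime(1970, 1, 1, 0, 0, 1)
assert truncate_to_millis(dt.datetime(1970, 1, 1, 0, 0, 1, 9999)) == dt.datetime(1970, 1, 1, 0, 0, 1, 9000)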
120 tests/fields/test_decimal_field.py Normal file
@@ -0,0 +1,120 @@
from decimal import Decimal

import pytest

from mongoengine import DecimalField, Document, ValidationError
from tests.utils import MongoDBTestCase


class TestDecimalField(MongoDBTestCase):
    def test_storage(self):
        class Person(Document):
            float_value = DecimalField(precision=4)
            string_value = DecimalField(precision=4, force_string=True)

        Person.drop_collection()
        values_to_store = [
            10,
            10.1,
            10.11,
            "10.111",
            Decimal("10.1111"),
            Decimal("10.11111"),
        ]
        for store_at_creation in [True, False]:
            for value in values_to_store:
                # to_python is called explicitly if values were sent in the kwargs of __init__
                if store_at_creation:
                    Person(float_value=value, string_value=value).save()
                else:
                    person = Person.objects.create()
                    person.float_value = value
                    person.string_value = value
                    person.save()

        # How it's stored
        expected = [
            {"float_value": 10.0, "string_value": "10.0000"},
            {"float_value": 10.1, "string_value": "10.1000"},
            {"float_value": 10.11, "string_value": "10.1100"},
            {"float_value": 10.111, "string_value": "10.1110"},
            {"float_value": 10.1111, "string_value": "10.1111"},
            {"float_value": 10.1111, "string_value": "10.1111"},
        ]
        expected.extend(expected)
        actual = list(Person.objects.exclude("id").as_pymongo())
        assert expected == actual

        # How it comes out locally
        expected = [
            Decimal("10.0000"),
            Decimal("10.1000"),
            Decimal("10.1100"),
            Decimal("10.1110"),
            Decimal("10.1111"),
            Decimal("10.1111"),
        ]
        expected.extend(expected)
        for field_name in ["float_value", "string_value"]:
            actual = list(Person.objects().scalar(field_name))
            assert expected == actual

    def test_save_none(self):
        class Person(Document):
            value = DecimalField()

        Person.drop_collection()

        person = Person(value=None)
        assert person.value is None
        person.save()
        fetched_person = Person.objects.first()
        assert fetched_person.value is None

    def test_validation(self):
        """Ensure that invalid values cannot be assigned to decimal fields."""

        class Person(Document):
            height = DecimalField(min_value=Decimal("0.1"), max_value=Decimal("3.5"))

        Person.drop_collection()

        Person(height=Decimal("1.89")).save()
        person = Person.objects.first()
        assert person.height == Decimal("1.89")

        person.height = "2.0"
        person.save()
        person.height = 0.01
        with pytest.raises(ValidationError):
            person.validate()
        person.height = Decimal("0.01")
        with pytest.raises(ValidationError):
            person.validate()
        person.height = Decimal("4.0")
        with pytest.raises(ValidationError):
            person.validate()
        person.height = "something invalid"
        with pytest.raises(ValidationError):
            person.validate()

        person_2 = Person(height="something invalid")
        with pytest.raises(ValidationError):
            person_2.validate()

    def test_comparison(self):
        class Person(Document):
            money = DecimalField()

        Person.drop_collection()

        Person(money=6).save()
        Person(money=7).save()
        Person(money=8).save()
        Person(money=10).save()

        assert 2 == Person.objects(money__gt=Decimal("7")).count()
        assert 2 == Person.objects(money__gt=7).count()
        assert 2 == Person.objects(money__gt="7").count()

        assert 3 == Person.objects(money__gte="7").count()
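The expected values in test_storage amount to quantizing every input to `precision` decimal places before storing. A minimal sketch under that assumption (DecimalField's actual rounding mode is not pinned down by these tests):

from decimal import ROUND_HALF_UP, Decimal

def quantize(value, precision=4):
    # Round to `precision` decimal places, e.g. "10.111" -> Decimal("10.1110").
    return Decimal(str(value)).quantize(Decimal(1).scaleb(-precision), rounding=ROUND_HALF_UP)

assert str(quantize("10.111")) == "10.1110"
assert str(quantize(Decimal("10.11111"))) == "10.1111"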
369 tests/fields/test_dict_field.py Normal file
@@ -0,0 +1,369 @@
import pytest
from bson import InvalidDocument

from mongoengine import *
from mongoengine.base import BaseDict
from mongoengine.mongodb_support import (
    MONGODB_36,
    get_mongodb_version,
)
from tests.utils import MongoDBTestCase, get_as_pymongo


class TestDictField(MongoDBTestCase):
    def test_storage(self):
        class BlogPost(Document):
            info = DictField()

        BlogPost.drop_collection()

        info = {"testkey": "testvalue"}
        post = BlogPost(info=info).save()
        assert get_as_pymongo(post) == {"_id": post.id, "info": info}

    def test_validate_invalid_type(self):
        class BlogPost(Document):
            info = DictField()

        BlogPost.drop_collection()

        invalid_infos = ["my post", ["test", "test"], {1: "test"}]
        for invalid_info in invalid_infos:
            with pytest.raises(ValidationError):
                BlogPost(info=invalid_info).validate()

    def test_keys_with_dots_or_dollars(self):
        class BlogPost(Document):
            info = DictField()

        BlogPost.drop_collection()

        post = BlogPost()

        post.info = {"$title": "test"}
        with pytest.raises(ValidationError):
            post.validate()

        post.info = {"nested": {"$title": "test"}}
        with pytest.raises(ValidationError):
            post.validate()

        post.info = {"$title.test": "test"}
        with pytest.raises(ValidationError):
            post.validate()

        post.info = {"nested": {"the.title": "test"}}
        if get_mongodb_version() < MONGODB_36:
            # MongoDB < 3.6 rejects dots.
            # To avoid checking the MongoDB version from the DictField class,
            # we rely on MongoDB to reject the data during the save.
            post.validate()
            with pytest.raises(InvalidDocument):
                post.save()
        else:
            post.validate()

        post.info = {"dollar_and_dot": {"te$st.test": "test"}}
        if get_mongodb_version() < MONGODB_36:
            post.validate()
            with pytest.raises(InvalidDocument):
                post.save()
        else:
            post.validate()

    def test_general_things(self):
        """Ensure that dict types work as expected."""

        class BlogPost(Document):
            info = DictField()

        BlogPost.drop_collection()

        post = BlogPost(info={"title": "test"})
        post.save()

        post = BlogPost()
        post.info = {"title": "dollar_sign", "details": {"te$t": "test"}}
        post.save()

        post = BlogPost()
        post.info = {"details": {"test": "test"}}
        post.save()

        post = BlogPost()
        post.info = {"details": {"test": 3}}
        post.save()

        assert BlogPost.objects.count() == 4
        assert BlogPost.objects.filter(info__title__exact="test").count() == 1
        assert BlogPost.objects.filter(info__details__test__exact="test").count() == 1

        post = BlogPost.objects.filter(info__title__exact="dollar_sign").first()
        assert "te$t" in post["info"]["details"]

        # Confirm it handles non-string and non-existing keys
        assert BlogPost.objects.filter(info__details__test__exact=5).count() == 0
        assert BlogPost.objects.filter(info__made_up__test__exact="test").count() == 0

        post = BlogPost.objects.create(info={"title": "original"})
        post.info.update({"title": "updated"})
        post.save()
        post.reload()
        assert "updated" == post.info["title"]

        post.info.setdefault("authors", [])
        post.save()
        post.reload()
        assert post.info["authors"] == []

    def test_dictfield_dump_document(self):
        """Ensure a DictField can handle another document's dump."""

        class Doc(Document):
            field = DictField()

        class ToEmbed(Document):
            id = IntField(primary_key=True, default=1)
            recursive = DictField()

        class ToEmbedParent(Document):
            id = IntField(primary_key=True, default=1)
            recursive = DictField()

            meta = {"allow_inheritance": True}

        class ToEmbedChild(ToEmbedParent):
            pass

        to_embed_recursive = ToEmbed(id=1).save()
        to_embed = ToEmbed(
            id=2, recursive=to_embed_recursive.to_mongo().to_dict()
        ).save()
        doc = Doc(field=to_embed.to_mongo().to_dict())
        doc.save()
        assert isinstance(doc.field, dict)
        assert doc.field == {"_id": 2, "recursive": {"_id": 1, "recursive": {}}}
        # Same thing with a Document with a _cls field
        to_embed_recursive = ToEmbedChild(id=1).save()
        to_embed_child = ToEmbedChild(
            id=2, recursive=to_embed_recursive.to_mongo().to_dict()
        ).save()
        doc = Doc(field=to_embed_child.to_mongo().to_dict())
        doc.save()
        assert isinstance(doc.field, dict)
        expected = {
            "_id": 2,
            "_cls": "ToEmbedParent.ToEmbedChild",
            "recursive": {
                "_id": 1,
                "_cls": "ToEmbedParent.ToEmbedChild",
                "recursive": {},
            },
        }
        assert doc.field == expected

    def test_dictfield_strict(self):
        """Ensure that dict field handles validation if provided a strict field type."""

        class Simple(Document):
            mapping = DictField(field=IntField())

        Simple.drop_collection()

        e = Simple()
        e.mapping["someint"] = 1
        e.save()

        # try creating an invalid mapping
        with pytest.raises(ValidationError):
            e.mapping["somestring"] = "abc"
            e.save()

    def test_dictfield_complex(self):
        """Ensure that the dict field can handle the complex types."""

        class SettingBase(EmbeddedDocument):
            meta = {"allow_inheritance": True}

        class StringSetting(SettingBase):
            value = StringField()

        class IntegerSetting(SettingBase):
            value = IntField()

        class Simple(Document):
            mapping = DictField()

        Simple.drop_collection()

        e = Simple()
        e.mapping["somestring"] = StringSetting(value="foo")
        e.mapping["someint"] = IntegerSetting(value=42)
        e.mapping["nested_dict"] = {
            "number": 1,
            "string": "Hi!",
            "float": 1.001,
            "complex": IntegerSetting(value=42),
            "list": [IntegerSetting(value=42), StringSetting(value="foo")],
        }
        e.save()

        e2 = Simple.objects.get(id=e.id)
        assert isinstance(e2.mapping["somestring"], StringSetting)
        assert isinstance(e2.mapping["someint"], IntegerSetting)

        # Test querying
        assert Simple.objects.filter(mapping__someint__value=42).count() == 1
        assert Simple.objects.filter(mapping__nested_dict__number=1).count() == 1
        assert (
            Simple.objects.filter(mapping__nested_dict__complex__value=42).count() == 1
        )
        assert (
            Simple.objects.filter(mapping__nested_dict__list__0__value=42).count() == 1
        )
        assert (
            Simple.objects.filter(mapping__nested_dict__list__1__value="foo").count()
            == 1
        )

        # Confirm can update
        Simple.objects().update(set__mapping={"someint": IntegerSetting(value=10)})
        Simple.objects().update(
            set__mapping__nested_dict__list__1=StringSetting(value="Boo")
        )
        assert (
            Simple.objects.filter(mapping__nested_dict__list__1__value="foo").count()
            == 0
        )
        assert (
            Simple.objects.filter(mapping__nested_dict__list__1__value="Boo").count()
            == 1
        )

    def test_push_dict(self):
        class MyModel(Document):
            events = ListField(DictField())

        doc = MyModel(events=[{"a": 1}]).save()
        raw_doc = get_as_pymongo(doc)
        expected_raw_doc = {"_id": doc.id, "events": [{"a": 1}]}
        assert raw_doc == expected_raw_doc

        MyModel.objects(id=doc.id).update(push__events={})
        raw_doc = get_as_pymongo(doc)
        expected_raw_doc = {"_id": doc.id, "events": [{"a": 1}, {}]}
        assert raw_doc == expected_raw_doc

    def test_ensure_unique_default_instances(self):
        """Ensure that every field has its own unique default instance."""

        class D(Document):
            data = DictField()
            data2 = DictField(default=lambda: {})

        d1 = D()
        d1.data["foo"] = "bar"
        d1.data2["foo"] = "bar"
        d2 = D()
        assert d2.data == {}
        assert d2.data2 == {}

    def test_dict_field_invalid_dict_value(self):
        class DictFieldTest(Document):
            dictionary = DictField(required=True)

        DictFieldTest.drop_collection()

        test = DictFieldTest(dictionary=None)
        test.dictionary  # Just access to test getter
        with pytest.raises(ValidationError):
            test.validate()

        test = DictFieldTest(dictionary=False)
        test.dictionary  # Just access to test getter
        with pytest.raises(ValidationError):
            test.validate()

    def test_dict_field_raises_validation_error_if_wrongly_assign_embedded_doc(self):
        class DictFieldTest(Document):
            dictionary = DictField(required=True)

        DictFieldTest.drop_collection()

        class Embedded(EmbeddedDocument):
            name = StringField()

        embed = Embedded(name="garbage")
        doc = DictFieldTest(dictionary=embed)
        with pytest.raises(ValidationError) as exc_info:
            doc.validate()

        error_msg = str(exc_info.value)
        assert "'dictionary'" in error_msg
        assert "Only dictionaries may be used in a DictField" in error_msg

    def test_atomic_update_dict_field(self):
        """Ensure that the entire DictField can be atomically updated."""

        class Simple(Document):
            mapping = DictField(field=ListField(IntField(required=True)))

        Simple.drop_collection()

        e = Simple()
        e.mapping["someints"] = [1, 2]
        e.save()
        e.update(set__mapping={"ints": [3, 4]})
        e.reload()
        assert isinstance(e.mapping, BaseDict)
        assert {"ints": [3, 4]} == e.mapping

        # try creating an invalid mapping
        with pytest.raises(ValueError):
            e.update(set__mapping={"somestrings": ["foo", "bar"]})

    def test_dictfield_with_referencefield_complex_nesting_cases(self):
        """Ensure complex nesting inside DictField handles dereferencing of ReferenceField(dbref=True | False)"""
        # Relates to Issue #1453
        class Doc(Document):
            s = StringField()

        class Simple(Document):
            mapping0 = DictField(ReferenceField(Doc, dbref=True))
            mapping1 = DictField(ReferenceField(Doc, dbref=False))
            mapping2 = DictField(ListField(ReferenceField(Doc, dbref=True)))
            mapping3 = DictField(ListField(ReferenceField(Doc, dbref=False)))
            mapping4 = DictField(DictField(field=ReferenceField(Doc, dbref=True)))
            mapping5 = DictField(DictField(field=ReferenceField(Doc, dbref=False)))
            mapping6 = DictField(ListField(DictField(ReferenceField(Doc, dbref=True))))
            mapping7 = DictField(ListField(DictField(ReferenceField(Doc, dbref=False))))
            mapping8 = DictField(
                ListField(DictField(ListField(ReferenceField(Doc, dbref=True))))
            )
            mapping9 = DictField(
                ListField(DictField(ListField(ReferenceField(Doc, dbref=False))))
            )

        Doc.drop_collection()
        Simple.drop_collection()

        d = Doc(s="aa").save()
        e = Simple()
        e.mapping0["someint"] = e.mapping1["someint"] = d
        e.mapping2["someint"] = e.mapping3["someint"] = [d]
        e.mapping4["someint"] = e.mapping5["someint"] = {"d": d}
        e.mapping6["someint"] = e.mapping7["someint"] = [{"d": d}]
        e.mapping8["someint"] = e.mapping9["someint"] = [{"d": [d]}]
        e.save()

        s = Simple.objects.first()
        assert isinstance(s.mapping0["someint"], Doc)
        assert isinstance(s.mapping1["someint"], Doc)
        assert isinstance(s.mapping2["someint"][0], Doc)
        assert isinstance(s.mapping3["someint"][0], Doc)
        assert isinstance(s.mapping4["someint"]["d"], Doc)
        assert isinstance(s.mapping5["someint"]["d"], Doc)
        assert isinstance(s.mapping6["someint"][0]["d"], Doc)
        assert isinstance(s.mapping7["someint"][0]["d"], Doc)
        assert isinstance(s.mapping8["someint"][0]["d"][0], Doc)
        assert isinstance(s.mapping9["someint"][0]["d"][0], Doc)
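test_keys_with_dots_or_dollars boils down to a split of responsibility: "$"-prefixed and non-string keys are rejected client-side by validate(), while dotted keys pass validation and are left for MongoDB (< 3.6) to refuse at save time. A plain-Python sketch of such a client-side check (illustrative only, not DictField's implementation):

def rejected_by_validate(mapping):
    # Mirrors what validate() rejects above: non-string keys and "$"-prefixed keys.
    # Dotted keys pass client-side validation; MongoDB < 3.6 refuses them on save.
    for key, value in mapping.items():
        if not isinstance(key, str) or key.startswith("$"):
            return True
        if isinstance(value, dict) and rejected_by_validate(value):
            return True
    return False

assert rejected_by_validate({"$title": "test"})
assert rejected_by_validate({"nested": {"$title": "test"}})
assert not rejected_by_validate({"nested": {"the.title": "test"}})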
134 tests/fields/test_email_field.py Normal file
@@ -0,0 +1,134 @@
import pytest

from mongoengine import Document, EmailField, ValidationError
from tests.utils import MongoDBTestCase


class TestEmailField(MongoDBTestCase):
    def test_generic_behavior(self):
        class User(Document):
            email = EmailField()

        user = User(email="ross@example.com")
        user.validate()

        user = User(email="ross@example.co.uk")
        user.validate()

        user = User(
            email=("Kofq@rhom0e4klgauOhpbpNdogawnyIKvQS0wk2mjqrgGQ5SaJIazqqWkm7.net")
        )
        user.validate()

        user = User(email="new-tld@example.technology")
        user.validate()

        user = User(email="ross@example.com.")
        with pytest.raises(ValidationError):
            user.validate()

        # unicode domain
        user = User(email="user@пример.рф")
        user.validate()

        # invalid unicode domain
        user = User(email="user@пример")
        with pytest.raises(ValidationError):
            user.validate()

        # invalid data type
        user = User(email=123)
        with pytest.raises(ValidationError):
            user.validate()

    def test_email_field_unicode_user(self):
        class User(Document):
            email = EmailField()

        # unicode user shouldn't validate by default...
        user = User(email="Dörte@Sörensen.example.com")
        with pytest.raises(ValidationError):
            user.validate()

        # ...but it should be fine with allow_utf8_user set to True
        class User(Document):
            email = EmailField(allow_utf8_user=True)

        user = User(email="Dörte@Sörensen.example.com")
        user.validate()

    def test_email_field_domain_whitelist(self):
        class User(Document):
            email = EmailField()

        # localhost domain shouldn't validate by default...
        user = User(email="me@localhost")
        with pytest.raises(ValidationError):
            user.validate()

        # ...but it should be fine if it's whitelisted
        class User(Document):
            email = EmailField(domain_whitelist=["localhost"])

        user = User(email="me@localhost")
        user.validate()

    def test_email_domain_validation_fails_if_invalid_idn(self):
        class User(Document):
            email = EmailField()

        invalid_idn = ".google.com"
        user = User(email="me@%s" % invalid_idn)

        with pytest.raises(ValidationError) as exc_info:
            user.validate()
        assert "domain failed IDN encoding" in str(exc_info.value)

    def test_email_field_ip_domain(self):
        class User(Document):
            email = EmailField()

        valid_ipv4 = "email@[127.0.0.1]"
        valid_ipv6 = "email@[2001:dB8::1]"
        invalid_ip = "email@[324.0.0.1]"

        # IP address as a domain shouldn't validate by default...
        user = User(email=valid_ipv4)
        with pytest.raises(ValidationError):
            user.validate()

        user = User(email=valid_ipv6)
        with pytest.raises(ValidationError):
            user.validate()

        user = User(email=invalid_ip)
        with pytest.raises(ValidationError):
            user.validate()

        # ...but it should be fine with allow_ip_domain set to True
        class User(Document):
            email = EmailField(allow_ip_domain=True)

        user = User(email=valid_ipv4)
        user.validate()

        user = User(email=valid_ipv6)
        user.validate()

        # invalid IP should still fail validation
        user = User(email=invalid_ip)
        with pytest.raises(ValidationError):
            user.validate()

    def test_email_field_honors_regex(self):
        class User(Document):
            email = EmailField(regex=r"\w+@example.com")

        # Fails regex validation
        user = User(email="me@foo.com")
        with pytest.raises(ValidationError):
            user.validate()

        # Passes regex validation
        user = User(email="me@example.com")
        assert user.validate() is None
352 tests/fields/test_embedded_document_field.py Normal file
@@ -0,0 +1,352 @@
|
||||
import pytest
|
||||
|
||||
from mongoengine import (
|
||||
Document,
|
||||
EmbeddedDocument,
|
||||
EmbeddedDocumentField,
|
||||
GenericEmbeddedDocumentField,
|
||||
IntField,
|
||||
InvalidQueryError,
|
||||
ListField,
|
||||
LookUpError,
|
||||
StringField,
|
||||
ValidationError,
|
||||
)
|
||||
from tests.utils import MongoDBTestCase
|
||||
|
||||
|
||||
class TestEmbeddedDocumentField(MongoDBTestCase):
|
||||
def test___init___(self):
|
||||
class MyDoc(EmbeddedDocument):
|
||||
name = StringField()
|
||||
|
||||
field = EmbeddedDocumentField(MyDoc)
|
||||
assert field.document_type_obj == MyDoc
|
||||
|
||||
field2 = EmbeddedDocumentField("MyDoc")
|
||||
assert field2.document_type_obj == "MyDoc"
|
||||
|
||||
def test___init___throw_error_if_document_type_is_not_EmbeddedDocument(self):
|
||||
with pytest.raises(ValidationError):
|
||||
EmbeddedDocumentField(dict)
|
||||
|
||||
def test_document_type_throw_error_if_not_EmbeddedDocument_subclass(self):
|
||||
class MyDoc(Document):
|
||||
name = StringField()
|
||||
|
||||
emb = EmbeddedDocumentField("MyDoc")
|
||||
with pytest.raises(ValidationError) as exc_info:
|
||||
emb.document_type
|
||||
assert (
|
||||
"Invalid embedded document class provided to an EmbeddedDocumentField"
|
||||
in str(exc_info.value)
|
||||
)
|
||||
|
||||
def test_embedded_document_field_only_allow_subclasses_of_embedded_document(self):
|
||||
# Relates to #1661
|
||||
class MyDoc(Document):
|
||||
name = StringField()
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
|
||||
class MyFailingDoc(Document):
|
||||
emb = EmbeddedDocumentField(MyDoc)
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
|
||||
class MyFailingdoc2(Document):
|
||||
emb = EmbeddedDocumentField("MyDoc")
|
||||
|
||||
def test_query_embedded_document_attribute(self):
|
||||
class AdminSettings(EmbeddedDocument):
|
||||
foo1 = StringField()
|
||||
foo2 = StringField()
|
||||
|
||||
class Person(Document):
|
||||
settings = EmbeddedDocumentField(AdminSettings)
|
||||
name = StringField()
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
p = Person(settings=AdminSettings(foo1="bar1", foo2="bar2"), name="John").save()
|
||||
|
||||
# Test non exiting attribute
|
||||
with pytest.raises(InvalidQueryError) as exc_info:
|
||||
Person.objects(settings__notexist="bar").first()
|
||||
assert str(exc_info.value) == 'Cannot resolve field "notexist"'
|
||||
|
||||
with pytest.raises(LookUpError):
|
||||
Person.objects.only("settings.notexist")
|
||||
|
||||
# Test existing attribute
|
||||
assert Person.objects(settings__foo1="bar1").first().id == p.id
|
||||
only_p = Person.objects.only("settings.foo1").first()
|
||||
assert only_p.settings.foo1 == p.settings.foo1
|
||||
assert only_p.settings.foo2 is None
|
||||
assert only_p.name is None
|
||||
|
||||
exclude_p = Person.objects.exclude("settings.foo1").first()
|
||||
assert exclude_p.settings.foo1 is None
|
||||
assert exclude_p.settings.foo2 == p.settings.foo2
|
||||
assert exclude_p.name == p.name
|
||||
|
||||
def test_query_embedded_document_attribute_with_inheritance(self):
|
||||
class BaseSettings(EmbeddedDocument):
|
||||
meta = {"allow_inheritance": True}
|
||||
base_foo = StringField()
|
||||
|
||||
class AdminSettings(BaseSettings):
|
||||
sub_foo = StringField()
|
||||
|
||||
class Person(Document):
|
||||
settings = EmbeddedDocumentField(BaseSettings)
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
p = Person(settings=AdminSettings(base_foo="basefoo", sub_foo="subfoo"))
|
||||
p.save()
|
||||
|
||||
# Test non exiting attribute
|
||||
with pytest.raises(InvalidQueryError) as exc_info:
|
||||
assert Person.objects(settings__notexist="bar").first().id == p.id
|
||||
assert str(exc_info.value) == 'Cannot resolve field "notexist"'
|
||||
|
||||
# Test existing attribute
|
||||
assert Person.objects(settings__base_foo="basefoo").first().id == p.id
|
||||
assert Person.objects(settings__sub_foo="subfoo").first().id == p.id
|
||||
|
||||
only_p = Person.objects.only("settings.base_foo", "settings._cls").first()
|
||||
assert only_p.settings.base_foo == "basefoo"
|
||||
assert only_p.settings.sub_foo is None
|
||||
|
||||
def test_query_list_embedded_document_with_inheritance(self):
|
||||
class Post(EmbeddedDocument):
|
||||
title = StringField(max_length=120, required=True)
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
class TextPost(Post):
|
||||
content = StringField()
|
||||
|
||||
class MoviePost(Post):
|
||||
author = StringField()
|
||||
|
||||
class Record(Document):
|
||||
posts = ListField(EmbeddedDocumentField(Post))
|
||||
|
||||
record_movie = Record(posts=[MoviePost(author="John", title="foo")]).save()
|
||||
record_text = Record(posts=[TextPost(content="a", title="foo")]).save()
|
||||
|
||||
records = list(Record.objects(posts__author=record_movie.posts[0].author))
|
||||
assert len(records) == 1
|
||||
assert records[0].id == record_movie.id
|
||||
|
||||
records = list(Record.objects(posts__content=record_text.posts[0].content))
|
||||
assert len(records) == 1
|
||||
assert records[0].id == record_text.id
|
||||
|
||||
assert Record.objects(posts__title="foo").count() == 2
|
||||
|
||||
|
||||
class TestGenericEmbeddedDocumentField(MongoDBTestCase):
|
||||
def test_generic_embedded_document(self):
|
||||
class Car(EmbeddedDocument):
|
||||
name = StringField()
|
||||
|
||||
class Dish(EmbeddedDocument):
|
||||
food = StringField(required=True)
|
||||
number = IntField()
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
like = GenericEmbeddedDocumentField()
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
person = Person(name="Test User")
|
||||
person.like = Car(name="Fiat")
|
||||
person.save()
|
||||
|
||||
person = Person.objects.first()
|
||||
assert isinstance(person.like, Car)
|
||||
|
||||
person.like = Dish(food="arroz", number=15)
|
||||
person.save()
|
||||
|
||||
person = Person.objects.first()
|
||||
assert isinstance(person.like, Dish)
|
||||
|
||||
    def test_generic_embedded_document_choices(self):
        """Ensure you can limit GenericEmbeddedDocument choices."""

        class Car(EmbeddedDocument):
            name = StringField()

        class Dish(EmbeddedDocument):
            food = StringField(required=True)
            number = IntField()

        class Person(Document):
            name = StringField()
            like = GenericEmbeddedDocumentField(choices=(Dish,))

        Person.drop_collection()

        person = Person(name="Test User")
        person.like = Car(name="Fiat")
        with pytest.raises(ValidationError):
            person.validate()

        person.like = Dish(food="arroz", number=15)
        person.save()

        person = Person.objects.first()
        assert isinstance(person.like, Dish)
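The choices check runs inside validate(), which save() invokes by default; nothing is raised at assignment time. For completeness, mongoengine's save(validate=False) flag would skip the check and persist the out-of-choices value, which is why the test asserts on validate() directly. A sketch reusing the classes above (bypassing validation like this is rarely advisable):

person.like = Car(name="Fiat")   # no error yet: assignment does not validate
person.save(validate=False)      # skips validation, so the Car is persisted anyway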
    def test_generic_list_embedded_document_choices(self):
        """Ensure you can limit GenericEmbeddedDocument choices inside
        a list field.
        """

        class Car(EmbeddedDocument):
            name = StringField()

        class Dish(EmbeddedDocument):
            food = StringField(required=True)
            number = IntField()

        class Person(Document):
            name = StringField()
            likes = ListField(GenericEmbeddedDocumentField(choices=(Dish,)))

        Person.drop_collection()

        person = Person(name="Test User")
        person.likes = [Car(name="Fiat")]
        with pytest.raises(ValidationError):
            person.validate()

        person.likes = [Dish(food="arroz", number=15)]
        person.save()

        person = Person.objects.first()
        assert isinstance(person.likes[0], Dish)
    def test_choices_validation_documents(self):
        """
        Ensure fields with document choices validate given a valid choice.
        """

        class UserComments(EmbeddedDocument):
            author = StringField()
            message = StringField()

        class BlogPost(Document):
            comments = ListField(GenericEmbeddedDocumentField(choices=(UserComments,)))

        # Ensure validation passes
        BlogPost(comments=[UserComments(author="user2", message="message2")]).save()
    def test_choices_validation_documents_invalid(self):
        """
        Ensure fields with document choices validate given an invalid choice.
        This should throw a ValidationError exception.
        """

        class UserComments(EmbeddedDocument):
            author = StringField()
            message = StringField()

        class ModeratorComments(EmbeddedDocument):
            author = StringField()
            message = StringField()

        class BlogPost(Document):
            comments = ListField(GenericEmbeddedDocumentField(choices=(UserComments,)))

        # Single entry failure
        post = BlogPost(comments=[ModeratorComments(author="mod1", message="message1")])
        with pytest.raises(ValidationError):
            post.save()

        # Mixed entry failure
        post = BlogPost(
            comments=[
                ModeratorComments(author="mod1", message="message1"),
                UserComments(author="user2", message="message2"),
            ]
        )
        with pytest.raises(ValidationError):
            post.save()
    def test_choices_validation_documents_inheritance(self):
        """
        Ensure fields with document choices validate given a subclass of a choice.
        """

        class Comments(EmbeddedDocument):
            meta = {"abstract": True}
            author = StringField()
            message = StringField()

        class UserComments(Comments):
            pass

        class BlogPost(Document):
            comments = ListField(GenericEmbeddedDocumentField(choices=(Comments,)))

        # Save a valid EmbeddedDocument type
        BlogPost(comments=[UserComments(author="user2", message="message2")]).save()
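Since choices matches subclasses, an abstract base class acts as a whitelist for a whole family of embedded types. Abstract classes are left out of the stored class hierarchy, so the persisted marker names the concrete subclass. A sketch reusing BlogPost and UserComments from the test above (no database needed; output shown is illustrative):

post = BlogPost(comments=[UserComments(author="user2", message="message2")])
print(post.to_mongo()["comments"][0]["_cls"])
# -> 'UserComments'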
    def test_query_generic_embedded_document_attribute(self):
        class AdminSettings(EmbeddedDocument):
            foo1 = StringField()

        class NonAdminSettings(EmbeddedDocument):
            foo2 = StringField()

        class Person(Document):
            settings = GenericEmbeddedDocumentField(
                choices=(AdminSettings, NonAdminSettings)
            )

        Person.drop_collection()

        p1 = Person(settings=AdminSettings(foo1="bar1")).save()
        p2 = Person(settings=NonAdminSettings(foo2="bar2")).save()

        # Test non-existing attribute
        with pytest.raises(InvalidQueryError) as exc_info:
            Person.objects(settings__notexist="bar").first()
        assert str(exc_info.value) == 'Cannot resolve field "notexist"'

        with pytest.raises(LookUpError):
            Person.objects.only("settings.notexist")

        # Test existing attribute
        assert Person.objects(settings__foo1="bar1").first().id == p1.id
        assert Person.objects(settings__foo2="bar2").first().id == p2.id
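Two distinct failure modes are asserted here: a filter keyword that resolves against none of the choice classes raises mongoengine's InvalidQueryError when the queryset executes, while .only() checks its dotted projection paths eagerly and raises a plain LookUpError right away. Side by side, using the classes from the test above:

Person.objects(settings__notexist="bar").first()  # InvalidQueryError on execution
Person.objects.only("settings.notexist")          # LookUpError, raised immediately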
    def test_query_generic_embedded_document_attribute_with_inheritance(self):
        class BaseSettings(EmbeddedDocument):
            meta = {"allow_inheritance": True}
            base_foo = StringField()

        class AdminSettings(BaseSettings):
            sub_foo = StringField()

        class Person(Document):
            settings = GenericEmbeddedDocumentField(choices=[BaseSettings])

        Person.drop_collection()

        p = Person(settings=AdminSettings(base_foo="basefoo", sub_foo="subfoo"))
        p.save()

        # Test non-existing attribute
        with pytest.raises(InvalidQueryError) as exc_info:
            assert Person.objects(settings__notexist="bar").first().id == p.id
        assert str(exc_info.value) == 'Cannot resolve field "notexist"'

        # Test existing attribute
        assert Person.objects(settings__base_foo="basefoo").first().id == p.id
        assert Person.objects(settings__sub_foo="subfoo").first().id == p.id
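When GenericEmbeddedDocumentField is combined with allow_inheritance, the stored marker carries the full class path, which is why both the base-class and the subclass attribute resolve in the queries above. A sketch of the raw shape, reusing BaseSettings, AdminSettings, and Person from the test above (no database needed; output shown is illustrative):

p = Person(settings=AdminSettings(base_foo="basefoo", sub_foo="subfoo"))
print(p.to_mongo()["settings"])
# -> {'_cls': 'BaseSettings.AdminSettings', 'base_foo': 'basefoo', 'sub_foo': 'subfoo'}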
127	tests/fields/test_enum_field.py	Normal file
@@ -0,0 +1,127 @@
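The new test module below exercises EnumField, which serializes an enum member to its .value and rebuilds the member on the way back. A minimal usage sketch before the listing (standalone, no database needed; Task is a hypothetical model, not part of the test file):

from enum import Enum
from mongoengine import Document, EnumField

class Status(Enum):
    NEW = "new"
    DONE = "done"

class Task(Document):
    status = EnumField(Status)

# The member's .value is what lands in the raw BSON document
assert Task(status=Status.NEW).to_mongo()["status"] == "new"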
from enum import Enum

import pytest
from bson import InvalidDocument

from mongoengine import Document, EnumField, ValidationError
from tests.utils import MongoDBTestCase, get_as_pymongo


class Status(Enum):
    NEW = "new"
    DONE = "done"


class ModelWithEnum(Document):
    status = EnumField(Status)
class TestStringEnumField(MongoDBTestCase):
    def test_storage(self):
        model = ModelWithEnum(status=Status.NEW).save()
        assert get_as_pymongo(model) == {"_id": model.id, "status": "new"}

    def test_set_enum(self):
        ModelWithEnum.drop_collection()
        ModelWithEnum(status=Status.NEW).save()
        assert ModelWithEnum.objects(status=Status.NEW).count() == 1
        assert ModelWithEnum.objects.first().status == Status.NEW

    def test_set_by_value(self):
        ModelWithEnum.drop_collection()
        ModelWithEnum(status="new").save()
        assert ModelWithEnum.objects.first().status == Status.NEW

    def test_filter(self):
        ModelWithEnum.drop_collection()
        ModelWithEnum(status="new").save()
        assert ModelWithEnum.objects(status="new").count() == 1
        assert ModelWithEnum.objects(status=Status.NEW).count() == 1
        assert ModelWithEnum.objects(status=Status.DONE).count() == 0

    def test_change_value(self):
        m = ModelWithEnum(status="new")
        m.status = Status.DONE
        m.save()
        assert m.status == Status.DONE

        m.status = "wrong"
        assert m.status == "wrong"
        with pytest.raises(ValidationError):
            m.validate()

    def test_set_default(self):
        class ModelWithDefault(Document):
            status = EnumField(Status, default=Status.DONE)

        m = ModelWithDefault().save()
        assert m.status == Status.DONE

    def test_enum_field_can_be_empty(self):
        ModelWithEnum.drop_collection()
        m = ModelWithEnum().save()
        assert m.status is None
        assert ModelWithEnum.objects()[0].status is None
        assert ModelWithEnum.objects(status=None).count() == 1

    def test_set_none_explicitly(self):
        ModelWithEnum.drop_collection()
        ModelWithEnum(status=None).save()
        assert ModelWithEnum.objects.first().status is None

    def test_cannot_create_model_with_wrong_enum_value(self):
        m = ModelWithEnum(status="wrong_one")
        with pytest.raises(ValidationError):
            m.validate()

    def test_user_is_informed_when_tries_to_set_choices(self):
        with pytest.raises(ValueError, match="'choices' can't be set on EnumField"):
            EnumField(Status, choices=["my", "custom", "options"])
class Color(Enum):
    RED = 1
    BLUE = 2


class ModelWithColor(Document):
    color = EnumField(Color, default=Color.RED)
class TestIntEnumField(MongoDBTestCase):
    def test_enum_with_int(self):
        ModelWithColor.drop_collection()
        m = ModelWithColor().save()
        assert m.color == Color.RED
        assert ModelWithColor.objects(color=Color.RED).count() == 1
        assert ModelWithColor.objects(color=1).count() == 1
        assert ModelWithColor.objects(color=2).count() == 0

    def test_create_int_enum_by_value(self):
        model = ModelWithColor(color=2).save()
        assert model.color == Color.BLUE

    def test_storage_enum_with_int(self):
        model = ModelWithColor(color=Color.BLUE).save()
        assert get_as_pymongo(model) == {"_id": model.id, "color": 2}

    def test_validate_model(self):
        with pytest.raises(ValidationError, match="Value must be one of"):
            ModelWithColor(color=3).validate()

        with pytest.raises(ValidationError, match="Value must be one of"):
            ModelWithColor(color="wrong_type").validate()
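get_as_pymongo, used in the storage assertions, simply reads the saved document back through the raw pymongo driver, bypassing mongoengine's deserialization. A rough equivalent via the internal collection accessor, reusing ModelWithColor and model from the storage test above (an implementation detail, shown only for illustration):

raw = ModelWithColor._get_collection().find_one({"_id": model.id})
assert raw == {"_id": model.id, "color": 2}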
class TestFunkyEnumField(MongoDBTestCase):
    def test_enum_incompatible_bson_type_fails_during_save(self):
        class FunkyColor(Enum):
            YELLOW = object()

        class ModelWithFunkyColor(Document):
            color = EnumField(FunkyColor)

        m = ModelWithFunkyColor(color=FunkyColor.YELLOW)

        with pytest.raises(InvalidDocument, match="[cC]annot encode object"):
            m.save()
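The funky-color case marks the boundary of what EnumField supports: the field persists whatever .value the member carries, so that value must itself be BSON-encodable. Keeping to str or int values, as Status and Color do above, avoids the InvalidDocument error:

from enum import Enum

class SafeColor(Enum):
    YELLOW = "yellow"  # str/int values round-trip through BSON; object() does not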
||||
2694
tests/fields/test_fields.py
Normal file
2694
tests/fields/test_fields.py
Normal file
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user