Compare commits
1892 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
904fcd1a0a | ||
|
|
2ec454447f | ||
|
|
ecd297e227 | ||
|
|
079ee3c191 | ||
|
|
f2638ecd02 | ||
|
|
ad6ff819fe | ||
|
|
48357640c6 | ||
|
|
e6c2169f76 | ||
|
|
1d17dc4663 | ||
|
|
eeac3bd2e6 | ||
|
|
3f5a15d236 | ||
|
|
91493a1e79 | ||
|
|
0c274908ec | ||
|
|
338c40b5d5 | ||
|
|
fc3ccf9606 | ||
|
|
746faceb5c | ||
|
|
8c3058d99b | ||
|
|
eb56fb9bda | ||
|
|
161493c0d2 | ||
|
|
cb9f329d11 | ||
|
|
03af784ebe | ||
|
|
e5f6e4584a | ||
|
|
79f9f223d0 | ||
|
|
0bc18cd6e1 | ||
|
|
30a3c6a5b7 | ||
|
|
90c5d83f84 | ||
|
|
d8b8ff6851 | ||
|
|
ee664f0c90 | ||
|
|
f8d371229e | ||
|
|
94a7e813b1 | ||
|
|
8ef7213426 | ||
|
|
2f4464ead5 | ||
|
|
89b93461ac | ||
|
|
9e40f3ae83 | ||
|
|
f4962fbc40 | ||
|
|
c9d53ca5d5 | ||
|
|
65f50fd713 | ||
|
|
bf1d04e399 | ||
|
|
5a8e5e5a40 | ||
|
|
f3919dd839 | ||
|
|
9f82a02ddf | ||
|
|
015a36c85f | ||
|
|
fbd3388a59 | ||
|
|
d8a52d68c5 | ||
|
|
4286708e2e | ||
|
|
e362d089e1 | ||
|
|
6b657886a5 | ||
|
|
eb16945147 | ||
|
|
38047ca992 | ||
|
|
c801e79d4b | ||
|
|
3fca3739de | ||
|
|
c218c8bb6c | ||
|
|
0bbc05995a | ||
|
|
3adb67901b | ||
|
|
d4350e7da4 | ||
|
|
4665658145 | ||
|
|
0d289fd5a1 | ||
|
|
aabc18755c | ||
|
|
1f2a5db016 | ||
|
|
ff40f66291 | ||
|
|
7f77084e0e | ||
|
|
aca4de728e | ||
|
|
9e7ca43cad | ||
|
|
7116dec74a | ||
|
|
a5302b870b | ||
|
|
604e9974b6 | ||
|
|
3e1c83f8fa | ||
|
|
e431e27cb2 | ||
|
|
4f188655d0 | ||
|
|
194b0cac88 | ||
|
|
7b4175fc5c | ||
|
|
adb5f74ddb | ||
|
|
107a1c34c8 | ||
|
|
dc7da5204f | ||
|
|
0301bca176 | ||
|
|
49f9bca23b | ||
|
|
31498bd7dd | ||
|
|
1698f398eb | ||
|
|
4275c2d7b7 | ||
|
|
22bff8566d | ||
|
|
d8657be320 | ||
|
|
3db9d58dac | ||
|
|
3fbe9c3cdd | ||
|
|
130e9c519c | ||
|
|
78c9e9745d | ||
|
|
38ebb5abf4 | ||
|
|
9b73be26ab | ||
|
|
fd0095b73f | ||
|
|
226049f66a | ||
|
|
dc1cf88ca6 | ||
|
|
f5f8b730b5 | ||
|
|
e8f6b42316 | ||
|
|
49b0d73654 | ||
|
|
394da67cf1 | ||
|
|
ef7da36ac6 | ||
|
|
1312100bc7 | ||
|
|
4085bc2152 | ||
|
|
f4d7e72426 | ||
|
|
ece63ad071 | ||
|
|
a9550b8243 | ||
|
|
43724e40b2 | ||
|
|
1bfa40e926 | ||
|
|
d493f71c4e | ||
|
|
87f4d1a323 | ||
|
|
0a0e6114f5 | ||
|
|
41d36fa3bf | ||
|
|
707923e3f5 | ||
|
|
d9b9581df2 | ||
|
|
463e7c66af | ||
|
|
2be28a22a7 | ||
|
|
d73f0bb1af | ||
|
|
ce74978b1e | ||
|
|
2b0157aecd | ||
|
|
f49baf5d90 | ||
|
|
7cc964c7d8 | ||
|
|
bc77322c2f | ||
|
|
8913a74a86 | ||
|
|
af35b25d15 | ||
|
|
476b07af6e | ||
|
|
e2b9a02531 | ||
|
|
6cc6229066 | ||
|
|
4c62a060f0 | ||
|
|
3d80637fa4 | ||
|
|
68be9fe979 | ||
|
|
547cd4a3ae | ||
|
|
ee2d50b2d1 | ||
|
|
15c3ddece8 | ||
|
|
beaa9744b7 | ||
|
|
8eb51790b5 | ||
|
|
aadc6262ed | ||
|
|
00ae6298d4 | ||
|
|
ad0669a326 | ||
|
|
85df76c623 | ||
|
|
87512246cb | ||
|
|
a3f9016ae9 | ||
|
|
4e58e9f8d1 | ||
|
|
7c533394fd | ||
|
|
333e014f13 | ||
|
|
c0c0efce18 | ||
|
|
beabaee345 | ||
|
|
c937af3919 | ||
|
|
aa4a6ae023 | ||
|
|
b57946ec98 | ||
|
|
1e110a2c41 | ||
|
|
b234aa48e4 | ||
|
|
8086576677 | ||
|
|
03e34299f0 | ||
|
|
421e3f324f | ||
|
|
a0b803959c | ||
|
|
ff4d57032a | ||
|
|
ba34589065 | ||
|
|
a4d11eef46 | ||
|
|
fda2e2b47a | ||
|
|
d287f480e5 | ||
|
|
d85f0e6226 | ||
|
|
cfb4943986 | ||
|
|
b453a96211 | ||
|
|
81f9b351b3 | ||
|
|
4bca3de42f | ||
|
|
235b1a3679 | ||
|
|
450658d7ac | ||
|
|
8e17e42e26 | ||
|
|
2d6a4c4b90 | ||
|
|
38703acc29 | ||
|
|
095217e797 | ||
|
|
86e965f854 | ||
|
|
57db68dc04 | ||
|
|
72de6d67c7 | ||
|
|
b2c3acd025 | ||
|
|
605de59bd0 | ||
|
|
e0565ddac5 | ||
|
|
18b68f1b80 | ||
|
|
ea88806630 | ||
|
|
412bed0f6d | ||
|
|
53cf26b9af | ||
|
|
d738462139 | ||
|
|
2fa48cd9e5 | ||
|
|
e64a7a9448 | ||
|
|
9490ad2bf7 | ||
|
|
84f3dce492 | ||
|
|
60c42dddd5 | ||
|
|
f93f9406ee | ||
|
|
705c55ce24 | ||
|
|
928770c43a | ||
|
|
59fbd505a0 | ||
|
|
1cc20c9770 | ||
|
|
f8f267a880 | ||
|
|
80ea1f6883 | ||
|
|
75ee282a3d | ||
|
|
4edad4601c | ||
|
|
152b51fd33 | ||
|
|
66a0fca4ad | ||
|
|
e7c7a66cd1 | ||
|
|
b3dbb87c3c | ||
|
|
3d45538998 | ||
|
|
8df9d3fef9 | ||
|
|
99e660c66d | ||
|
|
aa02f87b69 | ||
|
|
f0d1ee2cb4 | ||
|
|
ca4967311d | ||
|
|
65eb6ab611 | ||
|
|
1cb2f7814c | ||
|
|
b5485b16e6 | ||
|
|
62c8597a3b | ||
|
|
488604ff2e | ||
|
|
bd88a17b8e | ||
|
|
8e892dccfe | ||
|
|
c22eb34017 | ||
|
|
dcf3edb03e | ||
|
|
c85b59d3b5 | ||
|
|
1170de1e8e | ||
|
|
332bd767d4 | ||
|
|
0053b30237 | ||
|
|
d44533d956 | ||
|
|
12d8bd5a22 | ||
|
|
ae326678ec | ||
|
|
8d31f165c0 | ||
|
|
cfd4d6a161 | ||
|
|
329f030a41 | ||
|
|
68dc2925fb | ||
|
|
0d4e61d489 | ||
|
|
dc7b96a569 | ||
|
|
50882e5bb0 | ||
|
|
280a73af3b | ||
|
|
d8c0631dab | ||
|
|
9166ba91d7 | ||
|
|
6bc4e602bb | ||
|
|
45a7520fc3 | ||
|
|
64c0cace85 | ||
|
|
82af5e4a19 | ||
|
|
7e0ba1b335 | ||
|
|
44b7f792fe | ||
|
|
a3e432eb68 | ||
|
|
009f9a2b14 | ||
|
|
2ca905b6e5 | ||
|
|
3b099f936a | ||
|
|
4d6ddb070e | ||
|
|
b205314424 | ||
|
|
e83132f32c | ||
|
|
1b38309d70 | ||
|
|
6e8196d475 | ||
|
|
90fecc56dd | ||
|
|
d3d7f0e670 | ||
|
|
37ffeafeff | ||
|
|
abc159b7b9 | ||
|
|
648b28876d | ||
|
|
5b9f2bac87 | ||
|
|
17151f67c2 | ||
|
|
5f14d958ac | ||
|
|
bd6c52e025 | ||
|
|
cb77bb6b69 | ||
|
|
78b240b740 | ||
|
|
7e30f00178 | ||
|
|
35310dbc73 | ||
|
|
af82c07acc | ||
|
|
3f75f30f26 | ||
|
|
f7f0e10d4d | ||
|
|
091238a2cf | ||
|
|
0458ef869e | ||
|
|
0bf08db7b9 | ||
|
|
d3420918cd | ||
|
|
138e759161 | ||
|
|
f1d6ce7d12 | ||
|
|
ff749a7a0a | ||
|
|
bff78ca8dd | ||
|
|
81647d67a0 | ||
|
|
d8924ed892 | ||
|
|
799cdafae6 | ||
|
|
bc0c55e49a | ||
|
|
c61c6a8525 | ||
|
|
3e764d068c | ||
|
|
ac25f4b98b | ||
|
|
aa6ff8c84a | ||
|
|
37ca79e9c5 | ||
|
|
6040b4b494 | ||
|
|
51ea3e3c6f | ||
|
|
5a16dda50d | ||
|
|
bbfa978861 | ||
|
|
54ca7bf09f | ||
|
|
8bf5370b6c | ||
|
|
ecefa05e03 | ||
|
|
e013494fb2 | ||
|
|
4853f74dbf | ||
|
|
6f45ee6813 | ||
|
|
c60ed32f3a | ||
|
|
178851589d | ||
|
|
5bcc679194 | ||
|
|
1e17b5ac66 | ||
|
|
19f12f3f2f | ||
|
|
71e8d9a490 | ||
|
|
e3cd553f82 | ||
|
|
b61c8cd104 | ||
|
|
8f288fe458 | ||
|
|
02a920feea | ||
|
|
be2c4f2b3c | ||
|
|
7ac74b1c1f | ||
|
|
933cb1d5c7 | ||
|
|
6203e30152 | ||
|
|
7d94af0e31 | ||
|
|
564a2b5f1e | ||
|
|
1dbe7a3163 | ||
|
|
47f8a126ca | ||
|
|
693195f70b | ||
|
|
2267b7e7d7 | ||
|
|
a06e605e67 | ||
|
|
47c67ecc99 | ||
|
|
4c4b7cbeae | ||
|
|
ddececbfea | ||
|
|
71a6f3d1a4 | ||
|
|
e86cf962e9 | ||
|
|
99a58d5c91 | ||
|
|
eecbb5ca90 | ||
|
|
fbb3bf869c | ||
|
|
b887ea9623 | ||
|
|
c68e3e1238 | ||
|
|
c5080e4030 | ||
|
|
0d01365751 | ||
|
|
f4a06ad65d | ||
|
|
05a22d5a54 | ||
|
|
2424ece0c5 | ||
|
|
2d02551d0a | ||
|
|
ac416aeeb3 | ||
|
|
d09af430e8 | ||
|
|
79454b5eed | ||
|
|
921c1fa412 | ||
|
|
1aba145bc6 | ||
|
|
290d9df3eb | ||
|
|
aa76ccdd25 | ||
|
|
abe8070c36 | ||
|
|
2d28c258fd | ||
|
|
1338839b52 | ||
|
|
058203a0ec | ||
|
|
8fdf664968 | ||
|
|
50555ec73e | ||
|
|
951a532a9f | ||
|
|
e940044603 | ||
|
|
babfbb0fcd | ||
|
|
bbed312bdd | ||
|
|
b593764ded | ||
|
|
483c840fc8 | ||
|
|
de80f0ccff | ||
|
|
d0b87f7f82 | ||
|
|
bf32d3c39a | ||
|
|
bc14f2cdaa | ||
|
|
06a21e038a | ||
|
|
4d5eba317e | ||
|
|
d37a30e083 | ||
|
|
9170eea784 | ||
|
|
2769967e1e | ||
|
|
609f50d261 | ||
|
|
82f0eb1cbc | ||
|
|
b47669403b | ||
|
|
91899acfe5 | ||
|
|
ffedd33101 | ||
|
|
c9ed930606 | ||
|
|
af292b0ec2 | ||
|
|
1ead7f9b2b | ||
|
|
5c91877b69 | ||
|
|
e57d834a0d | ||
|
|
0578cdb62e | ||
|
|
b661afba01 | ||
|
|
b1002dd4f9 | ||
|
|
8e69008699 | ||
|
|
f45552f8f8 | ||
|
|
a4fe091a51 | ||
|
|
216217e2c6 | ||
|
|
799775b3a7 | ||
|
|
ae0384df29 | ||
|
|
8f57279dc7 | ||
|
|
e8dbd12f22 | ||
|
|
ca230d28b4 | ||
|
|
c96065b187 | ||
|
|
2abcf4764d | ||
|
|
6a4c342e45 | ||
|
|
bb0b1e88ef | ||
|
|
63c9135184 | ||
|
|
7fac0ef961 | ||
|
|
5a2e268160 | ||
|
|
a4e4e8f440 | ||
|
|
b62ce947a6 | ||
|
|
9538662262 | ||
|
|
09d7ae4f80 | ||
|
|
d7ded366c7 | ||
|
|
09c77973a0 | ||
|
|
22f3c70234 | ||
|
|
6527b1386f | ||
|
|
baabf97acd | ||
|
|
97005aca66 | ||
|
|
6e8ea50c19 | ||
|
|
1fcd706e11 | ||
|
|
008bb19b0b | ||
|
|
023acab779 | ||
|
|
68e8584520 | ||
|
|
5d120ebca0 | ||
|
|
f91b89f723 | ||
|
|
1181b75e16 | ||
|
|
5f00b4f923 | ||
|
|
4c31193b82 | ||
|
|
17fc9d1886 | ||
|
|
d7285d43dd | ||
|
|
aa8a991d20 | ||
|
|
40ba51ac43 | ||
|
|
d20430a778 | ||
|
|
f08f749cd9 | ||
|
|
a6c04f4f9a | ||
|
|
15b6c1590f | ||
|
|
4a8985278d | ||
|
|
996618a495 | ||
|
|
1f02d5fbbd | ||
|
|
c58b9f00f0 | ||
|
|
f131b18cbe | ||
|
|
118a998138 | ||
|
|
7ad6f036e7 | ||
|
|
1d29b824a8 | ||
|
|
3caf2dce28 | ||
|
|
1fc5b954f2 | ||
|
|
31d99c0bd2 | ||
|
|
0ac59c67ea | ||
|
|
8e8c74c621 | ||
|
|
f996f3df74 | ||
|
|
9499c97e18 | ||
|
|
c1c81fc07b | ||
|
|
072e86a2f0 | ||
|
|
70d6e763b0 | ||
|
|
15f4d4fee6 | ||
|
|
82e28dec43 | ||
|
|
b407c0e6c6 | ||
|
|
27ea01ee05 | ||
|
|
7ed5829b2c | ||
|
|
5bf1dd55b1 | ||
|
|
36aebffcc0 | ||
|
|
84c42ed58c | ||
|
|
9634e44343 | ||
|
|
048a045966 | ||
|
|
a18c8c0eb4 | ||
|
|
5fb0f46e3f | ||
|
|
962997ed16 | ||
|
|
daca0ebc14 | ||
|
|
9ae8fe7c2d | ||
|
|
1907133f99 | ||
|
|
4334955e39 | ||
|
|
f00c9dc4d6 | ||
|
|
7d0687ec73 | ||
|
|
da3773bfe8 | ||
|
|
6e1c132ee8 | ||
|
|
24ba35d76f | ||
|
|
64b63e9d52 | ||
|
|
7848a82a1c | ||
|
|
6a843cc8b2 | ||
|
|
ecdb0785a4 | ||
|
|
9a55caed75 | ||
|
|
2e01eb87db | ||
|
|
597b962ad5 | ||
|
|
7531f533e0 | ||
|
|
6b9d71554e | ||
|
|
bb1089e03d | ||
|
|
c82f0c937d | ||
|
|
00d2fd685a | ||
|
|
f28e1b8c90 | ||
|
|
2b17985a11 | ||
|
|
b392e3102e | ||
|
|
58b0b18ddd | ||
|
|
6a9ef319d0 | ||
|
|
cf38ef70cb | ||
|
|
ac64ade10f | ||
|
|
ee85af34d8 | ||
|
|
9d53ad53e5 | ||
|
|
9cdc3ebee6 | ||
|
|
14a5e05d64 | ||
|
|
f7b7d0f79e | ||
|
|
d98f36ceff | ||
|
|
abfabc30c9 | ||
|
|
c1aff7a248 | ||
|
|
e44f71eeb1 | ||
|
|
cb578c84e2 | ||
|
|
565e1dc0ed | ||
|
|
b1e28d02f7 | ||
|
|
d1467c2f73 | ||
|
|
c439150431 | ||
|
|
9bb3dfd639 | ||
|
|
4caa58b9ec | ||
|
|
b5213097e8 | ||
|
|
61081651e4 | ||
|
|
4ccfdf051d | ||
|
|
9f2a9d9cda | ||
|
|
827de76345 | ||
|
|
fdcaca42ae | ||
|
|
0744892244 | ||
|
|
b70ffc69df | ||
|
|
73b12cc32f | ||
|
|
ba6a37f315 | ||
|
|
6f8be8c8ac | ||
|
|
68497542b3 | ||
|
|
3d762fed10 | ||
|
|
48b849c031 | ||
|
|
88c4aa2d87 | ||
|
|
fb8c0d8fe3 | ||
|
|
1a863725d1 | ||
|
|
7b4245c91c | ||
|
|
9bd0d6b99d | ||
|
|
b640c766db | ||
|
|
50ffa8014e | ||
|
|
7ef688b256 | ||
|
|
b4fe0b35e4 | ||
|
|
a2cbbdf819 | ||
|
|
35b7efe3f4 | ||
|
|
7cea2a768f | ||
|
|
7247b9b68e | ||
|
|
dca837b843 | ||
|
|
c60c2ee8d0 | ||
|
|
3cdb5b5db2 | ||
|
|
b9cc8a4ca9 | ||
|
|
28606e9985 | ||
|
|
5bbe782812 | ||
|
|
d65861cdf7 | ||
|
|
c8df3fd2a7 | ||
|
|
6cfe6652a3 | ||
|
|
6b711da69d | ||
|
|
9b02867293 | ||
|
|
595cb99b2d | ||
|
|
f0a3445250 | ||
|
|
6d353dae1e | ||
|
|
57a38282a9 | ||
|
|
db47604865 | ||
|
|
2a121fe202 | ||
|
|
36baff0d7f | ||
|
|
201f3008b1 | ||
|
|
f4873fee18 | ||
|
|
e02261be6d | ||
|
|
2919e6765c | ||
|
|
b8fc4d0079 | ||
|
|
4a46f5f095 | ||
|
|
3484ceabb8 | ||
|
|
cab659dce6 | ||
|
|
a657f29439 | ||
|
|
4c054bf316 | ||
|
|
dc7922c38b | ||
|
|
c6c68abfcc | ||
|
|
6aacb0c898 | ||
|
|
e7000db491 | ||
|
|
fce994ea7f | ||
|
|
6c6446765e | ||
|
|
69a99c70c6 | ||
|
|
56d9f7a8af | ||
|
|
363aefe399 | ||
|
|
7fd4f792ba | ||
|
|
6fbdde63d8 | ||
|
|
b04dc90cdf | ||
|
|
b525c91bd3 | ||
|
|
a32c893078 | ||
|
|
2c6a744848 | ||
|
|
4492874d08 | ||
|
|
d3a592e5bf | ||
|
|
cab21b1b21 | ||
|
|
1319e422ea | ||
|
|
c88ea40b57 | ||
|
|
3194a37fcb | ||
|
|
72ebaa52e9 | ||
|
|
0e00695fc7 | ||
|
|
48a691e722 | ||
|
|
cf54d6d6f8 | ||
|
|
a03fe234d0 | ||
|
|
d88d40cc08 | ||
|
|
d3b4af116e | ||
|
|
352b23331b | ||
|
|
bdd6041a5c | ||
|
|
1894003f8a | ||
|
|
220513ae42 | ||
|
|
fcbabbe357 | ||
|
|
3627969fce | ||
|
|
8807c0dbef | ||
|
|
23cc9f6ff8 | ||
|
|
e50799e9c4 | ||
|
|
b92c4844eb | ||
|
|
c306d42d08 | ||
|
|
e31558318e | ||
|
|
78a9420f26 | ||
|
|
b47c5b5bfc | ||
|
|
28a312accf | ||
|
|
611094e92e | ||
|
|
2a8579a6a5 | ||
|
|
47577f2f47 | ||
|
|
34e3e45843 | ||
|
|
364dc9ddfb | ||
|
|
23324f0f87 | ||
|
|
17fa9a3b77 | ||
|
|
424b3ca308 | ||
|
|
26e2fc8fd4 | ||
|
|
8e18484898 | ||
|
|
354cfe0f9c | ||
|
|
983474b2bd | ||
|
|
14d861bcbb | ||
|
|
f6cd349a16 | ||
|
|
8e1c4dec87 | ||
|
|
18b47e4a73 | ||
|
|
4f157f50ed | ||
|
|
f44a2f4857 | ||
|
|
c685ace327 | ||
|
|
f23b0faf41 | ||
|
|
e0e2ca7ccd | ||
|
|
83fe7f7eef | ||
|
|
1feaa8f2e9 | ||
|
|
598d6bf4c5 | ||
|
|
0afd5a40d6 | ||
|
|
26b70e9ed3 | ||
|
|
a1a93a4bdd | ||
|
|
4939a7dd7c | ||
|
|
0fa6610fdb | ||
|
|
b0148e7860 | ||
|
|
59a06a242d | ||
|
|
ffe902605d | ||
|
|
556f7e85fc | ||
|
|
45c86be402 | ||
|
|
bf34f413de | ||
|
|
9b022b187f | ||
|
|
c3409d64dc | ||
|
|
3c5c3b5026 | ||
|
|
f240f00d84 | ||
|
|
68c7764c63 | ||
|
|
adfb039ba6 | ||
|
|
89416d9856 | ||
|
|
9b6c972e0f | ||
|
|
55fc04752a | ||
|
|
96f0919633 | ||
|
|
17b140baf4 | ||
|
|
45c2151d0f | ||
|
|
1887f5b7e7 | ||
|
|
708d1c7a32 | ||
|
|
acf8c3015a | ||
|
|
f83ae5789b | ||
|
|
57ccfcfc1b | ||
|
|
dd0fdcfdd4 | ||
|
|
5c805be067 | ||
|
|
e423380d7f | ||
|
|
4d8bebc917 | ||
|
|
4314fa883f | ||
|
|
d6e39b362b | ||
|
|
f89214f9cf | ||
|
|
d17cac8210 | ||
|
|
aa49283fa9 | ||
|
|
e79ea7a2cf | ||
|
|
8a1d280f19 | ||
|
|
6a8eb9562f | ||
|
|
8f76e1e344 | ||
|
|
7b9f084e6b | ||
|
|
5b1693a908 | ||
|
|
fd7c00da49 | ||
|
|
7fc5ced3af | ||
|
|
a86092fb64 | ||
|
|
003827e916 | ||
|
|
b15673c525 | ||
|
|
00363303b1 | ||
|
|
48fbe890f8 | ||
|
|
4179877cc7 | ||
|
|
282b83ac08 | ||
|
|
193656e71b | ||
|
|
a25d127f36 | ||
|
|
cf9df548ca | ||
|
|
f29b93c762 | ||
|
|
032ace40d1 | ||
|
|
f74dd1cb3c | ||
|
|
29889d1e35 | ||
|
|
d6d19c4229 | ||
|
|
ab08e67eaf | ||
|
|
00bf6ac258 | ||
|
|
b65478e7d9 | ||
|
|
e83b529f1c | ||
|
|
408274152b | ||
|
|
8ff82996fb | ||
|
|
d59c4044b7 | ||
|
|
3574e21e4f | ||
|
|
5a091956ef | ||
|
|
14e9c58444 | ||
|
|
bfe5b03c69 | ||
|
|
f96f7f840e | ||
|
|
a3bcf26dce | ||
|
|
a7852a89cc | ||
|
|
1b0c761fc0 | ||
|
|
5e4e8d4eda | ||
|
|
bd524d2e1e | ||
|
|
60fe919992 | ||
|
|
b90063b170 | ||
|
|
d9fce49b08 | ||
|
|
5dbee2a270 | ||
|
|
4779106139 | ||
|
|
bf2de81873 | ||
|
|
28cdedc9aa | ||
|
|
7e90571404 | ||
|
|
42bbe63927 | ||
|
|
7ddbea697e | ||
|
|
b4860de34d | ||
|
|
576f23d5fb | ||
|
|
86548fc7bf | ||
|
|
b3b4d992fe | ||
|
|
d72daf5f39 | ||
|
|
9ad959a478 | ||
|
|
cc00a321da | ||
|
|
de74273108 | ||
|
|
a7658c7573 | ||
|
|
48a85ee6e0 | ||
|
|
461b789515 | ||
|
|
b71ff6fbb8 | ||
|
|
1bcdcce93a | ||
|
|
c09bfca634 | ||
|
|
36c5f02bfb | ||
|
|
eae6e5d9a1 | ||
|
|
364813dd73 | ||
|
|
1a2b1f283b | ||
|
|
a0e5cf4ecc | ||
|
|
820f7b4d93 | ||
|
|
727866f090 | ||
|
|
3d45cdc339 | ||
|
|
02a557aa67 | ||
|
|
6da27e5976 | ||
|
|
19a6e324c4 | ||
|
|
62eadbc174 | ||
|
|
ae783d4f45 | ||
|
|
1241a902e3 | ||
|
|
fdba648afb | ||
|
|
b070e7de07 | ||
|
|
d0741946c7 | ||
|
|
080226dd72 | ||
|
|
3cb6a5cfac | ||
|
|
758971e068 | ||
|
|
8739ab9c66 | ||
|
|
e8e47c39d7 | ||
|
|
446c101018 | ||
|
|
3654591a1b | ||
|
|
7fb1c9dd35 | ||
|
|
0fffaccdf4 | ||
|
|
5902b241f9 | ||
|
|
784386fddc | ||
|
|
d424583cbf | ||
|
|
290b821a3a | ||
|
|
a0dfa8d421 | ||
|
|
ceb00f6748 | ||
|
|
9bd328e147 | ||
|
|
6fb5c312c3 | ||
|
|
3f9ff7254f | ||
|
|
f7a3acfaf4 | ||
|
|
e4451ccaf8 | ||
|
|
2adb640821 | ||
|
|
765038274c | ||
|
|
2cbdced974 | ||
|
|
fc5d9ae100 | ||
|
|
506168ab83 | ||
|
|
088fd6334b | ||
|
|
94cda90a6e | ||
|
|
78601d90c9 | ||
|
|
fa4ac95ecc | ||
|
|
dd4d4e23ad | ||
|
|
acba86993d | ||
|
|
0fc55451c2 | ||
|
|
5c0bd8a810 | ||
|
|
1aebc95145 | ||
|
|
1d3f20b666 | ||
|
|
eb2e106871 | ||
|
|
f9a887c8c6 | ||
|
|
67ab810cb2 | ||
|
|
3e0d84383e | ||
|
|
d245ea3eaa | ||
|
|
843fc03bf4 | ||
|
|
c83c635067 | ||
|
|
f605eb14e8 | ||
|
|
fd02d77c59 | ||
|
|
0da8fb379d | ||
|
|
257a43298b | ||
|
|
a2d3bcd571 | ||
|
|
d4142c2cdd | ||
|
|
e50d66b303 | ||
|
|
08b6433843 | ||
|
|
8cd536aab5 | ||
|
|
2b495c648f | ||
|
|
06048b6d71 | ||
|
|
bb22287336 | ||
|
|
a45942a966 | ||
|
|
85d621846d | ||
|
|
534acf8df2 | ||
|
|
5a6d4387ea | ||
|
|
317e844886 | ||
|
|
b1f62a2735 | ||
|
|
65e4fea4ef | ||
|
|
faca8512c5 | ||
|
|
2121387aa2 | ||
|
|
72c4444a60 | ||
|
|
2d8d2e7e6f | ||
|
|
49bff5d544 | ||
|
|
806a80cef1 | ||
|
|
c6f0d5e478 | ||
|
|
bf30aba005 | ||
|
|
727778b730 | ||
|
|
b081ffce50 | ||
|
|
e46779f87b | ||
|
|
dabe8c1bb7 | ||
|
|
4042f88bd8 | ||
|
|
a0947d0c54 | ||
|
|
a34fd9ac89 | ||
|
|
aa68322641 | ||
|
|
2d76aebb8e | ||
|
|
7cc1d23bc7 | ||
|
|
0bd2103a8c | ||
|
|
7d8916b6e9 | ||
|
|
8b5df3ca17 | ||
|
|
ffdfe99d37 | ||
|
|
7efa67e7e6 | ||
|
|
d69808c204 | ||
|
|
de360c61dd | ||
|
|
6b04ddfad1 | ||
|
|
0d854ce906 | ||
|
|
38fdf26405 | ||
|
|
6835c15d9b | ||
|
|
fa38bfd4e8 | ||
|
|
4d5c6d11ab | ||
|
|
9e80da705a | ||
|
|
9b04391f82 | ||
|
|
8f6c0796e3 | ||
|
|
326fcf4398 | ||
|
|
fdda27abd1 | ||
|
|
7e8c62104a | ||
|
|
fb213f6e74 | ||
|
|
22e75c1691 | ||
|
|
919f221be9 | ||
|
|
da7d64667e | ||
|
|
d19c6a1573 | ||
|
|
5cd23039a0 | ||
|
|
19b18d3d0a | ||
|
|
101947da8b | ||
|
|
d3c3c23630 | ||
|
|
abc14316ea | ||
|
|
b66621f9c6 | ||
|
|
aa5510531d | ||
|
|
12b846586c | ||
|
|
b705f5b743 | ||
|
|
18a5fba42b | ||
|
|
b5a3b6f86a | ||
|
|
00f2eda576 | ||
|
|
c70d252dc3 | ||
|
|
2f088ce29e | ||
|
|
ff408c604b | ||
|
|
6621c318db | ||
|
|
22a8ad2fde | ||
|
|
7674dc9b34 | ||
|
|
9e0ca51c2f | ||
|
|
961629d156 | ||
|
|
2cbebf9c99 | ||
|
|
08a4deca17 | ||
|
|
ce9ea7baad | ||
|
|
b35efb9f72 | ||
|
|
c45dfacb41 | ||
|
|
91152a7977 | ||
|
|
0ce081323f | ||
|
|
79486e3393 | ||
|
|
60758dd76b | ||
|
|
e74f659015 | ||
|
|
c1c09fa6b4 | ||
|
|
47c7cb9327 | ||
|
|
4d6256e1a1 | ||
|
|
13180d92e3 | ||
|
|
6b38ef3c9f | ||
|
|
4f5b0634ad | ||
|
|
ea25972257 | ||
|
|
b6168898ec | ||
|
|
da33cb54fe | ||
|
|
35d0458228 | ||
|
|
e6c0280b40 | ||
|
|
15451ff42b | ||
|
|
9ab856e186 | ||
|
|
6e2db1ced6 | ||
|
|
5c4ce8754e | ||
|
|
416486c370 | ||
|
|
2f075be6f8 | ||
|
|
a1494c4c93 | ||
|
|
d79ab5ffeb | ||
|
|
01526a7b37 | ||
|
|
091a02f737 | ||
|
|
aa4996ef28 | ||
|
|
2f4e2bde6b | ||
|
|
e90f6a2fa3 | ||
|
|
be8f1b9fdd | ||
|
|
ba99190f53 | ||
|
|
70088704e2 | ||
|
|
02733e6e58 | ||
|
|
44732a5dd9 | ||
|
|
5bdd35464b | ||
|
|
1eae97731f | ||
|
|
0325a62f18 | ||
|
|
3a5538813c | ||
|
|
1f1b4b95ce | ||
|
|
8c3ed57ecc | ||
|
|
dc8a64fa7d | ||
|
|
0d1e72a764 | ||
|
|
9b3fe09508 | ||
|
|
7c0cfb1da2 | ||
|
|
66429ce331 | ||
|
|
bce859569f | ||
|
|
425fb8905b | ||
|
|
4f59c7f77f | ||
|
|
21d1faa793 | ||
|
|
b9f3991d03 | ||
|
|
c4de879b20 | ||
|
|
ee5686e91a | ||
|
|
2a795e9138 | ||
|
|
9a6aa8f8c6 | ||
|
|
3794b181d5 | ||
|
|
f09256a24e | ||
|
|
34fca9d6f5 | ||
|
|
433f10ef93 | ||
|
|
9f02f71c52 | ||
|
|
3dcc9bc143 | ||
|
|
7311895894 | ||
|
|
a7cab51369 | ||
|
|
437b11af9a | ||
|
|
820b5cbb86 | ||
|
|
e6a30f899c | ||
|
|
0bc6507df3 | ||
|
|
71c3c632d7 | ||
|
|
99a5f2cd9d | ||
|
|
fb00b79d19 | ||
|
|
7782aa7379 | ||
|
|
f3ee4a5dac | ||
|
|
a8d6e59a7a | ||
|
|
1d4b1870cf | ||
|
|
f63ad2dd69 | ||
|
|
6903eed4e7 | ||
|
|
b9e922c658 | ||
|
|
54d8c64ad5 | ||
|
|
2f1fe5468e | ||
|
|
24d15d4274 | ||
|
|
0bc7aa52d8 | ||
|
|
e52603b4a7 | ||
|
|
3b88712402 | ||
|
|
33e9ef2106 | ||
|
|
689fe4ed9a | ||
|
|
b82d026f39 | ||
|
|
009059def4 | ||
|
|
03ff61d113 | ||
|
|
c00914bea2 | ||
|
|
944d1c0a4a | ||
|
|
2cf23e33e3 | ||
|
|
e2a0b42d03 | ||
|
|
894e9818ac | ||
|
|
de18e256ce | ||
|
|
1a3c70ce1b | ||
|
|
bd4a603e16 | ||
|
|
358b80d782 | ||
|
|
824ec42005 | ||
|
|
466935e9a3 | ||
|
|
b52d3e3a7b | ||
|
|
888a6da4a5 | ||
|
|
972ac73dd9 | ||
|
|
d8b238d5f1 | ||
|
|
63206c3da2 | ||
|
|
5713de8966 | ||
|
|
58f293fef3 | ||
|
|
ffbb2c9689 | ||
|
|
9cd3dcdebf | ||
|
|
f2fe58c3c5 | ||
|
|
b78010aa94 | ||
|
|
49035543b9 | ||
|
|
f9ccf635ca | ||
|
|
e8ea294964 | ||
|
|
19ef2be88b | ||
|
|
30e8b8186f | ||
|
|
741643af5f | ||
|
|
6aaf9ba470 | ||
|
|
5957dc72eb | ||
|
|
e32a9777d7 | ||
|
|
84a8f1eb2b | ||
|
|
6810953014 | ||
|
|
398964945a | ||
|
|
5f43c032f2 | ||
|
|
627cf90de0 | ||
|
|
2bedb36d7f | ||
|
|
e93a95d0cb | ||
|
|
3f31666796 | ||
|
|
3fe8031cf3 | ||
|
|
b27c7ce11b | ||
|
|
ed34c2ca68 | ||
|
|
3ca2e953fb | ||
|
|
d8a7328365 | ||
|
|
f33cd625bf | ||
|
|
80530bb13c | ||
|
|
affc12df4b | ||
|
|
4eedf00025 | ||
|
|
e5acbcc0dd | ||
|
|
1b6743ee53 | ||
|
|
b5fb82d95d | ||
|
|
193aa4e1f2 | ||
|
|
ebd34427c7 | ||
|
|
3d75573889 | ||
|
|
c6240ca415 | ||
|
|
2ee8984b44 | ||
|
|
b7ec587e5b | ||
|
|
47c58bce2b | ||
|
|
96e95ac533 | ||
|
|
b013a065f7 | ||
|
|
74b37d11cf | ||
|
|
c6cc013617 | ||
|
|
f4e1d80a87 | ||
|
|
91dad4060f | ||
|
|
e07cb82c15 | ||
|
|
2770cec187 | ||
|
|
5c3928190a | ||
|
|
9f4b04ea0f | ||
|
|
96d20756ca | ||
|
|
b8454c7f5b | ||
|
|
c84f703f92 | ||
|
|
57c2e867d8 | ||
|
|
553f496d84 | ||
|
|
b1d8aca46a | ||
|
|
8e884fd3ea | ||
|
|
76524b7498 | ||
|
|
65914fb2b2 | ||
|
|
a4d0da0085 | ||
|
|
c9d496e9a0 | ||
|
|
88a951ba4f | ||
|
|
403ceb19dc | ||
|
|
835d3c3d18 | ||
|
|
3135b456be | ||
|
|
0be6d3661a | ||
|
|
6f5f5b4711 | ||
|
|
c6c5f85abb | ||
|
|
7b860f7739 | ||
|
|
e28804c03a | ||
|
|
1b9432824b | ||
|
|
3b71a6b5c5 | ||
|
|
7ce8768c19 | ||
|
|
25e0f12976 | ||
|
|
f168682a68 | ||
|
|
d25058a46d | ||
|
|
4d0c092d9f | ||
|
|
15714ef855 | ||
|
|
eb743beaa3 | ||
|
|
0007535a46 | ||
|
|
8391af026c | ||
|
|
800f656dcf | ||
|
|
088c5f49d9 | ||
|
|
d8d98b6143 | ||
|
|
02fb3b9315 | ||
|
|
4f87db784e | ||
|
|
7e6287b925 | ||
|
|
999cdfd997 | ||
|
|
8d6cb087c6 | ||
|
|
2b7417c728 | ||
|
|
3c455cf1c1 | ||
|
|
5135185e31 | ||
|
|
b461f26e5d | ||
|
|
faef5b8570 | ||
|
|
0a20e04c10 | ||
|
|
d19bb2308d | ||
|
|
d8dd07d9ef | ||
|
|
36c56243cd | ||
|
|
23d06b79a6 | ||
|
|
e4c4e923ee | ||
|
|
936d2f1f47 | ||
|
|
07018b5060 | ||
|
|
ac90d6ae5c | ||
|
|
2141f2c4c5 | ||
|
|
81870777a9 | ||
|
|
845092dcad | ||
|
|
dd473d1e1e | ||
|
|
d2869bf4ed | ||
|
|
891a3f4b29 | ||
|
|
6767b50d75 | ||
|
|
d9e4b562a9 | ||
|
|
fb3243f1bc | ||
|
|
5fe1497c92 | ||
|
|
5446592d44 | ||
|
|
40ed9a53c9 | ||
|
|
f7ac8cea90 | ||
|
|
4ef5d1f0cd | ||
|
|
6992615c98 | ||
|
|
43dabb2825 | ||
|
|
05e40e5681 | ||
|
|
2c4536e137 | ||
|
|
3dc81058a0 | ||
|
|
bd84667a2b | ||
|
|
e5b6a12977 | ||
|
|
ca415d5d62 | ||
|
|
99b4fe7278 | ||
|
|
327e164869 | ||
|
|
25bc571f30 | ||
|
|
38c7e8a1d2 | ||
|
|
ca282e28e0 | ||
|
|
5ef59c06df | ||
|
|
8f55d385d6 | ||
|
|
cd2fc25c19 | ||
|
|
709983eea6 | ||
|
|
40e99b1b80 | ||
|
|
488684d960 | ||
|
|
f35034b989 | ||
|
|
9d6f9b1f26 | ||
|
|
6148a608fb | ||
|
|
3fa9e70383 | ||
|
|
16fea6f009 | ||
|
|
df9ed835ca | ||
|
|
e394c8f0f2 | ||
|
|
21974f7288 | ||
|
|
5ef0170d77 | ||
|
|
c21dcf14de | ||
|
|
a8d20d4e1e | ||
|
|
8b307485b0 | ||
|
|
4544afe422 | ||
|
|
9d7eba5f70 | ||
|
|
be0aee95f2 | ||
|
|
3469ed7ab9 | ||
|
|
1f223aa7e6 | ||
|
|
0a431ead5e | ||
|
|
f750796444 | ||
|
|
c82bcd882a | ||
|
|
7d0ec33b54 | ||
|
|
43d48b3feb | ||
|
|
2e406d2687 | ||
|
|
3f30808104 | ||
|
|
ab10217c86 | ||
|
|
00430491ca | ||
|
|
109202329f | ||
|
|
3b1509f307 | ||
|
|
7ad7b08bed | ||
|
|
4650e5e8fb | ||
|
|
af59d4929e | ||
|
|
e34100bab4 | ||
|
|
d9b3a9fb60 | ||
|
|
39eec59c90 | ||
|
|
d651d0d472 | ||
|
|
87a2358a65 | ||
|
|
cef4e313e1 | ||
|
|
7cc1a4eba0 | ||
|
|
c6cc0133b3 | ||
|
|
7748e68440 | ||
|
|
6c2230a076 | ||
|
|
66b233eaea | ||
|
|
fed58f3920 | ||
|
|
815b2be7f7 | ||
|
|
f420c9fb7c | ||
|
|
01bdf10b94 | ||
|
|
ddedc1ee92 | ||
|
|
9e9703183f | ||
|
|
adce9e6220 | ||
|
|
c499133bbe | ||
|
|
8f505c2dcc | ||
|
|
b320064418 | ||
|
|
a643933d16 | ||
|
|
2659ec5887 | ||
|
|
9f8327926d | ||
|
|
7a568dc118 | ||
|
|
c946b06be5 | ||
|
|
c65fd0e477 | ||
|
|
8f8217e928 | ||
|
|
6c9e1799c7 | ||
|
|
decd70eb23 | ||
|
|
a20d40618f | ||
|
|
b4af8ec751 | ||
|
|
feb5eed8a5 | ||
|
|
f4fa39c70e | ||
|
|
7b7165f5d8 | ||
|
|
13897db6d3 | ||
|
|
c4afdb7198 | ||
|
|
0284975f3f | ||
|
|
269e3d1303 | ||
|
|
8c81f7ece9 | ||
|
|
f6e0593774 | ||
|
|
3d80e549cb | ||
|
|
acc7448dc5 | ||
|
|
35d3d3de72 | ||
|
|
0372e07eb0 | ||
|
|
00221e3410 | ||
|
|
9c264611cf | ||
|
|
31d7f70e27 | ||
|
|
04e8b83d45 | ||
|
|
e87bf71f20 | ||
|
|
2dd70c8d62 | ||
|
|
a3886702a3 | ||
|
|
713af133a0 | ||
|
|
057ffffbf2 | ||
|
|
a81d6d124b | ||
|
|
23f07fde5e | ||
|
|
b42b760393 | ||
|
|
bf6f4c48c0 | ||
|
|
6133f04841 | ||
|
|
3c18f79ea4 | ||
|
|
2af8342fea | ||
|
|
fc3db7942d | ||
|
|
164e2b2678 | ||
|
|
b7b28390df | ||
|
|
a6e996d921 | ||
|
|
07e666345d | ||
|
|
007f10d29d | ||
|
|
f9284d20ca | ||
|
|
9050869781 | ||
|
|
54975de0f3 | ||
|
|
a7aead5138 | ||
|
|
6868f66f24 | ||
|
|
3c0b00e42d | ||
|
|
3327388f1f | ||
|
|
04497aec36 | ||
|
|
aa9d596930 | ||
|
|
f96e68cd11 | ||
|
|
013227323d | ||
|
|
19cbb442ee | ||
|
|
c0e7f341cb | ||
|
|
0a1ba7c434 | ||
|
|
b708dabf98 | ||
|
|
899e56e5b8 | ||
|
|
f6d3bd8ccb | ||
|
|
deb5677a57 | ||
|
|
5c464c3f5a | ||
|
|
cceef33fef | ||
|
|
ed8174fe36 | ||
|
|
3c8906494f | ||
|
|
6e745e9882 | ||
|
|
fb4e9c3772 | ||
|
|
2c282f9550 | ||
|
|
d92d41cb05 | ||
|
|
82e7050561 | ||
|
|
44f92d4169 | ||
|
|
2f1fae38dd | ||
|
|
9fe99979fe | ||
|
|
6399de0b51 | ||
|
|
959740a585 | ||
|
|
159b082828 | ||
|
|
8e7c5af16c | ||
|
|
c1645ab7a7 | ||
|
|
2ae2bfdde9 | ||
|
|
3fe93968a6 | ||
|
|
79a2d715b0 | ||
|
|
50b271c868 | ||
|
|
a57f28ac83 | ||
|
|
3f3747a2fe | ||
|
|
d133913c3d | ||
|
|
e049cef00a | ||
|
|
eb8176971c | ||
|
|
5bbfca45fa | ||
|
|
9b500cd867 | ||
|
|
b52cae6575 | ||
|
|
35a0142f9b | ||
|
|
d4f6ef4f1b | ||
|
|
11024deaae | ||
|
|
5a038de1d5 | ||
|
|
903982e896 | ||
|
|
6355c404cc | ||
|
|
92b9cb5d43 | ||
|
|
7580383d26 | ||
|
|
ba0934e41e | ||
|
|
a6a1021521 | ||
|
|
33b4d83c73 | ||
|
|
6cf630c74a | ||
|
|
736fe5b84e | ||
|
|
4241bde6ea | ||
|
|
b4ce14d744 | ||
|
|
10832a2ccc | ||
|
|
91aca44f67 | ||
|
|
96cfbb201a | ||
|
|
b2bc155701 | ||
|
|
a70ef5594d | ||
|
|
6d991586fd | ||
|
|
f8890ca841 | ||
|
|
0752c6b24f | ||
|
|
3ffaf2c0e1 | ||
|
|
a3e0fbd606 | ||
|
|
9c8ceb6b4e | ||
|
|
bebce2c053 | ||
|
|
34c6790762 | ||
|
|
a5fb009b62 | ||
|
|
9671ca5ebf | ||
|
|
5334ea393e | ||
|
|
2aaacc02e3 | ||
|
|
222e929b2d | ||
|
|
6f16d35a92 | ||
|
|
d7a2ccf5ac | ||
|
|
9ce605221a | ||
|
|
1e930fe950 | ||
|
|
4dc158589c | ||
|
|
4525eb457b | ||
|
|
56a2e07dc2 | ||
|
|
9b7fe9ac31 | ||
|
|
c3da07ccf7 | ||
|
|
b691a56d51 | ||
|
|
13e0a1b5bb | ||
|
|
646baddce4 | ||
|
|
02f61c323d | ||
|
|
1e3d2df9e7 | ||
|
|
e43fae86f1 | ||
|
|
c6151e34e0 | ||
|
|
45cb991254 | ||
|
|
839bc99f94 | ||
|
|
0aeb1ca408 | ||
|
|
cd76a906f4 | ||
|
|
e438491938 | ||
|
|
307b35a5bf | ||
|
|
217c9720ea | ||
|
|
778c7dc5f2 | ||
|
|
4c80154437 | ||
|
|
6bd9529a66 | ||
|
|
33ea2b4844 | ||
|
|
5c807f3dc8 | ||
|
|
9063b559c4 | ||
|
|
40f6df7160 | ||
|
|
95165aa92f | ||
|
|
d96fcdb35c | ||
|
|
5efabdcea3 | ||
|
|
2d57dc0565 | ||
|
|
576629f825 | ||
|
|
5badb9d151 | ||
|
|
45dc379d9a | ||
|
|
49c0c9f44c | ||
|
|
ef5fa4d062 | ||
|
|
35b66d5d94 | ||
|
|
d0b749a43c | ||
|
|
bcc4d4e8c6 | ||
|
|
41bff0b293 | ||
|
|
dfc7f35ef1 | ||
|
|
0bbbbdde80 | ||
|
|
5fa5284b58 | ||
|
|
b7ef82cb67 | ||
|
|
1233780265 | ||
|
|
dd095279c8 | ||
|
|
4d5200c50f | ||
|
|
1bcd675ead | ||
|
|
2a3d3de0b2 | ||
|
|
b124836f3a | ||
|
|
93ba95971b | ||
|
|
7b193b3745 | ||
|
|
2b647d2405 | ||
|
|
7714cca599 | ||
|
|
42511aa9cf | ||
|
|
ace2a2f3d1 | ||
|
|
2062fe7a08 | ||
|
|
d4c02c3988 | ||
|
|
4c1496b4a4 | ||
|
|
eec876295d | ||
|
|
3093175f54 | ||
|
|
dd05c4d34a | ||
|
|
57e3a40321 | ||
|
|
9e70152076 | ||
|
|
e1da83a8f6 | ||
|
|
8108198613 | ||
|
|
915849b2ce | ||
|
|
2e96302336 | ||
|
|
051cd744ad | ||
|
|
53fbc165ba | ||
|
|
1862bcf867 | ||
|
|
8909d1d144 | ||
|
|
a2f0f20284 | ||
|
|
1951b52aa5 | ||
|
|
cd7a9345ec | ||
|
|
dba4c33c81 | ||
|
|
153c239c9b | ||
|
|
4034ab4182 | ||
|
|
9c917c3bd3 | ||
|
|
cca0222e1d | ||
|
|
682db9b81f | ||
|
|
3e000f9be1 | ||
|
|
548a552638 | ||
|
|
1d5b5b7d15 | ||
|
|
91aa4586e2 | ||
|
|
6d3bc43ef6 | ||
|
|
0f63e26641 | ||
|
|
ab2ef69c6a | ||
|
|
621350515e | ||
|
|
03ed5c398a | ||
|
|
65d6f8c018 | ||
|
|
79d0673ae6 | ||
|
|
cbd488e19f | ||
|
|
380d869195 | ||
|
|
73893f2a33 | ||
|
|
ad81470d35 | ||
|
|
fc140d04ef | ||
|
|
a0257ed7e7 | ||
|
|
4769487c3b | ||
|
|
29def587ff | ||
|
|
f35d0b2b37 | ||
|
|
283e92d55d | ||
|
|
c82b26d334 | ||
|
|
2753e02cda | ||
|
|
fde733c205 | ||
|
|
f730591f2c | ||
|
|
94eac1e79d | ||
|
|
9f2b6d0ec6 | ||
|
|
7d7d0ea001 | ||
|
|
794101691c | ||
|
|
a443144a5c | ||
|
|
73f0867061 | ||
|
|
f97db93212 | ||
|
|
d36708933c | ||
|
|
14f82ea0a9 | ||
|
|
c41dd6495d | ||
|
|
1005c99e9c | ||
|
|
f4478fc762 | ||
|
|
c5ed308ea5 | ||
|
|
3ab5ba6149 | ||
|
|
9b2fde962c | ||
|
|
571a7dc42d | ||
|
|
3421fffa9b | ||
|
|
c25619fd63 | ||
|
|
76adb13a64 | ||
|
|
33b1eed361 | ||
|
|
c44891a1a8 | ||
|
|
f31f52ff1c | ||
|
|
6ad9a56bd9 | ||
|
|
a5c2fc4f9d | ||
|
|
0a65006bb4 | ||
|
|
3db896c4e2 | ||
|
|
e80322021a | ||
|
|
48316ba60d | ||
|
|
c0f1493473 | ||
|
|
ccbd128fa2 | ||
|
|
46817caa68 | ||
|
|
775c8624d4 | ||
|
|
36eedc987c | ||
|
|
3b8f31c888 | ||
|
|
a34fa74eaa | ||
|
|
d6b2d8dcb5 | ||
|
|
aab0599280 | ||
|
|
dfa8eaf24e | ||
|
|
63d55cb797 | ||
|
|
c642eee0d2 | ||
|
|
5f33d298d7 | ||
|
|
fc39fd7519 | ||
|
|
7f442f7485 | ||
|
|
0ee3203a5a | ||
|
|
43a5df8780 | ||
|
|
0949df014b | ||
|
|
01f4dd8f97 | ||
|
|
8b7599f5d9 | ||
|
|
9bdc320cf8 | ||
|
|
d9c8285806 | ||
|
|
4b8344082f | ||
|
|
e5cf76b460 | ||
|
|
422ca87a12 | ||
|
|
a512ccca28 | ||
|
|
ba215be97c | ||
|
|
ca16050681 | ||
|
|
06e4ed1bb4 | ||
|
|
d4a8ae5743 | ||
|
|
a4f2f811d3 | ||
|
|
ebaba95eb3 | ||
|
|
31f7769199 | ||
|
|
7726be94be | ||
|
|
f2cbcea6d7 | ||
|
|
5d6a28954b | ||
|
|
319f1deceb | ||
|
|
3f14958741 | ||
|
|
42ba4a5c56 | ||
|
|
c804c395ed | ||
|
|
58c8cf1a3a | ||
|
|
76ea8c86b7 | ||
|
|
050378fa72 | ||
|
|
29d858d58c | ||
|
|
dc45920afb | ||
|
|
15fcb57e2f | ||
|
|
91ee85152c | ||
|
|
aa7bf7af1e | ||
|
|
02c1ba39ad | ||
|
|
8e8d9426df | ||
|
|
57f301815d | ||
|
|
dfc9dc713c | ||
|
|
1a0cad7f5f | ||
|
|
3df436f0d8 | ||
|
|
d737fca295 | ||
|
|
da5a3532d7 | ||
|
|
27111e7b29 | ||
|
|
b847bc0aba | ||
|
|
6eb0bc50e2 | ||
|
|
7530f03bf6 | ||
|
|
24a9633edc | ||
|
|
7e1a5ce445 | ||
|
|
2ffdbc7fc0 | ||
|
|
52c7b68cc3 | ||
|
|
ddbcc8e84b | ||
|
|
2bfb195ad6 | ||
|
|
cd2d9517a0 | ||
|
|
19dc312128 | ||
|
|
175659628d | ||
|
|
8fea2b09be | ||
|
|
f77f45b70c | ||
|
|
103a287f11 | ||
|
|
d600ade40c | ||
|
|
a6a7cba121 | ||
|
|
7fff635a3f | ||
|
|
7a749b88c7 | ||
|
|
1ce6a7f4be | ||
|
|
a092910fdd | ||
|
|
bb77838b3e | ||
|
|
1001f1bd36 | ||
|
|
de0e5583a5 | ||
|
|
cbd2a44350 | ||
|
|
c888e461ba | ||
|
|
d135522087 | ||
|
|
ce2b148dd2 | ||
|
|
2d075c4dd6 | ||
|
|
bcd1841f71 | ||
|
|
029cf4ad1f | ||
|
|
ed7fc86d69 | ||
|
|
82a9e43b6f | ||
|
|
9ae2c731ed | ||
|
|
7d1ba466b4 | ||
|
|
4f1d8678ea | ||
|
|
4bd72ebc63 | ||
|
|
e5986e0ae2 | ||
|
|
fae39e4bc9 | ||
|
|
dbe8357dd5 | ||
|
|
3234f0bdd7 | ||
|
|
47a4d58009 | ||
|
|
4ae60da58d | ||
|
|
47f995bda3 | ||
|
|
42721628eb | ||
|
|
f42ab957d4 | ||
|
|
ce9d0d7e82 | ||
|
|
baf79dda21 | ||
|
|
b71a9bc097 | ||
|
|
129632cd6b | ||
|
|
aca8899c4d | ||
|
|
5c3d91e65e | ||
|
|
0205d827f1 | ||
|
|
225c31d583 | ||
|
|
b18d87ddba | ||
|
|
25298c72bb | ||
|
|
3df3d27533 | ||
|
|
cbb0b57018 | ||
|
|
65f205bca8 | ||
|
|
1cc7f80109 | ||
|
|
213a0a18a5 | ||
|
|
1a24d599b3 | ||
|
|
d80be60e2b | ||
|
|
0ffe79d76c | ||
|
|
db36d0a375 | ||
|
|
ff659a0be3 | ||
|
|
8485b12102 | ||
|
|
d889cc3c5a | ||
|
|
7bb65fca4e | ||
|
|
8aaa5951ca | ||
|
|
d58f3b7520 | ||
|
|
e5a636a159 | ||
|
|
51f314e907 | ||
|
|
531fa30b69 | ||
|
|
2b3bb81fae | ||
|
|
80f80cd31f | ||
|
|
79705fbf11 | ||
|
|
191a4e569e | ||
|
|
1cac35be03 | ||
|
|
6d48100f44 | ||
|
|
4627af3e90 | ||
|
|
913952ffe1 | ||
|
|
67bf6afc89 | ||
|
|
06064decd2 | ||
|
|
4cca9f17df | ||
|
|
74a89223c0 | ||
|
|
2954017836 | ||
|
|
a03262fc01 | ||
|
|
d65ce6fc2c | ||
|
|
d27e1eee25 | ||
|
|
b1f00bb708 | ||
|
|
e0f1e79e6a | ||
|
|
d70b7d41e8 | ||
|
|
43af9f3fad | ||
|
|
bc53dd6830 | ||
|
|
263616ef01 | ||
|
|
285da0542e | ||
|
|
17f7e2f892 | ||
|
|
a29d8f1d68 | ||
|
|
8965172603 | ||
|
|
03c2967337 | ||
|
|
5b154a0da4 | ||
|
|
b2c8c326d7 | ||
|
|
96aedaa91f | ||
|
|
a22ad1ec32 | ||
|
|
a4244defb5 | ||
|
|
57328e55f3 | ||
|
|
87c32aeb40 | ||
|
|
2e01e0c30e | ||
|
|
a12b2de74a | ||
|
|
6b01d8f99b | ||
|
|
eac4f6062e | ||
|
|
5583cf0a5f | ||
|
|
57d772fa23 | ||
|
|
1bdc3988a9 | ||
|
|
2af55baa9a | ||
|
|
0452eec11d | ||
|
|
c4f7db6c04 | ||
|
|
3569529a84 | ||
|
|
70942ac0f6 | ||
|
|
dc02e39918 | ||
|
|
73d6bc35ec | ||
|
|
b1d558d700 | ||
|
|
897480265f | ||
|
|
73724f5a33 | ||
|
|
bdbd495a9e | ||
|
|
1fcf009804 | ||
|
|
914c5752a5 | ||
|
|
201b12a886 | ||
|
|
c5f23ad93d | ||
|
|
28d62009a7 | ||
|
|
1a5a436f82 | ||
|
|
1275ac0569 | ||
|
|
5112fb777e | ||
|
|
f571a944c9 | ||
|
|
bc9aff8c60 | ||
|
|
c4c7ab7888 | ||
|
|
d9819a990c | ||
|
|
aea400e26a | ||
|
|
eb4e7735c1 | ||
|
|
4b498ae8cd | ||
|
|
158e2a4ca9 | ||
|
|
b011d48d82 | ||
|
|
8ac3e725f8 | ||
|
|
9a4aef0358 | ||
|
|
7d3146234a | ||
|
|
5d2ca6493d | ||
|
|
4752f9aa37 | ||
|
|
025d3a03d6 | ||
|
|
aec06183e7 | ||
|
|
aa28abd517 | ||
|
|
7430b31697 | ||
|
|
759f72169a | ||
|
|
1f7135be61 | ||
|
|
6942f9c1cf | ||
|
|
d9da75d1c0 | ||
|
|
7ab7372be4 | ||
|
|
3503c98857 | ||
|
|
708c3f1e2a | ||
|
|
6f645e8619 | ||
|
|
bce7ca7ac4 | ||
|
|
350465c25d | ||
|
|
5b9c70ae22 | ||
|
|
9b30afeca9 | ||
|
|
c1b202c119 | ||
|
|
41cfe5d2ca | ||
|
|
05339e184f | ||
|
|
447127d956 | ||
|
|
394334fbea | ||
|
|
9f8cd33d43 | ||
|
|
f066e28c35 | ||
|
|
b349a449bb | ||
|
|
1c5898d396 | ||
|
|
6802967863 | ||
|
|
0462f18680 | ||
|
|
af6699098f | ||
|
|
6b7e7dc124 | ||
|
|
6bae4c6a66 | ||
|
|
46da918dbe | ||
|
|
bb7e5f17b5 | ||
|
|
b9d03114c2 | ||
|
|
436b1ce176 | ||
|
|
50fb5d83f1 | ||
|
|
fda672f806 | ||
|
|
2bf783b04d | ||
|
|
2f72b23a0d | ||
|
|
85336f9777 | ||
|
|
174d964553 | ||
|
|
cf8677248e | ||
|
|
1e6a3163af | ||
|
|
e008919978 | ||
|
|
4814066c67 | ||
|
|
f17f8b48c2 | ||
|
|
ab0aec0ac5 | ||
|
|
b49a641ba5 | ||
|
|
2f50051426 | ||
|
|
43cc32db40 | ||
|
|
b4d6f6b947 | ||
|
|
71ff533623 | ||
|
|
e33a5bbef5 | ||
|
|
6c0112c2be | ||
|
|
15bbf26b93 | ||
|
|
87c97efce0 | ||
|
|
6c4aee1479 | ||
|
|
73549a9044 | ||
|
|
30fdd3e184 | ||
|
|
c97eb5d63f | ||
|
|
5729c7d5e7 | ||
|
|
d77b13efcb | ||
|
|
c43faca7b9 | ||
|
|
892ddd5724 | ||
|
|
a9de779f33 | ||
|
|
1c2f016ba0 | ||
|
|
7b4d9140af | ||
|
|
c1fc87ff4e | ||
|
|
cd5ea5d4e0 | ||
|
|
30c01089f5 | ||
|
|
89825a2b21 | ||
|
|
a743b75bb4 | ||
|
|
f7ebf8dedd | ||
|
|
f6220cab3b | ||
|
|
0c5e1c4138 | ||
|
|
03fe431f1a | ||
|
|
a8e4554fec | ||
|
|
e81b09b9aa | ||
|
|
c6e846e0ae | ||
|
|
03dcfb5c4b | ||
|
|
3e54da03e2 | ||
|
|
c4b3196917 | ||
|
|
0d81e7933e | ||
|
|
b2a2735034 | ||
|
|
f865c5de90 | ||
|
|
4159369e8b | ||
|
|
170693cf0b | ||
|
|
4e7b5d4af8 | ||
|
|
67bf789fcf | ||
|
|
f5cf616c2f | ||
|
|
7975f19817 | ||
|
|
017602056d | ||
|
|
c63f43854b | ||
|
|
5cc71ec2ad | ||
|
|
80e81f8475 | ||
|
|
3685c8e015 | ||
|
|
99e943c365 | ||
|
|
21818e71f5 | ||
|
|
bcc6d25e21 | ||
|
|
7b885ee0d3 | ||
|
|
c10e808a4f | ||
|
|
54e9be0ed8 | ||
|
|
938cdf316a | ||
|
|
27c33911e6 | ||
|
|
e88f8759e7 | ||
|
|
f2992e3165 | ||
|
|
c71fd1ee3b | ||
|
|
fb45b19fdc | ||
|
|
c4ea8d4942 | ||
|
|
646aa131ef | ||
|
|
0adb40bf92 | ||
|
|
17d6014bf1 | ||
|
|
ff57cd4eaf | ||
|
|
74bd7c3744 | ||
|
|
cfbb283f85 | ||
|
|
74a3c4451b | ||
|
|
be3643c962 | ||
|
|
f4aa546af8 | ||
|
|
67b876a7f4 | ||
|
|
94e177c0ef | ||
|
|
1bd83cc9bc | ||
|
|
ecda3f4a7d | ||
|
|
8f972a965d | ||
|
|
0f051fc57c | ||
|
|
c3f8925f46 | ||
|
|
5d0cab2052 | ||
|
|
4d7492f682 | ||
|
|
fc9d99080f | ||
|
|
47ebac0276 | ||
|
|
cb3fca03e9 | ||
|
|
abbbd83729 | ||
|
|
1743ab7812 | ||
|
|
324e3972a6 | ||
|
|
1502dda2ab | ||
|
|
f31b2c4a79 | ||
|
|
89b9b60e0c | ||
|
|
de9ba12779 | ||
|
|
9cc4359c04 | ||
|
|
67eaf120b9 | ||
|
|
b8353c4a33 | ||
|
|
7013033ae4 | ||
|
|
cb8cd03852 | ||
|
|
f63fb62014 | ||
|
|
2e4fb86b86 | ||
|
|
5e776a07dd | ||
|
|
81e637e50e | ||
|
|
0971ad0a80 | ||
|
|
8267ded7ec | ||
|
|
7f36ea55f5 | ||
|
|
72a051f2d3 | ||
|
|
51b197888c | ||
|
|
cd63865d31 | ||
|
|
5be5685a09 | ||
|
|
76b2f25d46 | ||
|
|
58607d4a7f | ||
|
|
c0a5b16a7f | ||
|
|
3a0c69005b | ||
|
|
5c295fb9e3 | ||
|
|
4ee212e7d5 | ||
|
|
70651ce994 | ||
|
|
a778a91106 | ||
|
|
cfc31eead3 | ||
|
|
da0a1bbe9f | ||
|
|
bc66fb33e9 | ||
|
|
b1b6493755 | ||
|
|
1d189f239b | ||
|
|
5b90691bcc | ||
|
|
d1d5972277 | ||
|
|
2c07d77368 | ||
|
|
642cfbf59a | ||
|
|
bb1367cfb9 | ||
|
|
11724aa555 | ||
|
|
4d374712de | ||
|
|
eb9003187d | ||
|
|
caba444962 | ||
|
|
5b6c8c191f | ||
|
|
dd51589f67 | ||
|
|
b02a31d4b9 | ||
|
|
0e7878b406 | ||
|
|
cae91ce0c5 | ||
|
|
67a65a2aa9 | ||
|
|
364b0a7163 | ||
|
|
d6419f2059 | ||
|
|
6f7ad7ef91 | ||
|
|
5ae588833b | ||
|
|
a70dbac0e6 | ||
|
|
4d34a02afe | ||
|
|
4db4f45897 | ||
|
|
2d5280fc95 | ||
|
|
b8d568761e | ||
|
|
29309dac9a | ||
|
|
7f7745071a | ||
|
|
1914032e35 | ||
|
|
f44c8f1205 | ||
|
|
fe2ef4e61c | ||
|
|
fc3eda55c7 | ||
|
|
8adf1cdd02 | ||
|
|
adbbc656d4 | ||
|
|
8e852bce02 | ||
|
|
bb461b009f | ||
|
|
03559a3cc4 | ||
|
|
7bb2fe128a | ||
|
|
2312e17a8e | ||
|
|
9835b382da | ||
|
|
1eacc6fbff | ||
|
|
85187239b6 | ||
|
|
819ff2a902 | ||
|
|
c744104a18 | ||
|
|
c87801f0a9 | ||
|
|
39735594bd | ||
|
|
30964f65e4 | ||
|
|
ee0c7fd8bf | ||
|
|
dfdecef8e7 | ||
|
|
edcdfeb057 | ||
|
|
47f0de9836 | ||
|
|
9ba657797e | ||
|
|
07442a6f84 | ||
|
|
3faf3c84be | ||
|
|
abcacc82f3 | ||
|
|
9544b7d968 | ||
|
|
babbc8bcd6 | ||
|
|
12809ebc74 | ||
|
|
b45a601ad2 | ||
|
|
f099dc6a37 | ||
|
|
803caddbd4 | ||
|
|
4d7b988018 | ||
|
|
c1f88a4e14 | ||
|
|
5d9ec0b208 | ||
|
|
1877cacf9c | ||
|
|
2f4978cfea | ||
|
|
d27a1103fa | ||
|
|
b85bb95082 | ||
|
|
db7f93cff3 | ||
|
|
85e271098f | ||
|
|
17001e2f74 | ||
|
|
c82f4f0d45 | ||
|
|
88247a3af9 | ||
|
|
158578a406 | ||
|
|
19314e7e06 | ||
|
|
8bcbc6d545 | ||
|
|
ef55e6d476 | ||
|
|
295ef3dc1d | ||
|
|
9d125c9e79 | ||
|
|
86363986fc | ||
|
|
0a2dbbc58b | ||
|
|
673a966541 | ||
|
|
db1e69813b | ||
|
|
e60d56f060 | ||
|
|
328e062ae9 | ||
|
|
0523c2ea4b | ||
|
|
c5c7378c63 | ||
|
|
9b2080d036 | ||
|
|
d4b3649640 | ||
|
|
b085993901 | ||
|
|
0d4afad342 | ||
|
|
0da694b845 | ||
|
|
6d5e7d9e81 | ||
|
|
bc08bea284 | ||
|
|
0e5a0661e1 | ||
|
|
a839bd428f | ||
|
|
0277062693 | ||
|
|
7affa5ab69 | ||
|
|
ed22af4e73 | ||
|
|
63ebb6998e | ||
|
|
7914cd47ca | ||
|
|
708dbac70e | ||
|
|
1b62dd5c40 | ||
|
|
4911545843 | ||
|
|
c5cc4b7867 | ||
|
|
eacb614750 | ||
|
|
341e1e7a6d | ||
|
|
a02c820c2d | ||
|
|
2f6890c78a | ||
|
|
516591fe88 | ||
|
|
d2941a9110 | ||
|
|
f7302f710b | ||
|
|
6a02ac7e80 | ||
|
|
d1b86fdef5 | ||
|
|
857cd718df | ||
|
|
c9dc441915 | ||
|
|
a7ca9950fc | ||
|
|
e0dd33e6be | ||
|
|
2e718e1130 |
14
.gitignore
vendored
14
.gitignore
vendored
@@ -1,8 +1,15 @@
|
|||||||
.*
|
|
||||||
!.gitignore
|
!.gitignore
|
||||||
*~
|
*~
|
||||||
*.py[co]
|
*.py[co]
|
||||||
.*.sw[po]
|
.*.sw[po]
|
||||||
|
.cache/
|
||||||
|
.coverage
|
||||||
|
.coveragerc
|
||||||
|
.env
|
||||||
|
.idea/
|
||||||
|
.pytest_cache/
|
||||||
|
.tox/
|
||||||
|
.eggs/
|
||||||
*.egg
|
*.egg
|
||||||
docs/.build
|
docs/.build
|
||||||
docs/_build
|
docs/_build
|
||||||
@@ -13,5 +20,6 @@ env/
|
|||||||
.settings
|
.settings
|
||||||
.project
|
.project
|
||||||
.pydevproject
|
.pydevproject
|
||||||
tests/test_bugfix.py
|
htmlcov/
|
||||||
htmlcov/
|
venv
|
||||||
|
venv3
|
||||||
|
|||||||
17
.landscape.yml
Normal file
17
.landscape.yml
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
pylint:
|
||||||
|
disable:
|
||||||
|
# We use this a lot (e.g. via document._meta)
|
||||||
|
- protected-access
|
||||||
|
|
||||||
|
options:
|
||||||
|
additional-builtins:
|
||||||
|
# add long as valid built-ins.
|
||||||
|
- long
|
||||||
|
|
||||||
|
pyflakes:
|
||||||
|
disable:
|
||||||
|
# undefined variables are already covered by pylint (and exclude long)
|
||||||
|
- F821
|
||||||
|
|
||||||
|
ignore-paths:
|
||||||
|
- benchmark.py
|
||||||
12
.pre-commit-config.yaml
Normal file
12
.pre-commit-config.yaml
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
fail_fast: false
|
||||||
|
repos:
|
||||||
|
- repo: https://github.com/ambv/black
|
||||||
|
rev: 19.10b0
|
||||||
|
hooks:
|
||||||
|
- id: black
|
||||||
|
- repo: https://gitlab.com/pycqa/flake8
|
||||||
|
rev: 3.8.0a2
|
||||||
|
hooks:
|
||||||
|
- id: flake8
|
||||||
|
additional_dependencies:
|
||||||
|
- flake8-import-order
|
||||||
20
.readthedocs.yml
Normal file
20
.readthedocs.yml
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
# .readthedocs.yml
|
||||||
|
# Read the Docs configuration file
|
||||||
|
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
|
||||||
|
|
||||||
|
# Required
|
||||||
|
version: 2
|
||||||
|
|
||||||
|
# Build documentation in the docs/ directory with Sphinx
|
||||||
|
sphinx:
|
||||||
|
configuration: docs/conf.py
|
||||||
|
|
||||||
|
# Optionally set the version of Python and requirements required to build your docs
|
||||||
|
python:
|
||||||
|
version: 3.7
|
||||||
|
install:
|
||||||
|
- requirements: docs/requirements.txt
|
||||||
|
# docs/conf.py is importing mongoengine
|
||||||
|
# so mongoengine needs to be installed as well
|
||||||
|
- method: setuptools
|
||||||
|
path: .
|
||||||
124
.travis.yml
124
.travis.yml
@@ -1,33 +1,107 @@
|
|||||||
# http://travis-ci.org/#!/MongoEngine/mongoengine
|
# For full coverage, we'd have to test all supported Python, MongoDB, and
|
||||||
|
# PyMongo combinations. However, that would result in an overly long build
|
||||||
|
# with a very large number of jobs, hence we only test a subset of all the
|
||||||
|
# combinations.
|
||||||
|
# * Python3.7, MongoDB v3.4 & the latest PyMongo v3.x is currently the "main" setup,
|
||||||
|
# Other combinations are tested. See below for the details or check the travis jobs
|
||||||
|
|
||||||
|
# We should periodically check MongoDB Server versions supported by MongoDB
|
||||||
|
# Inc., add newly released versions to the test matrix, and remove versions
|
||||||
|
# which have reached their End of Life. See:
|
||||||
|
# 1. https://www.mongodb.com/support-policy.
|
||||||
|
# 2. https://docs.mongodb.com/ecosystem/drivers/driver-compatibility-reference/#python-driver-compatibility
|
||||||
|
#
|
||||||
|
# Reminder: Update README.rst if you change MongoDB versions we test.
|
||||||
|
|
||||||
language: python
|
language: python
|
||||||
services: mongodb
|
dist: xenial
|
||||||
python:
|
python:
|
||||||
- "2.6"
|
- 3.6
|
||||||
- "2.7"
|
- 3.7
|
||||||
- "3.2"
|
- 3.8
|
||||||
- "3.3"
|
- 3.9
|
||||||
|
- pypy3
|
||||||
|
|
||||||
env:
|
env:
|
||||||
- PYMONGO=dev DJANGO=1.6
|
global:
|
||||||
- PYMONGO=dev DJANGO=1.5.5
|
- MONGODB_3_4=3.4.19
|
||||||
- PYMONGO=dev DJANGO=1.4.10
|
- MONGODB_3_6=3.6.13
|
||||||
- PYMONGO=2.5 DJANGO=1.6
|
- MONGODB_4_0=4.0.13
|
||||||
- PYMONGO=2.5 DJANGO=1.5.5
|
|
||||||
- PYMONGO=2.5 DJANGO=1.4.10
|
- PYMONGO_3_4=3.4
|
||||||
- PYMONGO=3.2 DJANGO=1.6
|
- PYMONGO_3_6=3.6
|
||||||
- PYMONGO=3.2 DJANGO=1.5.5
|
- PYMONGO_3_9=3.9
|
||||||
- PYMONGO=3.3 DJANGO=1.6
|
- PYMONGO_3_11=3.11
|
||||||
- PYMONGO=3.3 DJANGO=1.5.5
|
|
||||||
|
- MAIN_PYTHON_VERSION=3.7
|
||||||
|
matrix:
|
||||||
|
- MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_11}
|
||||||
|
|
||||||
|
matrix:
|
||||||
|
# Finish the build as soon as one job fails
|
||||||
|
fast_finish: true
|
||||||
|
|
||||||
|
include:
|
||||||
|
- python: 3.7
|
||||||
|
env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_6}
|
||||||
|
- python: 3.7
|
||||||
|
env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_9}
|
||||||
|
- python: 3.7
|
||||||
|
env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_11}
|
||||||
|
- python: 3.8
|
||||||
|
env: MONGODB=${MONGODB_4_0} PYMONGO=${PYMONGO_3_11}
|
||||||
|
|
||||||
install:
|
install:
|
||||||
- if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi
|
# Install Mongo
|
||||||
- if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi
|
- wget http://fastdl.mongodb.org/linux/mongodb-linux-x86_64-${MONGODB}.tgz
|
||||||
- if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi
|
- tar xzf mongodb-linux-x86_64-${MONGODB}.tgz
|
||||||
- if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi
|
- ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --version
|
||||||
- pip install https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.1.tar.gz#md5=1534bb15cf311f07afaa3aacba1c028b
|
# Install Python dependencies.
|
||||||
- python setup.py install
|
- pip install --upgrade pip
|
||||||
|
- pip install coveralls
|
||||||
|
- pip install pre-commit
|
||||||
|
- pip install tox
|
||||||
|
# tox dryrun to setup the tox venv (we run a mock test).
|
||||||
|
- tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "-k=test_ci_placeholder"
|
||||||
|
|
||||||
|
before_script:
|
||||||
|
- mkdir ${PWD}/mongodb-linux-x86_64-${MONGODB}/data
|
||||||
|
- ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --dbpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/data --logpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/mongodb.log --fork
|
||||||
|
# Run pre-commit hooks (black, flake8, etc) on entire codebase
|
||||||
|
- if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then pre-commit run -a; else echo "pre-commit checks only runs on py37"; fi
|
||||||
|
- mongo --eval 'db.version();' # Make sure mongo is awake
|
||||||
|
|
||||||
script:
|
script:
|
||||||
- python setup.py test
|
- tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "--cov=mongoengine"
|
||||||
|
|
||||||
|
after_success:
|
||||||
|
- if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then coveralls --verbose; else echo "coveralls only sent for py37"; fi
|
||||||
|
|
||||||
notifications:
|
notifications:
|
||||||
irc: "irc.freenode.org#mongoengine"
|
irc: irc.freenode.org#mongoengine
|
||||||
|
|
||||||
|
# Only run builds on the master branch and GitHub releases (tagged as vX.Y.Z)
|
||||||
branches:
|
branches:
|
||||||
only:
|
only:
|
||||||
- master
|
- master
|
||||||
|
- /^v.*$/
|
||||||
|
|
||||||
|
# Whenever a new release is created via GitHub, publish it on PyPI.
|
||||||
|
deploy:
|
||||||
|
provider: pypi
|
||||||
|
user: the_drow
|
||||||
|
password:
|
||||||
|
secure: QMyatmWBnC6ZN3XLW2+fTBDU4LQcp1m/LjR2/0uamyeUzWKdlOoh/Wx5elOgLwt/8N9ppdPeG83ose1jOz69l5G0MUMjv8n/RIcMFSpCT59tGYqn3kh55b0cIZXFT9ar+5cxlif6a5rS72IHm5li7QQyxexJIII6Uxp0kpvUmek=
|
||||||
|
|
||||||
|
# Create a source distribution and a pure python wheel for faster installs.
|
||||||
|
distributions: "sdist bdist_wheel"
|
||||||
|
|
||||||
|
# Only deploy on tagged commits (aka GitHub releases) and only for the parent
|
||||||
|
# repo's builds running Python v3.7 along with PyMongo v3.x and MongoDB v3.4.
|
||||||
|
# We run Travis against many different Python, PyMongo, and MongoDB versions
|
||||||
|
# and we don't want the deploy to occur multiple times).
|
||||||
|
on:
|
||||||
|
tags: true
|
||||||
|
repo: MongoEngine/mongoengine
|
||||||
|
condition: ($PYMONGO = ${PYMONGO_3_11}) && ($MONGODB = ${MONGODB_3_4})
|
||||||
|
python: 3.7
|
||||||
|
|||||||
80
AUTHORS
80
AUTHORS
@@ -12,7 +12,7 @@ Laine Herron https://github.com/LaineHerron
|
|||||||
|
|
||||||
CONTRIBUTORS
|
CONTRIBUTORS
|
||||||
|
|
||||||
Dervived from the git logs, inevitably incomplete but all of whom and others
|
Derived from the git logs, inevitably incomplete but all of whom and others
|
||||||
have submitted patches, reported bugs and generally helped make MongoEngine
|
have submitted patches, reported bugs and generally helped make MongoEngine
|
||||||
that much better:
|
that much better:
|
||||||
|
|
||||||
@@ -119,7 +119,7 @@ that much better:
|
|||||||
* Anton Kolechkin
|
* Anton Kolechkin
|
||||||
* Sergey Nikitin
|
* Sergey Nikitin
|
||||||
* psychogenic
|
* psychogenic
|
||||||
* Stefan Wójcik
|
* Stefan Wójcik (https://github.com/wojcikstefan)
|
||||||
* dimonb
|
* dimonb
|
||||||
* Garry Polley
|
* Garry Polley
|
||||||
* James Slagle
|
* James Slagle
|
||||||
@@ -138,11 +138,10 @@ that much better:
|
|||||||
* hellysmile
|
* hellysmile
|
||||||
* Jaepil Jeong
|
* Jaepil Jeong
|
||||||
* Daniil Sharou
|
* Daniil Sharou
|
||||||
* Stefan Wójcik
|
|
||||||
* Pete Campton
|
* Pete Campton
|
||||||
* Martyn Smith
|
* Martyn Smith
|
||||||
* Marcelo Anton
|
* Marcelo Anton
|
||||||
* Aleksey Porfirov
|
* Aleksey Porfirov (https://github.com/lexqt)
|
||||||
* Nicolas Trippar
|
* Nicolas Trippar
|
||||||
* Manuel Hermann
|
* Manuel Hermann
|
||||||
* Gustavo Gawryszewski
|
* Gustavo Gawryszewski
|
||||||
@@ -171,7 +170,7 @@ that much better:
|
|||||||
* Michael Bartnett (https://github.com/michaelbartnett)
|
* Michael Bartnett (https://github.com/michaelbartnett)
|
||||||
* Alon Horev (https://github.com/alonho)
|
* Alon Horev (https://github.com/alonho)
|
||||||
* Kelvin Hammond (https://github.com/kelvinhammond)
|
* Kelvin Hammond (https://github.com/kelvinhammond)
|
||||||
* Jatin- (https://github.com/jatin-)
|
* Jatin Chopra (https://github.com/jatin)
|
||||||
* Paul Uithol (https://github.com/PaulUithol)
|
* Paul Uithol (https://github.com/PaulUithol)
|
||||||
* Thom Knowles (https://github.com/fleat)
|
* Thom Knowles (https://github.com/fleat)
|
||||||
* Paul (https://github.com/squamous)
|
* Paul (https://github.com/squamous)
|
||||||
@@ -189,3 +188,74 @@ that much better:
|
|||||||
* Tom (https://github.com/tomprimozic)
|
* Tom (https://github.com/tomprimozic)
|
||||||
* j0hnsmith (https://github.com/j0hnsmith)
|
* j0hnsmith (https://github.com/j0hnsmith)
|
||||||
* Damien Churchill (https://github.com/damoxc)
|
* Damien Churchill (https://github.com/damoxc)
|
||||||
|
* Jonathan Simon Prates (https://github.com/jonathansp)
|
||||||
|
* Thiago Papageorgiou (https://github.com/tmpapageorgiou)
|
||||||
|
* Omer Katz (https://github.com/thedrow)
|
||||||
|
* Falcon Dai (https://github.com/falcondai)
|
||||||
|
* Polyrabbit (https://github.com/polyrabbit)
|
||||||
|
* Sagiv Malihi (https://github.com/sagivmalihi)
|
||||||
|
* Dmitry Konishchev (https://github.com/KonishchevDmitry)
|
||||||
|
* Martyn Smith (https://github.com/martynsmith)
|
||||||
|
* Andrei Zbikowski (https://github.com/b1naryth1ef)
|
||||||
|
* Ronald van Rij (https://github.com/ronaldvanrij)
|
||||||
|
* François Schmidts (https://github.com/jaesivsm)
|
||||||
|
* Eric Plumb (https://github.com/professorplumb)
|
||||||
|
* Damien Churchill (https://github.com/damoxc)
|
||||||
|
* Aleksandr Sorokoumov (https://github.com/Gerrrr)
|
||||||
|
* Clay McClure (https://github.com/claymation)
|
||||||
|
* Bruno Rocha (https://github.com/rochacbruno)
|
||||||
|
* Norberto Leite (https://github.com/nleite)
|
||||||
|
* Bob Cribbs (https://github.com/bocribbz)
|
||||||
|
* Jay Shirley (https://github.com/jshirley)
|
||||||
|
* David Bordeynik (https://github.com/DavidBord)
|
||||||
|
* Axel Haustant (https://github.com/noirbizarre)
|
||||||
|
* David Czarnecki (https://github.com/czarneckid)
|
||||||
|
* Vyacheslav Murashkin (https://github.com/a4tunado)
|
||||||
|
* André Ericson https://github.com/aericson)
|
||||||
|
* Mikhail Moshnogorsky (https://github.com/mikhailmoshnogorsky)
|
||||||
|
* Diego Berrocal (https://github.com/cestdiego)
|
||||||
|
* Matthew Ellison (https://github.com/seglberg)
|
||||||
|
* Jimmy Shen (https://github.com/jimmyshen)
|
||||||
|
* J. Fernando Sánchez (https://github.com/balkian)
|
||||||
|
* Michael Chase (https://github.com/rxsegrxup)
|
||||||
|
* Eremeev Danil (https://github.com/elephanter)
|
||||||
|
* Catstyle Lee (https://github.com/Catstyle)
|
||||||
|
* Kiryl Yermakou (https://github.com/rma4ok)
|
||||||
|
* Matthieu Rigal (https://github.com/MRigal)
|
||||||
|
* Charanpal Dhanjal (https://github.com/charanpald)
|
||||||
|
* Emmanuel Leblond (https://github.com/touilleMan)
|
||||||
|
* Breeze.Kay (https://github.com/9nix00)
|
||||||
|
* Vicki Donchenko (https://github.com/kivistein)
|
||||||
|
* Emile Caron (https://github.com/emilecaron)
|
||||||
|
* Amit Lichtenberg (https://github.com/amitlicht)
|
||||||
|
* Gang Li (https://github.com/iici-gli)
|
||||||
|
* Lars Butler (https://github.com/larsbutler)
|
||||||
|
* George Macon (https://github.com/gmacon)
|
||||||
|
* Ashley Whetter (https://github.com/AWhetter)
|
||||||
|
* Paul-Armand Verhaegen (https://github.com/paularmand)
|
||||||
|
* Steven Rossiter (https://github.com/BeardedSteve)
|
||||||
|
* Luo Peng (https://github.com/RussellLuo)
|
||||||
|
* Bryan Bennett (https://github.com/bbenne10)
|
||||||
|
* Gilb's Gilb's (https://github.com/gilbsgilbs)
|
||||||
|
* Joshua Nedrud (https://github.com/Neurostack)
|
||||||
|
* Shu Shen (https://github.com/shushen)
|
||||||
|
* xiaost7 (https://github.com/xiaost7)
|
||||||
|
* Victor Varvaryuk
|
||||||
|
* Stanislav Kaledin (https://github.com/sallyruthstruik)
|
||||||
|
* Dmitry Yantsen (https://github.com/mrTable)
|
||||||
|
* Renjianxin (https://github.com/Davidrjx)
|
||||||
|
* Erdenezul Batmunkh (https://github.com/erdenezul)
|
||||||
|
* Andy Yankovsky (https://github.com/werat)
|
||||||
|
* Bastien Gérard (https://github.com/bagerard)
|
||||||
|
* Trevor Hall (https://github.com/tjhall13)
|
||||||
|
* Gleb Voropaev (https://github.com/buggyspace)
|
||||||
|
* Paulo Amaral (https://github.com/pauloAmaral)
|
||||||
|
* Gaurav Dadhania (https://github.com/GVRV)
|
||||||
|
* Yurii Andrieiev (https://github.com/yandrieiev)
|
||||||
|
* Filip Kucharczyk (https://github.com/Pacu2)
|
||||||
|
* Eric Timmons (https://github.com/daewok)
|
||||||
|
* Matthew Simpson (https://github.com/mcsimps2)
|
||||||
|
* Leonardo Domingues (https://github.com/leodmgs)
|
||||||
|
* Agustin Barto (https://github.com/abarto)
|
||||||
|
* Stankiewicz Mateusz (https://github.com/mas15)
|
||||||
|
* Felix Schultheiß (https://github.com/felix-smashdocs)
|
||||||
|
|||||||
@@ -14,38 +14,77 @@ Before starting to write code, look for existing `tickets
|
|||||||
<https://github.com/MongoEngine/mongoengine/issues?state=open>`_ or `create one
|
<https://github.com/MongoEngine/mongoengine/issues?state=open>`_ or `create one
|
||||||
<https://github.com/MongoEngine/mongoengine/issues>`_ for your specific
|
<https://github.com/MongoEngine/mongoengine/issues>`_ for your specific
|
||||||
issue or feature request. That way you avoid working on something
|
issue or feature request. That way you avoid working on something
|
||||||
that might not be of interest or that has already been addressed. If in doubt
|
that might not be of interest or that has already been addressed. If in doubt
|
||||||
post to the `user group <http://groups.google.com/group/mongoengine-users>`
|
post to the `user group <http://groups.google.com/group/mongoengine-users>`
|
||||||
|
|
||||||
Supported Interpreters
|
Supported Interpreters
|
||||||
----------------------
|
----------------------
|
||||||
|
|
||||||
MongoEngine supports CPython 2.6 and newer. Language
|
MongoEngine supports CPython 3.5 and newer as well as Pypy3.
|
||||||
features not supported by all interpreters can not be used.
|
Language features not supported by all interpreters can not be used.
|
||||||
Please also ensure that your code is properly converted by
|
|
||||||
`2to3 <http://docs.python.org/library/2to3.html>`_ for Python 3 support.
|
Python3 codebase
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
Since 0.20, the codebase is exclusively Python 3.
|
||||||
|
|
||||||
|
Earlier versions were exclusively Python2, and were relying on 2to3 to support Python3 installs.
|
||||||
|
Travis runs the tests against the main Python 3.x versions.
|
||||||
|
|
||||||
|
|
||||||
Style Guide
|
Style Guide
|
||||||
-----------
|
-----------
|
||||||
|
|
||||||
MongoEngine aims to follow `PEP8 <http://www.python.org/dev/peps/pep-0008/>`_
|
MongoEngine's codebase is formatted with `black <https://github.com/python/black>`_, other tools like
|
||||||
including 4 space indents and 79 character line limits.
|
flake8 are also used. Those tools will run as part of the CI and will fail in case the code is not formatted properly.
|
||||||
|
|
||||||
|
To install all development tools, simply run the following commands:
|
||||||
|
|
||||||
|
.. code-block:: console
|
||||||
|
|
||||||
|
$ python -m pip install -r requirements-dev.txt
|
||||||
|
|
||||||
|
|
||||||
|
You can install `pre-commit <https://pre-commit.com/>`_ into your git hooks,
|
||||||
|
to automatically check and fix any formatting issue before creating a
|
||||||
|
git commit.
|
||||||
|
|
||||||
|
To enable ``pre-commit`` simply run:
|
||||||
|
|
||||||
|
.. code-block:: console
|
||||||
|
|
||||||
|
$ pre-commit install
|
||||||
|
|
||||||
|
See the ``.pre-commit-config.yaml`` configuration file for more information
|
||||||
|
on how it works.
|
||||||
|
|
||||||
Testing
|
Testing
|
||||||
-------
|
-------
|
||||||
|
|
||||||
All tests are run on `Travis <http://travis-ci.org/MongoEngine/mongoengine>`_
|
All tests are run on `Travis <http://travis-ci.org/MongoEngine/mongoengine>`_
|
||||||
and any pull requests are automatically tested by Travis. Any pull requests
|
and any pull requests are automatically tested. Any pull requests without
|
||||||
without tests will take longer to be integrated and might be refused.
|
tests will take longer to be integrated and might be refused.
|
||||||
|
|
||||||
|
You may also submit a simple failing test as a pull request if you don't know
|
||||||
|
how to fix it, it will be easier for other people to work on it and it may get
|
||||||
|
fixed faster.
|
||||||
|
|
||||||
General Guidelines
|
General Guidelines
|
||||||
------------------
|
------------------
|
||||||
|
|
||||||
- Avoid backward breaking changes if at all possible.
|
- Avoid backward breaking changes if at all possible.
|
||||||
|
- If you *have* to introduce a breaking change, make it very clear in your
|
||||||
|
pull request's description. Also, describe how users of this package
|
||||||
|
should adapt to the breaking change in docs/upgrade.rst.
|
||||||
- Write inline documentation for new classes and methods.
|
- Write inline documentation for new classes and methods.
|
||||||
- Write tests and make sure they pass (make sure you have a mongod
|
- Write tests and make sure they pass (make sure you have a mongod
|
||||||
running on the default port, then execute ``python setup.py test``
|
running on the default port, then execute ``python setup.py test``
|
||||||
from the cmd line to run the test suite).
|
from the cmd line to run the test suite).
|
||||||
|
- Ensure tests pass on all supported Python, PyMongo, and MongoDB versions.
|
||||||
|
You can test various Python and PyMongo versions locally by executing
|
||||||
|
``tox``. For different MongoDB versions, you can rely on our automated
|
||||||
|
Travis tests.
|
||||||
|
- Add enhancements or problematic bug fixes to docs/changelog.rst.
|
||||||
- Add yourself to AUTHORS :)
|
- Add yourself to AUTHORS :)
|
||||||
|
|
||||||
Documentation
|
Documentation
|
||||||
@@ -59,3 +98,8 @@ just make your changes to the inline documentation of the appropriate
|
|||||||
branch and submit a `pull request <https://help.github.com/articles/using-pull-requests>`_.
|
branch and submit a `pull request <https://help.github.com/articles/using-pull-requests>`_.
|
||||||
You might also use the github `Edit <https://github.com/blog/844-forking-with-the-edit-button>`_
|
You might also use the github `Edit <https://github.com/blog/844-forking-with-the-edit-button>`_
|
||||||
button.
|
button.
|
||||||
|
|
||||||
|
If you want to test your documentation changes locally, you need to install
|
||||||
|
the ``sphinx`` and ``sphinx_rtd_theme`` packages. Once these are installed,
|
||||||
|
go to the ``docs`` directory, run ``make html`` and inspect the updated docs
|
||||||
|
by running ``open _build/html/index.html``.
|
||||||
|
|||||||
119
README.rst
119
README.rst
@@ -4,39 +4,82 @@ MongoEngine
|
|||||||
:Info: MongoEngine is an ORM-like layer on top of PyMongo.
|
:Info: MongoEngine is an ORM-like layer on top of PyMongo.
|
||||||
:Repository: https://github.com/MongoEngine/mongoengine
|
:Repository: https://github.com/MongoEngine/mongoengine
|
||||||
:Author: Harry Marr (http://github.com/hmarr)
|
:Author: Harry Marr (http://github.com/hmarr)
|
||||||
:Maintainer: Ross Lawley (http://github.com/rozza)
|
:Maintainer: Stefan Wójcik (http://github.com/wojcikstefan)
|
||||||
|
|
||||||
.. image:: https://secure.travis-ci.org/MongoEngine/mongoengine.png?branch=master
|
.. image:: https://travis-ci.org/MongoEngine/mongoengine.svg?branch=master
|
||||||
:target: http://travis-ci.org/MongoEngine/mongoengine
|
:target: https://travis-ci.org/MongoEngine/mongoengine
|
||||||
|
|
||||||
|
.. image:: https://coveralls.io/repos/github/MongoEngine/mongoengine/badge.svg?branch=master
|
||||||
|
:target: https://coveralls.io/github/MongoEngine/mongoengine?branch=master
|
||||||
|
|
||||||
|
.. image:: https://landscape.io/github/MongoEngine/mongoengine/master/landscape.svg?style=flat
|
||||||
|
:target: https://landscape.io/github/MongoEngine/mongoengine/master
|
||||||
|
:alt: Code Health
|
||||||
|
|
||||||
|
.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
|
||||||
|
:target: https://github.com/ambv/black
|
||||||
|
|
||||||
About
|
About
|
||||||
=====
|
=====
|
||||||
MongoEngine is a Python Object-Document Mapper for working with MongoDB.
|
MongoEngine is a Python Object-Document Mapper for working with MongoDB.
|
||||||
Documentation available at http://mongoengine-odm.rtfd.org - there is currently
|
Documentation is available at https://mongoengine-odm.readthedocs.io - there
|
||||||
a `tutorial <http://readthedocs.org/docs/mongoengine-odm/en/latest/tutorial.html>`_, a `user guide
|
is currently a `tutorial <https://mongoengine-odm.readthedocs.io/tutorial.html>`_,
|
||||||
<https://mongoengine-odm.readthedocs.org/en/latest/guide/index.html>`_ and an `API reference
|
a `user guide <https://mongoengine-odm.readthedocs.io/guide/index.html>`_, and
|
||||||
<http://readthedocs.org/docs/mongoengine-odm/en/latest/apireference.html>`_.
|
an `API reference <https://mongoengine-odm.readthedocs.io/apireference.html>`_.
|
||||||
|
|
||||||
|
Supported MongoDB Versions
|
||||||
|
==========================
|
||||||
|
MongoEngine is currently tested against MongoDB v3.4, v3.6 and v4.0. Future versions
|
||||||
|
should be supported as well, but aren't actively tested at the moment. Make
|
||||||
|
sure to open an issue or submit a pull request if you experience any problems
|
||||||
|
with MongoDB version > 4.0.
|
||||||
|
|
||||||
Installation
|
Installation
|
||||||
============
|
============
|
||||||
If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
|
We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of
|
||||||
you can use ``easy_install -U mongoengine``. Otherwise, you can download the
|
`pip <https://pip.pypa.io/>`_. You can then use ``python -m pip install -U mongoengine``.
|
||||||
source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python
|
You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
|
||||||
setup.py install``.
|
and thus you can use ``easy_install -U mongoengine``. Another option is
|
||||||
|
`pipenv <https://docs.pipenv.org/>`_. You can then use ``pipenv install mongoengine``
|
||||||
|
to both create the virtual environment and install the package. Otherwise, you can
|
||||||
|
download the source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and
|
||||||
|
run ``python setup.py install``.
|
||||||
|
|
||||||
|
The support for Python2 was dropped with MongoEngine 0.20.0
|
||||||
|
|
||||||
Dependencies
|
Dependencies
|
||||||
============
|
============
|
||||||
- pymongo 2.5+
|
All of the dependencies can easily be installed via `python -m pip <https://pip.pypa.io/>`_.
|
||||||
- sphinx (optional - for documentation generation)
|
At the very least, you'll need these two packages to use MongoEngine:
|
||||||
|
|
||||||
|
- pymongo>=3.4
|
||||||
|
|
||||||
|
If you utilize a ``DateTimeField``, you might also use a more flexible date parser:
|
||||||
|
|
||||||
|
- dateutil>=2.1.0
|
||||||
|
|
||||||
|
If you need to use an ``ImageField`` or ``ImageGridFsProxy``:
|
||||||
|
|
||||||
|
- Pillow>=2.0.0
|
||||||
|
|
||||||
|
If you need to use signals:
|
||||||
|
|
||||||
|
- blinker>=1.3
|
||||||
|
|
||||||
Examples
|
Examples
|
||||||
========
|
========
|
||||||
Some simple examples of what MongoEngine code looks like::
|
Some simple examples of what MongoEngine code looks like:
|
||||||
|
|
||||||
|
.. code :: python
|
||||||
|
|
||||||
|
from mongoengine import *
|
||||||
|
connect('mydb')
|
||||||
|
|
||||||
class BlogPost(Document):
|
class BlogPost(Document):
|
||||||
title = StringField(required=True, max_length=200)
|
title = StringField(required=True, max_length=200)
|
||||||
posted = DateTimeField(default=datetime.datetime.now)
|
posted = DateTimeField(default=datetime.datetime.utcnow)
|
||||||
tags = ListField(StringField(max_length=50))
|
tags = ListField(StringField(max_length=50))
|
||||||
|
meta = {'allow_inheritance': True}
|
||||||
|
|
||||||
class TextPost(BlogPost):
|
class TextPost(BlogPost):
|
||||||
content = StringField(required=True)
|
content = StringField(required=True)
|
||||||
@@ -56,31 +99,52 @@ Some simple examples of what MongoEngine code looks like::
|
|||||||
|
|
||||||
# Iterate over all posts using the BlogPost superclass
|
# Iterate over all posts using the BlogPost superclass
|
||||||
>>> for post in BlogPost.objects:
|
>>> for post in BlogPost.objects:
|
||||||
... print '===', post.title, '==='
|
... print('===', post.title, '===')
|
||||||
... if isinstance(post, TextPost):
|
... if isinstance(post, TextPost):
|
||||||
... print post.content
|
... print(post.content)
|
||||||
... elif isinstance(post, LinkPost):
|
... elif isinstance(post, LinkPost):
|
||||||
... print 'Link:', post.url
|
... print('Link:', post.url)
|
||||||
... print
|
|
||||||
...
|
...
|
||||||
|
|
||||||
>>> len(BlogPost.objects)
|
# Count all blog posts and its subtypes
|
||||||
|
>>> BlogPost.objects.count()
|
||||||
2
|
2
|
||||||
>>> len(HtmlPost.objects)
|
>>> TextPost.objects.count()
|
||||||
1
|
1
|
||||||
>>> len(LinkPost.objects)
|
>>> LinkPost.objects.count()
|
||||||
1
|
1
|
||||||
|
|
||||||
# Find tagged posts
|
# Count tagged posts
|
||||||
>>> len(BlogPost.objects(tags='mongoengine'))
|
>>> BlogPost.objects(tags='mongoengine').count()
|
||||||
2
|
2
|
||||||
>>> len(BlogPost.objects(tags='mongodb'))
|
>>> BlogPost.objects(tags='mongodb').count()
|
||||||
1
|
1
|
||||||
|
|
||||||
Tests
|
Tests
|
||||||
=====
|
=====
|
||||||
To run the test suite, ensure you are running a local instance of MongoDB on
|
To run the test suite, ensure you are running a local instance of MongoDB on
|
||||||
the standard port, and run: ``python setup.py test``.
|
the standard port and have ``pytest`` installed. Then, run ``python setup.py test``
|
||||||
|
or simply ``pytest``.
|
||||||
|
|
||||||
|
To run the test suite on every supported Python and PyMongo version, you can
|
||||||
|
use ``tox``. You'll need to make sure you have each supported Python version
|
||||||
|
installed in your environment and then:
|
||||||
|
|
||||||
|
.. code-block:: shell
|
||||||
|
|
||||||
|
# Install tox
|
||||||
|
$ python -m pip install tox
|
||||||
|
# Run the test suites
|
||||||
|
$ tox
|
||||||
|
|
||||||
|
If you wish to run a subset of tests, use the pytest convention:
|
||||||
|
|
||||||
|
.. code-block:: shell
|
||||||
|
|
||||||
|
# Run all the tests in a particular test file
|
||||||
|
$ pytest tests/fields/test_fields.py
|
||||||
|
# Run only particular test class in that file
|
||||||
|
$ pytest tests/fields/test_fields.py::TestField
|
||||||
|
|
||||||
Community
|
Community
|
||||||
=========
|
=========
|
||||||
@@ -88,8 +152,7 @@ Community
|
|||||||
<http://groups.google.com/group/mongoengine-users>`_
|
<http://groups.google.com/group/mongoengine-users>`_
|
||||||
- `MongoEngine Developers mailing list
|
- `MongoEngine Developers mailing list
|
||||||
<http://groups.google.com/group/mongoengine-dev>`_
|
<http://groups.google.com/group/mongoengine-dev>`_
|
||||||
- `#mongoengine IRC channel <http://webchat.freenode.net/?channels=mongoengine>`_
|
|
||||||
|
|
||||||
Contributing
|
Contributing
|
||||||
============
|
============
|
||||||
We welcome contributions! see the`Contribution guidelines <https://github.com/MongoEngine/mongoengine/blob/master/CONTRIBUTING.rst>`_
|
We welcome contributions! See the `Contribution guidelines <https://github.com/MongoEngine/mongoengine/blob/master/CONTRIBUTING.rst>`_
|
||||||
|
|||||||
282
benchmark.py
282
benchmark.py
@@ -1,282 +0,0 @@
|
|||||||
#!/usr/bin/env python
|
|
||||||
|
|
||||||
import timeit
|
|
||||||
|
|
||||||
|
|
||||||
def cprofile_main():
|
|
||||||
from pymongo import Connection
|
|
||||||
connection = Connection()
|
|
||||||
connection.drop_database('timeit_test')
|
|
||||||
connection.disconnect()
|
|
||||||
|
|
||||||
from mongoengine import Document, DictField, connect
|
|
||||||
connect("timeit_test")
|
|
||||||
|
|
||||||
class Noddy(Document):
|
|
||||||
fields = DictField()
|
|
||||||
|
|
||||||
for i in xrange(1):
|
|
||||||
noddy = Noddy()
|
|
||||||
for j in range(20):
|
|
||||||
noddy.fields["key" + str(j)] = "value " + str(j)
|
|
||||||
noddy.save()
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
"""
|
|
||||||
0.4 Performance Figures ...
|
|
||||||
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - Pymongo
|
|
||||||
3.86744189262
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine
|
|
||||||
6.23374891281
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
|
|
||||||
5.33027005196
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
|
|
||||||
pass - No Cascade
|
|
||||||
|
|
||||||
0.5.X
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - Pymongo
|
|
||||||
3.89597702026
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine
|
|
||||||
21.7735359669
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
|
|
||||||
19.8670389652
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
|
|
||||||
pass - No Cascade
|
|
||||||
|
|
||||||
0.6.X
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - Pymongo
|
|
||||||
3.81559205055
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine
|
|
||||||
10.0446798801
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
|
|
||||||
9.51354718208
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
|
|
||||||
9.02567505836
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine, force=True
|
|
||||||
8.44933390617
|
|
||||||
|
|
||||||
0.7.X
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - Pymongo
|
|
||||||
3.78801012039
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine
|
|
||||||
9.73050498962
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
|
|
||||||
8.33456707001
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
|
|
||||||
8.37778115273
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine, force=True
|
|
||||||
8.36906409264
|
|
||||||
0.8.X
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - Pymongo
|
|
||||||
3.69964408875
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - Pymongo write_concern={"w": 0}
|
|
||||||
3.5526599884
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine
|
|
||||||
7.00959801674
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries without continual assign - MongoEngine
|
|
||||||
5.60943293571
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade=True
|
|
||||||
6.715102911
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True
|
|
||||||
5.50644683838
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False
|
|
||||||
4.69851183891
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False
|
|
||||||
4.68946313858
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
"""
|
|
||||||
|
|
||||||
setup = """
|
|
||||||
from pymongo import MongoClient
|
|
||||||
connection = MongoClient()
|
|
||||||
connection.drop_database('timeit_test')
|
|
||||||
"""
|
|
||||||
|
|
||||||
stmt = """
|
|
||||||
from pymongo import MongoClient
|
|
||||||
connection = MongoClient()
|
|
||||||
|
|
||||||
db = connection.timeit_test
|
|
||||||
noddy = db.noddy
|
|
||||||
|
|
||||||
for i in xrange(10000):
|
|
||||||
example = {'fields': {}}
|
|
||||||
for j in range(20):
|
|
||||||
example['fields']["key"+str(j)] = "value "+str(j)
|
|
||||||
|
|
||||||
noddy.save(example)
|
|
||||||
|
|
||||||
myNoddys = noddy.find()
|
|
||||||
[n for n in myNoddys] # iterate
|
|
||||||
"""
|
|
||||||
|
|
||||||
print "-" * 100
|
|
||||||
print """Creating 10000 dictionaries - Pymongo"""
|
|
||||||
t = timeit.Timer(stmt=stmt, setup=setup)
|
|
||||||
print t.timeit(1)
|
|
||||||
|
|
||||||
stmt = """
|
|
||||||
from pymongo import MongoClient
|
|
||||||
connection = MongoClient()
|
|
||||||
|
|
||||||
db = connection.timeit_test
|
|
||||||
noddy = db.noddy
|
|
||||||
|
|
||||||
for i in xrange(10000):
|
|
||||||
example = {'fields': {}}
|
|
||||||
for j in range(20):
|
|
||||||
example['fields']["key"+str(j)] = "value "+str(j)
|
|
||||||
|
|
||||||
noddy.save(example, write_concern={"w": 0})
|
|
||||||
|
|
||||||
myNoddys = noddy.find()
|
|
||||||
[n for n in myNoddys] # iterate
|
|
||||||
"""
|
|
||||||
|
|
||||||
print "-" * 100
|
|
||||||
print """Creating 10000 dictionaries - Pymongo write_concern={"w": 0}"""
|
|
||||||
t = timeit.Timer(stmt=stmt, setup=setup)
|
|
||||||
print t.timeit(1)
|
|
||||||
|
|
||||||
setup = """
|
|
||||||
from pymongo import MongoClient
|
|
||||||
connection = MongoClient()
|
|
||||||
connection.drop_database('timeit_test')
|
|
||||||
connection.disconnect()
|
|
||||||
|
|
||||||
from mongoengine import Document, DictField, connect
|
|
||||||
connect("timeit_test")
|
|
||||||
|
|
||||||
class Noddy(Document):
|
|
||||||
fields = DictField()
|
|
||||||
"""
|
|
||||||
|
|
||||||
stmt = """
|
|
||||||
for i in xrange(10000):
|
|
||||||
noddy = Noddy()
|
|
||||||
for j in range(20):
|
|
||||||
noddy.fields["key"+str(j)] = "value "+str(j)
|
|
||||||
noddy.save()
|
|
||||||
|
|
||||||
myNoddys = Noddy.objects()
|
|
||||||
[n for n in myNoddys] # iterate
|
|
||||||
"""
|
|
||||||
|
|
||||||
print "-" * 100
|
|
||||||
print """Creating 10000 dictionaries - MongoEngine"""
|
|
||||||
t = timeit.Timer(stmt=stmt, setup=setup)
|
|
||||||
print t.timeit(1)
|
|
||||||
|
|
||||||
stmt = """
|
|
||||||
for i in xrange(10000):
|
|
||||||
noddy = Noddy()
|
|
||||||
fields = {}
|
|
||||||
for j in range(20):
|
|
||||||
fields["key"+str(j)] = "value "+str(j)
|
|
||||||
noddy.fields = fields
|
|
||||||
noddy.save()
|
|
||||||
|
|
||||||
myNoddys = Noddy.objects()
|
|
||||||
[n for n in myNoddys] # iterate
|
|
||||||
"""
|
|
||||||
|
|
||||||
print "-" * 100
|
|
||||||
print """Creating 10000 dictionaries without continual assign - MongoEngine"""
|
|
||||||
t = timeit.Timer(stmt=stmt, setup=setup)
|
|
||||||
print t.timeit(1)
|
|
||||||
|
|
||||||
stmt = """
|
|
||||||
for i in xrange(10000):
|
|
||||||
noddy = Noddy()
|
|
||||||
for j in range(20):
|
|
||||||
noddy.fields["key"+str(j)] = "value "+str(j)
|
|
||||||
noddy.save(write_concern={"w": 0}, cascade=True)
|
|
||||||
|
|
||||||
myNoddys = Noddy.objects()
|
|
||||||
[n for n in myNoddys] # iterate
|
|
||||||
"""
|
|
||||||
|
|
||||||
print "-" * 100
|
|
||||||
print """Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True"""
|
|
||||||
t = timeit.Timer(stmt=stmt, setup=setup)
|
|
||||||
print t.timeit(1)
|
|
||||||
|
|
||||||
stmt = """
|
|
||||||
for i in xrange(10000):
|
|
||||||
noddy = Noddy()
|
|
||||||
for j in range(20):
|
|
||||||
noddy.fields["key"+str(j)] = "value "+str(j)
|
|
||||||
noddy.save(write_concern={"w": 0}, validate=False, cascade=True)
|
|
||||||
|
|
||||||
myNoddys = Noddy.objects()
|
|
||||||
[n for n in myNoddys] # iterate
|
|
||||||
"""
|
|
||||||
|
|
||||||
print "-" * 100
|
|
||||||
print """Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True"""
|
|
||||||
t = timeit.Timer(stmt=stmt, setup=setup)
|
|
||||||
print t.timeit(1)
|
|
||||||
|
|
||||||
stmt = """
|
|
||||||
for i in xrange(10000):
|
|
||||||
noddy = Noddy()
|
|
||||||
for j in range(20):
|
|
||||||
noddy.fields["key"+str(j)] = "value "+str(j)
|
|
||||||
noddy.save(validate=False, write_concern={"w": 0})
|
|
||||||
|
|
||||||
myNoddys = Noddy.objects()
|
|
||||||
[n for n in myNoddys] # iterate
|
|
||||||
"""
|
|
||||||
|
|
||||||
print "-" * 100
|
|
||||||
print """Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False"""
|
|
||||||
t = timeit.Timer(stmt=stmt, setup=setup)
|
|
||||||
print t.timeit(1)
|
|
||||||
|
|
||||||
stmt = """
|
|
||||||
for i in xrange(10000):
|
|
||||||
noddy = Noddy()
|
|
||||||
for j in range(20):
|
|
||||||
noddy.fields["key"+str(j)] = "value "+str(j)
|
|
||||||
noddy.save(force_insert=True, write_concern={"w": 0}, validate=False)
|
|
||||||
|
|
||||||
myNoddys = Noddy.objects()
|
|
||||||
[n for n in myNoddys] # iterate
|
|
||||||
"""
|
|
||||||
|
|
||||||
print "-" * 100
|
|
||||||
print """Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False"""
|
|
||||||
t = timeit.Timer(stmt=stmt, setup=setup)
|
|
||||||
print t.timeit(1)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
||||||
142
benchmarks/test_basic_doc_ops.py
Normal file
142
benchmarks/test_basic_doc_ops.py
Normal file
@@ -0,0 +1,142 @@
|
|||||||
|
from timeit import repeat
|
||||||
|
|
||||||
|
import mongoengine
|
||||||
|
from mongoengine import (
|
||||||
|
BooleanField,
|
||||||
|
Document,
|
||||||
|
EmailField,
|
||||||
|
EmbeddedDocument,
|
||||||
|
EmbeddedDocumentField,
|
||||||
|
IntField,
|
||||||
|
ListField,
|
||||||
|
StringField,
|
||||||
|
)
|
||||||
|
|
||||||
|
mongoengine.connect(db="mongoengine_benchmark_test")
|
||||||
|
|
||||||
|
|
||||||
|
def timeit(f, n=10000):
|
||||||
|
return min(repeat(f, repeat=3, number=n)) / float(n)
|
||||||
|
|
||||||
|
|
||||||
|
def test_basic():
|
||||||
|
class Book(Document):
|
||||||
|
name = StringField()
|
||||||
|
pages = IntField()
|
||||||
|
tags = ListField(StringField())
|
||||||
|
is_published = BooleanField()
|
||||||
|
author_email = EmailField()
|
||||||
|
|
||||||
|
Book.drop_collection()
|
||||||
|
|
||||||
|
def init_book():
|
||||||
|
return Book(
|
||||||
|
name="Always be closing",
|
||||||
|
pages=100,
|
||||||
|
tags=["self-help", "sales"],
|
||||||
|
is_published=True,
|
||||||
|
author_email="alec@example.com",
|
||||||
|
)
|
||||||
|
|
||||||
|
print("Doc initialization: %.3fus" % (timeit(init_book, 1000) * 10 ** 6))
|
||||||
|
|
||||||
|
b = init_book()
|
||||||
|
print("Doc getattr: %.3fus" % (timeit(lambda: b.name, 10000) * 10 ** 6))
|
||||||
|
|
||||||
|
print(
|
||||||
|
"Doc setattr: %.3fus"
|
||||||
|
% (timeit(lambda: setattr(b, "name", "New name"), 10000) * 10 ** 6)
|
||||||
|
)
|
||||||
|
|
||||||
|
print("Doc to mongo: %.3fus" % (timeit(b.to_mongo, 1000) * 10 ** 6))
|
||||||
|
|
||||||
|
print("Doc validation: %.3fus" % (timeit(b.validate, 1000) * 10 ** 6))
|
||||||
|
|
||||||
|
def save_book():
|
||||||
|
b._mark_as_changed("name")
|
||||||
|
b._mark_as_changed("tags")
|
||||||
|
b.save()
|
||||||
|
|
||||||
|
print("Save to database: %.3fus" % (timeit(save_book, 100) * 10 ** 6))
|
||||||
|
|
||||||
|
son = b.to_mongo()
|
||||||
|
print(
|
||||||
|
"Load from SON: %.3fus" % (timeit(lambda: Book._from_son(son), 1000) * 10 ** 6)
|
||||||
|
)
|
||||||
|
|
||||||
|
print(
|
||||||
|
"Load from database: %.3fus" % (timeit(lambda: Book.objects[0], 100) * 10 ** 6)
|
||||||
|
)
|
||||||
|
|
||||||
|
def create_and_delete_book():
|
||||||
|
b = init_book()
|
||||||
|
b.save()
|
||||||
|
b.delete()
|
||||||
|
|
||||||
|
print(
|
||||||
|
"Init + save to database + delete: %.3fms"
|
||||||
|
% (timeit(create_and_delete_book, 10) * 10 ** 3)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_big_doc():
|
||||||
|
class Contact(EmbeddedDocument):
|
||||||
|
name = StringField()
|
||||||
|
title = StringField()
|
||||||
|
address = StringField()
|
||||||
|
|
||||||
|
class Company(Document):
|
||||||
|
name = StringField()
|
||||||
|
contacts = ListField(EmbeddedDocumentField(Contact))
|
||||||
|
|
||||||
|
Company.drop_collection()
|
||||||
|
|
||||||
|
def init_company():
|
||||||
|
return Company(
|
||||||
|
name="MongoDB, Inc.",
|
||||||
|
contacts=[
|
||||||
|
Contact(name="Contact %d" % x, title="CEO", address="Address %d" % x)
|
||||||
|
for x in range(1000)
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
company = init_company()
|
||||||
|
print("Big doc to mongo: %.3fms" % (timeit(company.to_mongo, 100) * 10 ** 3))
|
||||||
|
|
||||||
|
print("Big doc validation: %.3fms" % (timeit(company.validate, 1000) * 10 ** 3))
|
||||||
|
|
||||||
|
company.save()
|
||||||
|
|
||||||
|
def save_company():
|
||||||
|
company._mark_as_changed("name")
|
||||||
|
company._mark_as_changed("contacts")
|
||||||
|
company.save()
|
||||||
|
|
||||||
|
print("Save to database: %.3fms" % (timeit(save_company, 100) * 10 ** 3))
|
||||||
|
|
||||||
|
son = company.to_mongo()
|
||||||
|
print(
|
||||||
|
"Load from SON: %.3fms"
|
||||||
|
% (timeit(lambda: Company._from_son(son), 100) * 10 ** 3)
|
||||||
|
)
|
||||||
|
|
||||||
|
print(
|
||||||
|
"Load from database: %.3fms"
|
||||||
|
% (timeit(lambda: Company.objects[0], 100) * 10 ** 3)
|
||||||
|
)
|
||||||
|
|
||||||
|
def create_and_delete_company():
|
||||||
|
c = init_company()
|
||||||
|
c.save()
|
||||||
|
c.delete()
|
||||||
|
|
||||||
|
print(
|
||||||
|
"Init + save to database + delete: %.3fms"
|
||||||
|
% (timeit(create_and_delete_company, 10) * 10 ** 3)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
test_basic()
|
||||||
|
print("-" * 100)
|
||||||
|
test_big_doc()
|
||||||
161
benchmarks/test_inserts.py
Normal file
161
benchmarks/test_inserts.py
Normal file
@@ -0,0 +1,161 @@
|
|||||||
|
import timeit
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
setup = """
|
||||||
|
from pymongo import MongoClient
|
||||||
|
|
||||||
|
connection = MongoClient()
|
||||||
|
connection.drop_database('mongoengine_benchmark_test')
|
||||||
|
"""
|
||||||
|
|
||||||
|
stmt = """
|
||||||
|
from pymongo import MongoClient
|
||||||
|
|
||||||
|
connection = MongoClient()
|
||||||
|
|
||||||
|
db = connection.mongoengine_benchmark_test
|
||||||
|
noddy = db.noddy
|
||||||
|
|
||||||
|
for i in range(10000):
|
||||||
|
example = {'fields': {}}
|
||||||
|
for j in range(20):
|
||||||
|
example['fields']["key"+str(j)] = "value "+str(j)
|
||||||
|
|
||||||
|
noddy.insert_one(example)
|
||||||
|
|
||||||
|
myNoddys = noddy.find()
|
||||||
|
[n for n in myNoddys] # iterate
|
||||||
|
"""
|
||||||
|
|
||||||
|
print("-" * 100)
|
||||||
|
print("PyMongo: Creating 10000 dictionaries.")
|
||||||
|
t = timeit.Timer(stmt=stmt, setup=setup)
|
||||||
|
print("{}s".format(t.timeit(1)))
|
||||||
|
|
||||||
|
stmt = """
|
||||||
|
from pymongo import MongoClient, WriteConcern
|
||||||
|
connection = MongoClient()
|
||||||
|
|
||||||
|
db = connection.mongoengine_benchmark_test
|
||||||
|
noddy = db.noddy.with_options(write_concern=WriteConcern(w=0))
|
||||||
|
|
||||||
|
for i in range(10000):
|
||||||
|
example = {'fields': {}}
|
||||||
|
for j in range(20):
|
||||||
|
example['fields']["key"+str(j)] = "value "+str(j)
|
||||||
|
|
||||||
|
noddy.insert_one(example)
|
||||||
|
|
||||||
|
myNoddys = noddy.find()
|
||||||
|
[n for n in myNoddys] # iterate
|
||||||
|
"""
|
||||||
|
|
||||||
|
print("-" * 100)
|
||||||
|
print('PyMongo: Creating 10000 dictionaries (write_concern={"w": 0}).')
|
||||||
|
t = timeit.Timer(stmt=stmt, setup=setup)
|
||||||
|
print("{}s".format(t.timeit(1)))
|
||||||
|
|
||||||
|
setup = """
|
||||||
|
from pymongo import MongoClient
|
||||||
|
|
||||||
|
connection = MongoClient()
|
||||||
|
connection.drop_database('mongoengine_benchmark_test')
|
||||||
|
connection.close()
|
||||||
|
|
||||||
|
from mongoengine import Document, DictField, connect
|
||||||
|
connect("mongoengine_benchmark_test")
|
||||||
|
|
||||||
|
class Noddy(Document):
|
||||||
|
fields = DictField()
|
||||||
|
"""
|
||||||
|
|
||||||
|
stmt = """
|
||||||
|
for i in range(10000):
|
||||||
|
noddy = Noddy()
|
||||||
|
for j in range(20):
|
||||||
|
noddy.fields["key"+str(j)] = "value "+str(j)
|
||||||
|
noddy.save()
|
||||||
|
|
||||||
|
myNoddys = Noddy.objects()
|
||||||
|
[n for n in myNoddys] # iterate
|
||||||
|
"""
|
||||||
|
|
||||||
|
print("-" * 100)
|
||||||
|
print("MongoEngine: Creating 10000 dictionaries.")
|
||||||
|
t = timeit.Timer(stmt=stmt, setup=setup)
|
||||||
|
print("{}s".format(t.timeit(1)))
|
||||||
|
|
||||||
|
stmt = """
|
||||||
|
for i in range(10000):
|
||||||
|
noddy = Noddy()
|
||||||
|
fields = {}
|
||||||
|
for j in range(20):
|
||||||
|
fields["key"+str(j)] = "value "+str(j)
|
||||||
|
noddy.fields = fields
|
||||||
|
noddy.save()
|
||||||
|
|
||||||
|
myNoddys = Noddy.objects()
|
||||||
|
[n for n in myNoddys] # iterate
|
||||||
|
"""
|
||||||
|
|
||||||
|
print("-" * 100)
|
||||||
|
print("MongoEngine: Creating 10000 dictionaries (using a single field assignment).")
|
||||||
|
t = timeit.Timer(stmt=stmt, setup=setup)
|
||||||
|
print("{}s".format(t.timeit(1)))
|
||||||
|
|
||||||
|
stmt = """
|
||||||
|
for i in range(10000):
|
||||||
|
noddy = Noddy()
|
||||||
|
for j in range(20):
|
||||||
|
noddy.fields["key"+str(j)] = "value "+str(j)
|
||||||
|
noddy.save(write_concern={"w": 0})
|
||||||
|
|
||||||
|
myNoddys = Noddy.objects()
|
||||||
|
[n for n in myNoddys] # iterate
|
||||||
|
"""
|
||||||
|
|
||||||
|
print("-" * 100)
|
||||||
|
print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}).')
|
||||||
|
t = timeit.Timer(stmt=stmt, setup=setup)
|
||||||
|
print("{}s".format(t.timeit(1)))
|
||||||
|
|
||||||
|
stmt = """
|
||||||
|
for i in range(10000):
|
||||||
|
noddy = Noddy()
|
||||||
|
for j in range(20):
|
||||||
|
noddy.fields["key"+str(j)] = "value "+str(j)
|
||||||
|
noddy.save(write_concern={"w": 0}, validate=False)
|
||||||
|
|
||||||
|
myNoddys = Noddy.objects()
|
||||||
|
[n for n in myNoddys] # iterate
|
||||||
|
"""
|
||||||
|
|
||||||
|
print("-" * 100)
|
||||||
|
print(
|
||||||
|
'MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}, validate=False).'
|
||||||
|
)
|
||||||
|
t = timeit.Timer(stmt=stmt, setup=setup)
|
||||||
|
print("{}s".format(t.timeit(1)))
|
||||||
|
|
||||||
|
stmt = """
|
||||||
|
for i in range(10000):
|
||||||
|
noddy = Noddy()
|
||||||
|
for j in range(20):
|
||||||
|
noddy.fields["key"+str(j)] = "value "+str(j)
|
||||||
|
noddy.save(force_insert=True, write_concern={"w": 0}, validate=False)
|
||||||
|
|
||||||
|
myNoddys = Noddy.objects()
|
||||||
|
[n for n in myNoddys] # iterate
|
||||||
|
"""
|
||||||
|
|
||||||
|
print("-" * 100)
|
||||||
|
print(
|
||||||
|
'MongoEngine: Creating 10000 dictionaries (force_insert=True, write_concern={"w": 0}, validate=False).'
|
||||||
|
)
|
||||||
|
t = timeit.Timer(stmt=stmt, setup=setup)
|
||||||
|
print("{}s".format(t.timeit(1)))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
@@ -33,7 +33,7 @@ clean:
|
|||||||
html:
|
html:
|
||||||
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
||||||
@echo
|
@echo
|
||||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
@echo "Build finished. Check $(BUILDDIR)/html/index.html"
|
||||||
|
|
||||||
dirhtml:
|
dirhtml:
|
||||||
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
|
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
|
||||||
|
|||||||
233
docs/_themes/nature/static/nature.css_t
vendored
233
docs/_themes/nature/static/nature.css_t
vendored
@@ -1,233 +0,0 @@
|
|||||||
/**
|
|
||||||
* Sphinx stylesheet -- default theme
|
|
||||||
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
*/
|
|
||||||
|
|
||||||
@import url("basic.css");
|
|
||||||
|
|
||||||
#changelog p.first {margin-bottom: 0 !important;}
|
|
||||||
#changelog p {margin-top: 0 !important;
|
|
||||||
margin-bottom: 0 !important;}
|
|
||||||
|
|
||||||
/* -- page layout ----------------------------------------------------------- */
|
|
||||||
|
|
||||||
body {
|
|
||||||
font-family: Arial, sans-serif;
|
|
||||||
font-size: 100%;
|
|
||||||
background-color: #111;
|
|
||||||
color: #555;
|
|
||||||
margin: 0;
|
|
||||||
padding: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.documentwrapper {
|
|
||||||
float: left;
|
|
||||||
width: 100%;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.bodywrapper {
|
|
||||||
margin: 0 0 0 230px;
|
|
||||||
}
|
|
||||||
|
|
||||||
hr{
|
|
||||||
border: 1px solid #B1B4B6;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.document {
|
|
||||||
background-color: #eee;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.body {
|
|
||||||
background-color: #ffffff;
|
|
||||||
color: #3E4349;
|
|
||||||
padding: 0 30px 30px 30px;
|
|
||||||
font-size: 0.8em;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.footer {
|
|
||||||
color: #555;
|
|
||||||
width: 100%;
|
|
||||||
padding: 13px 0;
|
|
||||||
text-align: center;
|
|
||||||
font-size: 75%;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.footer a {
|
|
||||||
color: #444;
|
|
||||||
text-decoration: underline;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.related {
|
|
||||||
background-color: #6BA81E;
|
|
||||||
line-height: 32px;
|
|
||||||
color: #fff;
|
|
||||||
text-shadow: 0px 1px 0 #444;
|
|
||||||
font-size: 0.80em;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.related a {
|
|
||||||
color: #E2F3CC;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.sphinxsidebar {
|
|
||||||
font-size: 0.75em;
|
|
||||||
line-height: 1.5em;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.sphinxsidebarwrapper{
|
|
||||||
padding: 20px 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.sphinxsidebar h3,
|
|
||||||
div.sphinxsidebar h4 {
|
|
||||||
font-family: Arial, sans-serif;
|
|
||||||
color: #222;
|
|
||||||
font-size: 1.2em;
|
|
||||||
font-weight: normal;
|
|
||||||
margin: 0;
|
|
||||||
padding: 5px 10px;
|
|
||||||
background-color: #ddd;
|
|
||||||
text-shadow: 1px 1px 0 white
|
|
||||||
}
|
|
||||||
|
|
||||||
div.sphinxsidebar h4{
|
|
||||||
font-size: 1.1em;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.sphinxsidebar h3 a {
|
|
||||||
color: #444;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
div.sphinxsidebar p {
|
|
||||||
color: #888;
|
|
||||||
padding: 5px 20px;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.sphinxsidebar p.topless {
|
|
||||||
}
|
|
||||||
|
|
||||||
div.sphinxsidebar ul {
|
|
||||||
margin: 10px 20px;
|
|
||||||
padding: 0;
|
|
||||||
color: #000;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.sphinxsidebar a {
|
|
||||||
color: #444;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.sphinxsidebar input {
|
|
||||||
border: 1px solid #ccc;
|
|
||||||
font-family: sans-serif;
|
|
||||||
font-size: 1em;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.sphinxsidebar input[type=text]{
|
|
||||||
margin-left: 20px;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* -- body styles ----------------------------------------------------------- */
|
|
||||||
|
|
||||||
a {
|
|
||||||
color: #005B81;
|
|
||||||
text-decoration: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
a:hover {
|
|
||||||
color: #E32E00;
|
|
||||||
text-decoration: underline;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.body h1,
|
|
||||||
div.body h2,
|
|
||||||
div.body h3,
|
|
||||||
div.body h4,
|
|
||||||
div.body h5,
|
|
||||||
div.body h6 {
|
|
||||||
font-family: Arial, sans-serif;
|
|
||||||
background-color: #BED4EB;
|
|
||||||
font-weight: normal;
|
|
||||||
color: #212224;
|
|
||||||
margin: 30px 0px 10px 0px;
|
|
||||||
padding: 5px 0 5px 10px;
|
|
||||||
text-shadow: 0px 1px 0 white
|
|
||||||
}
|
|
||||||
|
|
||||||
div.body h1 { border-top: 20px solid white; margin-top: 0; font-size: 200%; }
|
|
||||||
div.body h2 { font-size: 150%; background-color: #C8D5E3; }
|
|
||||||
div.body h3 { font-size: 120%; background-color: #D8DEE3; }
|
|
||||||
div.body h4 { font-size: 110%; background-color: #D8DEE3; }
|
|
||||||
div.body h5 { font-size: 100%; background-color: #D8DEE3; }
|
|
||||||
div.body h6 { font-size: 100%; background-color: #D8DEE3; }
|
|
||||||
|
|
||||||
a.headerlink {
|
|
||||||
color: #c60f0f;
|
|
||||||
font-size: 0.8em;
|
|
||||||
padding: 0 4px 0 4px;
|
|
||||||
text-decoration: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
a.headerlink:hover {
|
|
||||||
background-color: #c60f0f;
|
|
||||||
color: white;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.body p, div.body dd, div.body li {
|
|
||||||
line-height: 1.5em;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.admonition p.admonition-title + p {
|
|
||||||
display: inline;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.highlight{
|
|
||||||
background-color: white;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.note {
|
|
||||||
background-color: #eee;
|
|
||||||
border: 1px solid #ccc;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.seealso {
|
|
||||||
background-color: #ffc;
|
|
||||||
border: 1px solid #ff6;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.topic {
|
|
||||||
background-color: #eee;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.warning {
|
|
||||||
background-color: #ffe4e4;
|
|
||||||
border: 1px solid #f66;
|
|
||||||
}
|
|
||||||
|
|
||||||
p.admonition-title {
|
|
||||||
display: inline;
|
|
||||||
}
|
|
||||||
|
|
||||||
p.admonition-title:after {
|
|
||||||
content: ":";
|
|
||||||
}
|
|
||||||
|
|
||||||
pre {
|
|
||||||
padding: 10px;
|
|
||||||
background-color: White;
|
|
||||||
color: #222;
|
|
||||||
line-height: 1.2em;
|
|
||||||
border: 1px solid #C6C9CB;
|
|
||||||
font-size: 1.2em;
|
|
||||||
margin: 1.5em 0 1.5em 0;
|
|
||||||
-webkit-box-shadow: 1px 1px 1px #d8d8d8;
|
|
||||||
-moz-box-shadow: 1px 1px 1px #d8d8d8;
|
|
||||||
}
|
|
||||||
|
|
||||||
tt {
|
|
||||||
background-color: #ecf0f3;
|
|
||||||
color: #222;
|
|
||||||
padding: 1px 2px;
|
|
||||||
font-size: 1.2em;
|
|
||||||
font-family: monospace;
|
|
||||||
}
|
|
||||||
54
docs/_themes/nature/static/pygments.css
vendored
54
docs/_themes/nature/static/pygments.css
vendored
@@ -1,54 +0,0 @@
|
|||||||
.c { color: #999988; font-style: italic } /* Comment */
|
|
||||||
.k { font-weight: bold } /* Keyword */
|
|
||||||
.o { font-weight: bold } /* Operator */
|
|
||||||
.cm { color: #999988; font-style: italic } /* Comment.Multiline */
|
|
||||||
.cp { color: #999999; font-weight: bold } /* Comment.preproc */
|
|
||||||
.c1 { color: #999988; font-style: italic } /* Comment.Single */
|
|
||||||
.gd { color: #000000; background-color: #ffdddd } /* Generic.Deleted */
|
|
||||||
.ge { font-style: italic } /* Generic.Emph */
|
|
||||||
.gr { color: #aa0000 } /* Generic.Error */
|
|
||||||
.gh { color: #999999 } /* Generic.Heading */
|
|
||||||
.gi { color: #000000; background-color: #ddffdd } /* Generic.Inserted */
|
|
||||||
.go { color: #111 } /* Generic.Output */
|
|
||||||
.gp { color: #555555 } /* Generic.Prompt */
|
|
||||||
.gs { font-weight: bold } /* Generic.Strong */
|
|
||||||
.gu { color: #aaaaaa } /* Generic.Subheading */
|
|
||||||
.gt { color: #aa0000 } /* Generic.Traceback */
|
|
||||||
.kc { font-weight: bold } /* Keyword.Constant */
|
|
||||||
.kd { font-weight: bold } /* Keyword.Declaration */
|
|
||||||
.kp { font-weight: bold } /* Keyword.Pseudo */
|
|
||||||
.kr { font-weight: bold } /* Keyword.Reserved */
|
|
||||||
.kt { color: #445588; font-weight: bold } /* Keyword.Type */
|
|
||||||
.m { color: #009999 } /* Literal.Number */
|
|
||||||
.s { color: #bb8844 } /* Literal.String */
|
|
||||||
.na { color: #008080 } /* Name.Attribute */
|
|
||||||
.nb { color: #999999 } /* Name.Builtin */
|
|
||||||
.nc { color: #445588; font-weight: bold } /* Name.Class */
|
|
||||||
.no { color: #ff99ff } /* Name.Constant */
|
|
||||||
.ni { color: #800080 } /* Name.Entity */
|
|
||||||
.ne { color: #990000; font-weight: bold } /* Name.Exception */
|
|
||||||
.nf { color: #990000; font-weight: bold } /* Name.Function */
|
|
||||||
.nn { color: #555555 } /* Name.Namespace */
|
|
||||||
.nt { color: #000080 } /* Name.Tag */
|
|
||||||
.nv { color: purple } /* Name.Variable */
|
|
||||||
.ow { font-weight: bold } /* Operator.Word */
|
|
||||||
.mf { color: #009999 } /* Literal.Number.Float */
|
|
||||||
.mh { color: #009999 } /* Literal.Number.Hex */
|
|
||||||
.mi { color: #009999 } /* Literal.Number.Integer */
|
|
||||||
.mo { color: #009999 } /* Literal.Number.Oct */
|
|
||||||
.sb { color: #bb8844 } /* Literal.String.Backtick */
|
|
||||||
.sc { color: #bb8844 } /* Literal.String.Char */
|
|
||||||
.sd { color: #bb8844 } /* Literal.String.Doc */
|
|
||||||
.s2 { color: #bb8844 } /* Literal.String.Double */
|
|
||||||
.se { color: #bb8844 } /* Literal.String.Escape */
|
|
||||||
.sh { color: #bb8844 } /* Literal.String.Heredoc */
|
|
||||||
.si { color: #bb8844 } /* Literal.String.Interpol */
|
|
||||||
.sx { color: #bb8844 } /* Literal.String.Other */
|
|
||||||
.sr { color: #808000 } /* Literal.String.Regex */
|
|
||||||
.s1 { color: #bb8844 } /* Literal.String.Single */
|
|
||||||
.ss { color: #bb8844 } /* Literal.String.Symbol */
|
|
||||||
.bp { color: #999999 } /* Name.Builtin.Pseudo */
|
|
||||||
.vc { color: #ff99ff } /* Name.Variable.Class */
|
|
||||||
.vg { color: #ff99ff } /* Name.Variable.Global */
|
|
||||||
.vi { color: #ff99ff } /* Name.Variable.Instance */
|
|
||||||
.il { color: #009999 } /* Literal.Number.Integer.Long */
|
|
||||||
4
docs/_themes/nature/theme.conf
vendored
4
docs/_themes/nature/theme.conf
vendored
@@ -1,4 +0,0 @@
|
|||||||
[theme]
|
|
||||||
inherit = basic
|
|
||||||
stylesheet = nature.css
|
|
||||||
pygments_style = tango
|
|
||||||
@@ -13,6 +13,7 @@ Documents
|
|||||||
|
|
||||||
.. autoclass:: mongoengine.Document
|
.. autoclass:: mongoengine.Document
|
||||||
:members:
|
:members:
|
||||||
|
:inherited-members:
|
||||||
|
|
||||||
.. attribute:: objects
|
.. attribute:: objects
|
||||||
|
|
||||||
@@ -21,23 +22,30 @@ Documents
|
|||||||
|
|
||||||
.. autoclass:: mongoengine.EmbeddedDocument
|
.. autoclass:: mongoengine.EmbeddedDocument
|
||||||
:members:
|
:members:
|
||||||
|
:inherited-members:
|
||||||
|
|
||||||
.. autoclass:: mongoengine.DynamicDocument
|
.. autoclass:: mongoengine.DynamicDocument
|
||||||
:members:
|
:members:
|
||||||
|
:inherited-members:
|
||||||
|
|
||||||
.. autoclass:: mongoengine.DynamicEmbeddedDocument
|
.. autoclass:: mongoengine.DynamicEmbeddedDocument
|
||||||
:members:
|
:members:
|
||||||
|
:inherited-members:
|
||||||
|
|
||||||
.. autoclass:: mongoengine.document.MapReduceDocument
|
.. autoclass:: mongoengine.document.MapReduceDocument
|
||||||
:members:
|
:members:
|
||||||
|
|
||||||
.. autoclass:: mongoengine.ValidationError
|
.. autoclass:: mongoengine.ValidationError
|
||||||
:members:
|
:members:
|
||||||
|
|
||||||
|
.. autoclass:: mongoengine.FieldDoesNotExist
|
||||||
|
|
||||||
|
|
||||||
Context Managers
|
Context Managers
|
||||||
================
|
================
|
||||||
|
|
||||||
.. autoclass:: mongoengine.context_managers.switch_db
|
.. autoclass:: mongoengine.context_managers.switch_db
|
||||||
|
.. autoclass:: mongoengine.context_managers.switch_collection
|
||||||
.. autoclass:: mongoengine.context_managers.no_dereference
|
.. autoclass:: mongoengine.context_managers.no_dereference
|
||||||
.. autoclass:: mongoengine.context_managers.query_counter
|
.. autoclass:: mongoengine.context_managers.query_counter
|
||||||
|
|
||||||
@@ -78,11 +86,15 @@ Fields
|
|||||||
.. autoclass:: mongoengine.fields.GenericEmbeddedDocumentField
|
.. autoclass:: mongoengine.fields.GenericEmbeddedDocumentField
|
||||||
.. autoclass:: mongoengine.fields.DynamicField
|
.. autoclass:: mongoengine.fields.DynamicField
|
||||||
.. autoclass:: mongoengine.fields.ListField
|
.. autoclass:: mongoengine.fields.ListField
|
||||||
|
.. autoclass:: mongoengine.fields.EmbeddedDocumentListField
|
||||||
.. autoclass:: mongoengine.fields.SortedListField
|
.. autoclass:: mongoengine.fields.SortedListField
|
||||||
.. autoclass:: mongoengine.fields.DictField
|
.. autoclass:: mongoengine.fields.DictField
|
||||||
.. autoclass:: mongoengine.fields.MapField
|
.. autoclass:: mongoengine.fields.MapField
|
||||||
.. autoclass:: mongoengine.fields.ReferenceField
|
.. autoclass:: mongoengine.fields.ReferenceField
|
||||||
|
.. autoclass:: mongoengine.fields.LazyReferenceField
|
||||||
.. autoclass:: mongoengine.fields.GenericReferenceField
|
.. autoclass:: mongoengine.fields.GenericReferenceField
|
||||||
|
.. autoclass:: mongoengine.fields.GenericLazyReferenceField
|
||||||
|
.. autoclass:: mongoengine.fields.CachedReferenceField
|
||||||
.. autoclass:: mongoengine.fields.BinaryField
|
.. autoclass:: mongoengine.fields.BinaryField
|
||||||
.. autoclass:: mongoengine.fields.FileField
|
.. autoclass:: mongoengine.fields.FileField
|
||||||
.. autoclass:: mongoengine.fields.ImageField
|
.. autoclass:: mongoengine.fields.ImageField
|
||||||
@@ -93,11 +105,29 @@ Fields
|
|||||||
.. autoclass:: mongoengine.fields.PointField
|
.. autoclass:: mongoengine.fields.PointField
|
||||||
.. autoclass:: mongoengine.fields.LineStringField
|
.. autoclass:: mongoengine.fields.LineStringField
|
||||||
.. autoclass:: mongoengine.fields.PolygonField
|
.. autoclass:: mongoengine.fields.PolygonField
|
||||||
|
.. autoclass:: mongoengine.fields.MultiPointField
|
||||||
|
.. autoclass:: mongoengine.fields.MultiLineStringField
|
||||||
|
.. autoclass:: mongoengine.fields.MultiPolygonField
|
||||||
.. autoclass:: mongoengine.fields.GridFSError
|
.. autoclass:: mongoengine.fields.GridFSError
|
||||||
.. autoclass:: mongoengine.fields.GridFSProxy
|
.. autoclass:: mongoengine.fields.GridFSProxy
|
||||||
.. autoclass:: mongoengine.fields.ImageGridFsProxy
|
.. autoclass:: mongoengine.fields.ImageGridFsProxy
|
||||||
.. autoclass:: mongoengine.fields.ImproperlyConfigured
|
.. autoclass:: mongoengine.fields.ImproperlyConfigured
|
||||||
|
|
||||||
|
Embedded Document Querying
|
||||||
|
==========================
|
||||||
|
|
||||||
|
.. versionadded:: 0.9
|
||||||
|
|
||||||
|
Additional queries for Embedded Documents are available when using the
|
||||||
|
:class:`~mongoengine.EmbeddedDocumentListField` to store a list of embedded
|
||||||
|
documents.
|
||||||
|
|
||||||
|
A list of embedded documents is returned as a special list with the
|
||||||
|
following methods:
|
||||||
|
|
||||||
|
.. autoclass:: mongoengine.base.datastructures.EmbeddedDocumentList
|
||||||
|
:members:
|
||||||
|
|
||||||
Misc
|
Misc
|
||||||
====
|
====
|
||||||
|
|
||||||
|
|||||||
@@ -1,13 +1,436 @@
|
|||||||
|
|
||||||
=========
|
=========
|
||||||
Changelog
|
Changelog
|
||||||
=========
|
=========
|
||||||
|
|
||||||
|
Development
|
||||||
|
===========
|
||||||
|
- (Fill this out as you fix issues and develop your features).
|
||||||
|
|
||||||
|
Changes in 0.21.0
|
||||||
|
=================
|
||||||
|
- Bug fix in DynamicDocument which is not parsing known fields in constructor like Document do #2412
|
||||||
|
- When using pymongo >= 3.7, make use of Collection.count_documents instead of Collection.count
|
||||||
|
and Cursor.count that got deprecated in pymongo >= 3.7.
|
||||||
|
  This should have a negative impact on the performance of count; see Issue #2219
|
||||||
|
- Fix a bug that made the queryset drop the read_preference after clone().
|
||||||
|
- Remove Py3.5 from CI as it reached EOL and add Python 3.9
|
||||||
|
- Fix some issues related with db_field/field conflict in constructor #2414
|
||||||
|
- BREAKING CHANGE: Fix the behavior of Doc.objects.limit(0) which should return all documents (similar to mongodb) #2311
|
||||||
|
- Bug fix in ListField when updating the first item, it was saving the whole list, instead of
|
||||||
|
just replacing the first item (as usually done when updating 1 item of the list) #2392
|
||||||
|
- Add EnumField: ``mongoengine.fields.EnumField``
|
||||||
|
- Refactoring - Remove useless code related to Document.__only_fields and Queryset.only_fields
|
||||||
|
- Fix query transformation regarding special operators #2365
|
||||||
|
- Bug Fix: Document.save() fails when shard_key is not _id #2154
|
||||||
|
|
||||||
|
Changes in 0.20.0
|
||||||
|
=================
|
||||||
|
- ATTENTION: Drop support for Python2
|
||||||
|
- Add Mongo 4.0 to Travis
|
||||||
|
- Fix error when setting a string as a ComplexDateTimeField #2253
|
||||||
|
- Bump development Status classifier to Production/Stable #2232
|
||||||
|
- Improve Queryset.get to avoid confusing MultipleObjectsReturned message in case multiple match are found #630
|
||||||
|
- Fixed a bug causing inaccurate query results, while combining ``__raw__`` and regular filters for the same field #2264
|
||||||
|
- Add support for the `elemMatch` projection operator in .fields() (e.g BlogPost.objects.fields(elemMatch__comments="test")) #2267
|
||||||
|
- DictField validate failed without default connection (bug introduced in 0.19.0) #2239
|
||||||
|
- Remove methods that were deprecated years ago:
|
||||||
|
- name parameter in Field constructor e.g `StringField(name="...")`, was replaced by db_field
|
||||||
|
- Queryset.slave_okay() was deprecated since pymongo3
|
||||||
|
- dropDups was dropped with MongoDB3
|
||||||
|
- ``Queryset._ensure_indexes`` and ``Queryset.ensure_indexes``, the right method to use is ``Document.ensure_indexes``
|
||||||
|
- Added pre-commit for development/CI #2212
|
||||||
|
- Renamed requirements-lint.txt to requirements-dev.txt #2212
|
||||||
|
- Support for setting ReadConcern #2255
|
||||||
|
|
||||||
|
Changes in 0.19.1
|
||||||
|
=================
|
||||||
|
- Tests require Pillow < 7.0.0 as it dropped Python2 support
|
||||||
|
- DEPRECATION: The interface of ``QuerySet.aggregate`` method was changed, it no longer takes an unpacked list of
|
||||||
|
pipeline steps (*pipeline) but simply takes the pipeline list just like ``pymongo.Collection.aggregate`` does. #2079
|
||||||
|
|
||||||
|
Changes in 0.19.0
|
||||||
|
=================
|
||||||
|
- BREAKING CHANGE: ``class_check`` and ``read_preference`` keyword arguments are no longer available when filtering a ``QuerySet``. #2112
|
||||||
|
- Instead of ``Doc.objects(foo=bar, read_preference=...)`` use ``Doc.objects(foo=bar).read_preference(...)``.
|
||||||
|
- Instead of ``Doc.objects(foo=bar, class_check=False)`` use ``Doc.objects(foo=bar).clear_cls_query(...)``.
|
||||||
|
- This change also renames the private ``QuerySet._initial_query`` attribute to ``_cls_query``.
|
||||||
|
- BREAKING CHANGE: Removed the deprecated ``format`` param from ``QuerySet.explain``. #2113
|
||||||
|
- BREAKING CHANGE: Renamed ``MongoEngineConnectionError`` to ``ConnectionFailure``. #2111
|
||||||
|
- If you catch/use ``MongoEngineConnectionError`` in your code, you'll have to rename it.
|
||||||
|
- BREAKING CHANGE: Positional arguments when instantiating a document are no longer supported. #2103
|
||||||
|
- From now on keyword arguments (e.g. ``Doc(field_name=value)``) are required.
|
||||||
|
- BREAKING CHANGE: A ``LazyReferenceField`` is now stored in the ``_data`` field of its parent as a ``DBRef``, ``Document``, or ``EmbeddedDocument`` (``ObjectId`` is no longer allowed). #2182
|
||||||
|
- DEPRECATION: ``Q.empty`` & ``QNode.empty`` are marked as deprecated and will be removed in a next version of MongoEngine. #2210
|
||||||
|
- Added ability to check if Q or QNode are empty by parsing them to bool.
|
||||||
|
- Instead of ``Q(name="John").empty`` use ``not Q(name="John")``.
|
||||||
|
- Fix updating/modifying/deleting/reloading a document that's sharded by a field with ``db_field`` specified. #2125
|
||||||
|
- Only set no_cursor_timeout when requested (fixes an incompatibility with MongoDB 4.2) #2148
|
||||||
|
- ``ListField`` now accepts an optional ``max_length`` parameter. #2110
|
||||||
|
- Improve error message related to InvalidDocumentError #2180
|
||||||
|
- Added BulkWriteError to replace NotUniqueError which was misleading in bulk write insert #2152
|
||||||
|
- Added ability to compare Q and Q operations #2204
|
||||||
|
- Added ability to use a db alias on query_counter #2194
|
||||||
|
- Added ability to specify collations for querysets with ``Doc.objects.collation`` #2024
|
||||||
|
- Fix updates of a list field by negative index #2094
|
||||||
|
- Switch from nosetest to pytest as test runner #2114
|
||||||
|
- The codebase is now formatted using ``black``. #2109
|
||||||
|
- Documentation improvements:
|
||||||
|
- Documented how `pymongo.monitoring` can be used to log all queries issued by MongoEngine to the driver.
|
||||||
|
|
||||||
|
Changes in 0.18.2
|
||||||
|
=================
|
||||||
|
- Replace deprecated PyMongo v2.x methods with their v3.x equivalents in the ``SequenceField``. #2097
|
||||||
|
- Various code clarity and documentation improvements.
|
||||||
|
|
||||||
|
Changes in 0.18.1
|
||||||
|
=================
|
||||||
|
- Fix a bug introduced in 0.18.0 which was causing ``Document.save`` to update all the fields instead of updating only the modified fields. This bug only occurred when using a custom PK. #2082
|
||||||
|
- Add Python 3.7 to Travis CI. #2058
|
||||||
|
|
||||||
|
Changes in 0.18.0
|
||||||
|
=================
|
||||||
|
- Drop support for EOL'd MongoDB v2.6, v3.0, and v3.2.
|
||||||
|
- MongoEngine now requires PyMongo >= v3.4. Travis CI now tests against MongoDB v3.4 – v3.6 and PyMongo v3.4 – v3.6. #2017 #2066
|
||||||
|
- Improve performance by avoiding a call to ``to_mongo`` in ``Document.save``. #2049
|
||||||
|
- Connection/disconnection improvements:
|
||||||
|
- Expose ``mongoengine.connection.disconnect`` and ``mongoengine.connection.disconnect_all``.
|
||||||
|
- Fix disconnecting. #566 #1599 #605 #607 #1213 #565
|
||||||
|
- Improve documentation of ``connect``/``disconnect``.
|
||||||
|
- Fix issue when using multiple connections to the same mongo with different credentials. #2047
|
||||||
|
- ``connect`` fails immediately when db name contains invalid characters. #2031 #1718
|
||||||
|
- Fix the default write concern of ``Document.save`` that was overwriting the connection write concern. #568
|
||||||
|
- Fix querying on ``List(EmbeddedDocument)`` subclasses fields. #1961 #1492
|
||||||
|
- Fix querying on ``(Generic)EmbeddedDocument`` subclasses fields. #475
|
||||||
|
- Fix ``QuerySet.aggregate`` so that it takes limit and skip value into account. #2029
|
||||||
|
- Generate unique indices for ``SortedListField`` and ``EmbeddedDocumentListFields``. #2020
|
||||||
|
- BREAKING CHANGE: Changed the behavior of a custom field validator (i.e ``validation`` parameter of a ``Field``). It is now expected to raise a ``ValidationError`` instead of returning ``True``/``False``. #2050
|
||||||
|
- BREAKING CHANGES (associated with connection/disconnection fixes):
|
||||||
|
- Calling ``connect`` 2 times with the same alias and different parameter will raise an error (should call ``disconnect`` first).
|
||||||
|
- ``disconnect`` now clears ``mongoengine.connection._connection_settings``.
|
||||||
|
- ``disconnect`` now clears the cached attribute ``Document._collection``.
|
||||||
|
- BREAKING CHANGE: ``EmbeddedDocument.save`` & ``.reload`` no longer exist. #1552
|
||||||
|
|
||||||
|
Changes in 0.17.0
|
||||||
|
=================
|
||||||
|
- POTENTIAL BREAKING CHANGE: All result fields are now passed, including internal fields (``_cls``, ``_id``) when using ``QuerySet.as_pymongo``. #1976
|
||||||
|
- Document a BREAKING CHANGE introduced in 0.15.3 and not reported at that time. #1995
|
||||||
|
- DEPRECATION: ``EmbeddedDocument.save`` & ``.reload`` are marked as deprecated and will be removed in a next version of MongoEngine. #1552
|
||||||
|
- Fix ``QuerySet.only`` working improperly after using ``QuerySet.count`` of the same instance of a ``QuerySet``.
|
||||||
|
- Fix ``batch_size`` that was not copied when cloning a ``QuerySet`` object. #2011
|
||||||
|
- Fix ``InvalidStringData`` error when using ``modify`` on a ``BinaryField``. #1127
|
||||||
|
- Fix test suite and CI to support MongoDB v3.4. #1445
|
||||||
|
- Fix reference fields querying the database on each access if value contains orphan DBRefs.
|
||||||
|
|
||||||
|
Changes in 0.16.3
|
||||||
|
=================
|
||||||
|
- Fix ``$push`` with the ``$position`` operator not working with lists in embedded documents. #1965
|
||||||
|
|
||||||
|
Changes in 0.16.2
|
||||||
|
=================
|
||||||
|
- Fix ``Document.save`` that fails when called with ``write_concern=None`` (regression of 0.16.1). #1958
|
||||||
|
|
||||||
|
Changes in 0.16.1
|
||||||
|
=================
|
||||||
|
- Fix ``_cls`` that is not set properly in the ``Document`` constructor (regression). #1950
|
||||||
|
- Fix a bug in the ``_delta`` method - update of a ``ListField`` depends on an unrelated dynamic field update. #1733
|
||||||
|
- Remove PyMongo's deprecated ``Collection.save`` method and use ``Collection.insert_one`` instead. #1899
|
||||||
|
|
||||||
|
Changes in 0.16.0
|
||||||
|
=================
|
||||||
|
- POTENTIAL BREAKING CHANGES:
|
||||||
|
- ``EmbeddedDocumentField`` will no longer accept references to Document classes in its constructor. #1661
|
||||||
|
- Get rid of the ``basecls`` parameter from the ``DictField`` constructor (dead code). #1876
|
||||||
|
- Default value of the ``ComplexDateTime`` field is now ``None`` (and no longer the current datetime). #1368
|
||||||
|
- Fix an unhashable ``TypeError`` when referencing a ``Document`` with a compound key in an ``EmbeddedDocument``. #1685
|
||||||
|
- Fix a bug where an ``EmbeddedDocument`` with the same id as its parent would not be tracked for changes. #1768
|
||||||
|
- Fix the fact that a bulk ``QuerySet.insert`` was not setting primary keys of inserted document instances. #1919
|
||||||
|
- Fix a bug when referencing an abstract class in a ``ReferenceField``. #1920
|
||||||
|
- Allow modifications to the document made in ``pre_save_post_validation`` to be taken into account. #1202
|
||||||
|
- Replace MongoDB v2.4 tests in Travis CI with MongoDB v3.2. #1903
|
||||||
|
- Fix side effects of using ``QuerySet.no_dereference`` on other documents. #1677
|
||||||
|
- Fix ``TypeError`` when using lazy Django translation objects as translated choices. #1879
|
||||||
|
- Improve Python 2-3 codebase compatibility. #1889
|
||||||
|
- Fix support for changing the default value of the ``ComplexDateTime`` field. #1368
|
||||||
|
- Improve error message in case an ``EmbeddedDocumentListField`` receives an ``EmbeddedDocument`` instance instead of a list. #1877
|
||||||
|
- Fix the ``inc`` and ``dec`` operators for the ``DecimalField``. #1517 #1320
|
||||||
|
- Ignore ``killcursors`` queries in ``query_counter`` context manager. #1869
|
||||||
|
- Fix the fact that ``query_counter`` was modifying the initial profiling level in case it was != 0. #1870
|
||||||
|
- Repair the ``no_sub_classes`` context manager + fix the fact that it was swallowing exceptions. #1865
|
||||||
|
- Fix index creation error that was swallowed by ``hasattr`` under Python 2. #1688
|
||||||
|
- ``QuerySet.limit`` function behaviour: Passing 0 as parameter will return all the documents in the cursor. #1611
|
||||||
|
- Bulk insert updates the IDs of the input documents instances. #1919
|
||||||
|
- Fix a harmless bug related to ``GenericReferenceField`` where modifications in the generic-referenced document were tracked in the parent. #1934
|
||||||
|
- Improve validation of the ``BinaryField``. #273
|
||||||
|
- Implement lazy regex compiling in Field classes to improve ``import mongoengine`` performance. #1806
|
||||||
|
- Update ``GridFSProxy.__str__`` so that it would always print both the filename and grid_id. #710
|
||||||
|
- Add ``__repr__`` to ``Q`` and ``QCombination`` classes. #1843
|
||||||
|
- Fix bug in the ``BaseList.__iter__`` operator (was occurring when modifying a BaseList while iterating over it). #1676
|
||||||
|
- Add a ``DateField``. #513
|
||||||
|
- Various improvements to the documentation.
|
||||||
|
- Various code quality improvements.
|
||||||
|
|
||||||
|
Changes in 0.15.3
|
||||||
|
=================
|
||||||
|
- ``Queryset.update/update_one`` methods now return an ``UpdateResult`` when ``full_result=True`` is provided and no longer a dict. #1491
|
||||||
|
- Improve ``LazyReferenceField`` and ``GenericLazyReferenceField`` with nested fields. #1704
|
||||||
|
- Fix the subfield resolve error in ``generic_emdedded_document`` query. #1651 #1652
|
||||||
|
- Use each modifier only with ``$position``. #1673 #1675
|
||||||
|
- Fix validation errors in the ``GenericEmbeddedDocumentField``. #1067
|
||||||
|
- Update cached fields when a ``fields`` argument is given. #1712
|
||||||
|
- Add a ``db`` parameter to ``register_connection`` for compatibility with ``connect``.
|
||||||
|
- Use PyMongo v3.x's ``insert_one`` and ``insert_many`` in ``Document.insert``. #1491
|
||||||
|
- Use PyMongo v3.x's ``update_one`` and ``update_many`` in ``Document.update`` and ``QuerySet.update``. #1491
|
||||||
|
- Fix how ``reload(fields)`` affects changed fields. #1371
|
||||||
|
- Fix a bug where the read-only access to the database fails when trying to create indexes. #1338
|
||||||
|
|
||||||
|
Changes in 0.15.0
|
||||||
|
=================
|
||||||
|
- Add ``LazyReferenceField`` and ``GenericLazyReferenceField``. #1230
|
||||||
|
|
||||||
|
Changes in 0.14.1
|
||||||
|
=================
|
||||||
|
- Remove ``SemiStrictDict`` and start using a regular dict for ``BaseDocument._data``. #1630
|
||||||
|
- Add support for the ``$position`` param in the ``$push`` operator. #1566
|
||||||
|
- Fix ``DateTimeField`` interpreting an empty string as today. #1533
|
||||||
|
- Add a missing ``__ne__`` method to the ``GridFSProxy`` class. #1632
|
||||||
|
- Fix ``BaseQuerySet._fields_to_db_fields``. #1553
|
||||||
|
|
||||||
|
Changes in 0.14.0
|
||||||
|
=================
|
||||||
|
- BREAKING CHANGE: Remove the ``coerce_types`` param from ``QuerySet.as_pymongo``. #1549
|
||||||
|
- POTENTIAL BREAKING CHANGE: Make ``EmbeddedDocument`` not hashable by default. #1528
|
||||||
|
- Improve code quality. #1531, #1540, #1541, #1547
|
||||||
|
|
||||||
|
Changes in 0.13.0
|
||||||
|
=================
|
||||||
|
- POTENTIAL BREAKING CHANGE: Added Unicode support to the ``EmailField``, see docs/upgrade.rst for details.
|
||||||
|
|
||||||
|
Changes in 0.12.0
|
||||||
|
=================
|
||||||
|
- POTENTIAL BREAKING CHANGE: Fix ``limit``/``skip``/``hint``/``batch_size`` chaining. #1476
|
||||||
|
- POTENTIAL BREAKING CHANGE: Change a public ``QuerySet.clone_into`` method to a private ``QuerySet._clone_into``. #1476
|
||||||
|
- Fix the way ``Document.objects.create`` works with duplicate IDs. #1485
|
||||||
|
- Fix connecting to a replica set with PyMongo 2.x. #1436
|
||||||
|
- Fix using sets in field choices. #1481
|
||||||
|
- Fix deleting items from a ``ListField``. #1318
|
||||||
|
- Fix an obscure error message when filtering by ``field__in=non_iterable``. #1237
|
||||||
|
- Fix behavior of a ``dec`` update operator. #1450
|
||||||
|
- Add a ``rename`` update operator. #1454
|
||||||
|
- Add validation for the ``db_field`` parameter. #1448
|
||||||
|
- Fix the error message displayed when querying an ``EmbeddedDocumentField`` by an invalid value. #1440
|
||||||
|
- Fix the error message displayed when validating Unicode URLs. #1486
|
||||||
|
- Raise an error when trying to save an abstract document. #1449
|
||||||
|
|
||||||
|
Changes in 0.11.0
|
||||||
|
=================
|
||||||
|
- BREAKING CHANGE: Rename ``ConnectionError`` to ``MongoEngineConnectionError`` since the former is a built-in exception name in Python v3.x. #1428
|
||||||
|
- BREAKING CHANGE: Drop Python v2.6 support. #1428
|
||||||
|
- BREAKING CHANGE: ``from mongoengine.base import ErrorClass`` won't work anymore for any error from ``mongoengine.errors`` (e.g. ``ValidationError``). Use ``from mongoengine.errors import ErrorClass instead``. #1428
|
||||||
|
- BREAKING CHANGE: Accessing a broken reference will raise a ``DoesNotExist`` error. In the past it used to return ``None``. #1334
|
||||||
|
- Fix absent rounding for the ``DecimalField`` when ``force_string`` is set. #1103
|
||||||
|
|
||||||
|
Changes in 0.10.8
|
||||||
|
=================
|
||||||
|
- Add support for ``QuerySet.batch_size``. (#1426)
|
||||||
|
- Fix a query set iteration within an iteration. #1427
|
||||||
|
- Fix an issue where specifying a MongoDB URI host would override more information than it should. #1421
|
||||||
|
- Add an ability to filter the ``GenericReferenceField`` by an ``ObjectId`` and a ``DBRef``. #1425
|
||||||
|
- Fix cascading deletes for models with a custom primary key field. #1247
|
||||||
|
- Add ability to specify an authentication mechanism (e.g. X.509). #1333
|
||||||
|
- Add support for falsy primary keys (e.g. ``doc.pk = 0``). #1354
|
||||||
|
- Fix ``QuerySet.sum/average`` for fields w/ an explicit ``db_field``. #1417
|
||||||
|
- Fix filtering by ``embedded_doc=None``. #1422
|
||||||
|
- Add support for ``Cursor.comment``. #1420
|
||||||
|
- Fix ``doc.get_<field>_display`` methods. #1419
|
||||||
|
- Fix the ``__repr__`` method of the ``StrictDict`` #1424
|
||||||
|
- Add a deprecation warning for Python v2.6.
|
||||||
|
|
||||||
|
Changes in 0.10.7
|
||||||
|
=================
|
||||||
|
- Drop Python 3.2 support #1390
|
||||||
|
- Fix a bug where a dynamic doc has an index inside a dict field. #1278
|
||||||
|
- Fix: ``ListField`` minus index assignment does not work. #1128
|
||||||
|
- Fix cascade delete mixing among collections. #1224
|
||||||
|
- Add ``signal_kwargs`` argument to ``Document.save``, ``Document.delete`` and ``BaseQuerySet.insert`` to be passed to signals calls. #1206
|
||||||
|
- Raise ``OperationError`` when trying to do a ``drop_collection`` on document with no collection set.
|
||||||
|
- Fix a bug where a count on ``ListField`` of ``EmbeddedDocumentField`` fails. #1187
|
||||||
|
- Fix ``LongField`` values stored as int32 in Python 3. #1253
|
||||||
|
- ``MapField`` now handles unicode keys correctly. #1267
|
||||||
|
- ``ListField`` now handles negative indices correctly. #1270
|
||||||
|
- Fix an ``AttributeError`` when initializing an ``EmbeddedDocument`` with positional args. #681
|
||||||
|
- Fix a ``no_cursor_timeout`` error with PyMongo v3.x. #1304
|
||||||
|
- Replace map-reduce based ``QuerySet.sum/average`` with aggregation-based implementations. #1336
|
||||||
|
- Fix support for ``__`` to escape field names that match operators' names in ``update``. #1351
|
||||||
|
- Fix ``BaseDocument._mark_as_changed``. #1369
|
||||||
|
- Add support for pickling ``QuerySet`` instances. #1397
|
||||||
|
- Fix connecting to a list of hosts. #1389
|
||||||
|
- Fix a bug where accessing broken references wouldn't raise a ``DoesNotExist`` error. #1334
|
||||||
|
- Fix not being able to specify ``use_db_field=False`` on ``ListField(EmbeddedDocumentField)`` instances. #1218
|
||||||
|
- Improvements to the dictionary field's docs. #1383
|
||||||
|
|
||||||
|
Changes in 0.10.6
|
||||||
|
=================
|
||||||
|
- Add support for mocking MongoEngine based on mongomock. #1151
|
||||||
|
- Fix not being able to run tests on Windows. #1153
|
||||||
|
- Allow creation of sparse compound indexes. #1114
|
||||||
|
|
||||||
|
Changes in 0.10.5
|
||||||
|
=================
|
||||||
|
- Fix for reloading of strict with special fields. #1156
|
||||||
|
|
||||||
|
Changes in 0.10.4
|
||||||
|
=================
|
||||||
|
- ``SaveConditionError`` is now importable from the top level package. #1165
|
||||||
|
- Add a ``QuerySet.upsert_one`` method. #1157
|
||||||
|
|
||||||
|
Changes in 0.10.3
|
||||||
|
=================
|
||||||
|
- Fix ``read_preference`` (it had chaining issues with PyMongo v2.x and it didn't work at all with PyMongo v3.x). #1042
|
||||||
|
|
||||||
|
Changes in 0.10.2
|
||||||
|
=================
|
||||||
|
- Allow shard key to point to a field in an embedded document. #551
|
||||||
|
- Allow arbitrary metadata in fields. #1129
|
||||||
|
- ReferenceFields now support abstract document types. #837
|
||||||
|
|
||||||
|
Changes in 0.10.1
|
||||||
|
=================
|
||||||
|
- Fix infinite recursion with cascade delete rules under specific conditions. #1046
|
||||||
|
- Fix ``CachedReferenceField`` bug when loading cached docs as ``DBRef`` but failing to save them. #1047
|
||||||
|
- Fix ignored chained options. #842
|
||||||
|
- ``Document.save``'s ``save_condition`` error raises a ``SaveConditionError`` exception. #1070
|
||||||
|
- Fix ``Document.reload`` for the ``DynamicDocument``. #1050
|
||||||
|
- ``StrictDict`` & ``SemiStrictDict`` are shadowed at init time. #1105
|
||||||
|
- Fix ``ListField`` negative index assignment not working. #1119
|
||||||
|
- Remove code that marks a field as changed when the field has a default value but does not exist in the database. #1126
|
||||||
|
- Remove test dependencies (nose and rednose) from install dependencies. #1079
|
||||||
|
- Recursively build a query when using the ``elemMatch`` operator. #1130
|
||||||
|
- Fix instance back references for lists of embedded documents. #1131
|
||||||
|
|
||||||
|
Changes in 0.10.0
|
||||||
|
=================
|
||||||
|
- Django support was removed and will be available as a separate extension. #958
|
||||||
|
- Allow to load undeclared field with meta attribute 'strict': False #957
|
||||||
|
- Support for PyMongo 3+ #946
|
||||||
|
- Removed get_or_create() deprecated since 0.8.0. #300
|
||||||
|
- Improve Document._created status when switch collection and db #1020
|
||||||
|
- Queryset update doesn't go through field validation #453
|
||||||
|
- Added support for specifying authentication source as option ``authSource`` in URI. #967
|
||||||
|
- Fixed mark_as_changed to handle higher/lower level fields changed. #927
|
||||||
|
- ListField of embedded docs doesn't set the _instance attribute when iterating over it #914
|
||||||
|
- Support += and *= for ListField #595
|
||||||
|
- Use sets for populating dbrefs to dereference
|
||||||
|
- Fixed unpickled documents replacing the global field's list. #888
|
||||||
|
- Fixed storage of microseconds in ComplexDateTimeField and unused separator option. #910
|
||||||
|
- Don't send a "cls" option to ensureIndex (related to https://jira.mongodb.org/browse/SERVER-769)
|
||||||
|
- Fix for updating sorting in SortedListField. #978
|
||||||
|
- Added __ support to escape field name in fields lookup keywords that match operators names #949
|
||||||
|
- Fix for issue where FileField deletion did not free space in GridFS.
|
||||||
|
- No_dereference() not respected on embedded docs containing reference. #517
|
||||||
|
- Document save raise an exception if save_condition fails #1005
|
||||||
|
- Fixes some internal _id handling issue. #961
|
||||||
|
- Updated URL and Email Field regex validators, added schemes argument to URLField validation. #652
|
||||||
|
- Capped collection multiple of 256. #1011
|
||||||
|
- Added ``BaseQuerySet.aggregate_sum`` and ``BaseQuerySet.aggregate_average`` methods.
|
||||||
|
- Fix for delete with write_concern {'w': 0}. #1008
|
||||||
|
- Allow dynamic lookup for more than two parts. #882
|
||||||
|
- Added support for min_distance on geo queries. #831
|
||||||
|
- Allow to add custom metadata to fields #705
|
||||||
|
|
||||||
|
Changes in 0.9.0
|
||||||
|
================
|
||||||
|
- Update FileField when creating a new file #714
|
||||||
|
- Added ``EmbeddedDocumentListField`` for Lists of Embedded Documents. #826
|
||||||
|
- ComplexDateTimeField should fall back to None when null=True #864
|
||||||
|
- Request Support for $min, $max Field update operators #863
|
||||||
|
- ``BaseDict`` does not follow ``setdefault`` #866
|
||||||
|
- Add support for $type operator # 766
|
||||||
|
- Fix tests for pymongo 2.8+ #877
|
||||||
|
- No module named 'django.utils.importlib' (Django dev) #872
|
||||||
|
- Field Choices Now Accept Subclasses of Documents
|
||||||
|
- Ensure Indexes before Each Save #812
|
||||||
|
- Generate Unique Indices for Lists of EmbeddedDocuments #358
|
||||||
|
- Sparse fields #515
|
||||||
|
- write_concern not in params of Collection#remove #801
|
||||||
|
- Better BaseDocument equality check when not saved #798
|
||||||
|
- OperationError: Shard Keys are immutable. Tried to update id even though the document is not yet saved #771
|
||||||
|
- with_limit_and_skip for count should default like in pymongo #759
|
||||||
|
- Fix storing value of precision attribute in DecimalField #787
|
||||||
|
- Set attribute to None does not work (at least for fields with default values) #734
|
||||||
|
- Querying by a field defined in a subclass raises InvalidQueryError #744
|
||||||
|
- Add Support For MongoDB 2.6.X's maxTimeMS #778
|
||||||
|
- abstract shouldn't be inherited in EmbeddedDocument # 789
|
||||||
|
- Allow specifying the '_cls' as a field for indexes #397
|
||||||
|
- Stop ensure_indexes running on a secondaries unless connection is through mongos #746
|
||||||
|
- Not overriding default values when loading a subset of fields #399
|
||||||
|
- Saving document doesn't create new fields in existing collection #620
|
||||||
|
- Added ``Queryset.aggregate`` wrapper to aggregation framework #703
|
||||||
|
- Added support to show original model fields on to_json calls instead of db_field #697
|
||||||
|
- Added Queryset.search_text to Text indexes searches #700
|
||||||
|
- Fixed tests for Django 1.7 #696
|
||||||
|
- Follow ReferenceFields in EmbeddedDocuments with select_related #690
|
||||||
|
- Added preliminary support for text indexes #680
|
||||||
|
- Added ``elemMatch`` operator as well - ``match`` is too obscure #653
|
||||||
|
- Added support for progressive JPEG #486 #548
|
||||||
|
- Allow strings to be used in index creation #675
|
||||||
|
- Fixed EmbeddedDoc weakref proxy issue #592
|
||||||
|
- Fixed nested reference field distinct error #583
|
||||||
|
- Fixed change tracking on nested MapFields #539
|
||||||
|
- Dynamic fields in embedded documents now visible to queryset.only() / qs.exclude() #425 #507
|
||||||
|
- Add authentication_source option to register_connection #178 #464 #573 #580 #590
|
||||||
|
- Implemented equality between Documents and DBRefs #597
|
||||||
|
- Fixed ReferenceField inside nested ListFields dereferencing problem #368
|
||||||
|
- Added the ability to reload specific document fields #100
|
||||||
|
- Added db_alias support and fixes for custom map/reduce output #586
|
||||||
|
- post_save signal now has access to delta information about field changes #594 #589
|
||||||
|
- Don't query with $orderby for qs.get() #600
|
||||||
|
- Fix id shard key save issue #636
|
||||||
|
- Fixes issue with recursive embedded document errors #557
|
||||||
|
- Fix clear_changed_fields() clearing unsaved documents bug #602
|
||||||
|
- Removing support for Django 1.4.x, pymongo 2.5.x, pymongo 2.6.x.
|
||||||
|
- Removing support for Python < 2.6.6
|
||||||
|
- Fixed $maxDistance location for geoJSON $near queries with MongoDB 2.6+ #664
|
||||||
|
- QuerySet.modify() and Document.modify() methods to provide find_and_modify() like behaviour #677 #773
|
||||||
|
- Added support for the using() method on a queryset #676
|
||||||
|
- PYPY support #673
|
||||||
|
- Connection pooling #674
|
||||||
|
- Avoid to open all documents from cursors in an if stmt #655
|
||||||
|
- Ability to clear the ordering #657
|
||||||
|
- Raise NotUniqueError in Document.update() on pymongo.errors.DuplicateKeyError #626
|
||||||
|
- Slots - memory improvements #625
|
||||||
|
- Fixed incorrectly split a query key when it ends with "_" #619
|
||||||
|
- Geo docs updates #613
|
||||||
|
- Workaround a dateutil bug #608
|
||||||
|
- Conditional save for atomic-style operations #511
|
||||||
|
- Allow dynamic dictionary-style field access #559
|
||||||
|
- Increase email field length to accommodate new TLDs #726
|
||||||
|
- index_cls is ignored when deciding to set _cls as index prefix #733
|
||||||
|
- Make 'db' argument to connection optional #737
|
||||||
|
- Allow atomic update for the entire ``DictField`` #742
|
||||||
|
- Added MultiPointField, MultiLineField, MultiPolygonField
|
||||||
|
- Fix multiple connections aliases being rewritten #748
|
||||||
|
- Fixed a few instances where reverse_delete_rule was written as reverse_delete_rules. #791
|
||||||
|
- Make ``in_bulk()`` respect ``no_dereference()`` #775
|
||||||
|
- Handle None from model __str__; Fixes #753 #754
|
||||||
|
- _get_changed_fields fix for embedded documents with id field. #925
|
||||||
|
|
||||||
|
Changes in 0.8.7
|
||||||
|
================
|
||||||
|
- Calling reload on deleted / nonexistent documents raises DoesNotExist (#538)
|
||||||
|
- Stop ensure_indexes running on a secondaries (#555)
|
||||||
|
- Fix circular import issue with django auth (#531) (#545)
|
||||||
|
|
||||||
|
Changes in 0.8.6
|
||||||
|
================
|
||||||
|
- Fix django auth import (#531)
|
||||||
|
|
||||||
Changes in 0.8.5
|
Changes in 0.8.5
|
||||||
================
|
================
|
||||||
- Fix multi level nested fields getting marked as changed (#523)
|
- Fix multi level nested fields getting marked as changed (#523)
|
||||||
- Django 1.6 login fix (#522) (#527)
|
- Django 1.6 login fix (#522) (#527)
|
||||||
- Django 1.6 session fix (#509)
|
- Django 1.6 session fix (#509)
|
||||||
- EmbeddedDocument._instance is now set when settng the attribute (#506)
|
- EmbeddedDocument._instance is now set when setting the attribute (#506)
|
||||||
- Fixed EmbeddedDocument with ReferenceField equality issue (#502)
|
- Fixed EmbeddedDocument with ReferenceField equality issue (#502)
|
||||||
- Fixed GenericReferenceField serialization order (#499)
|
- Fixed GenericReferenceField serialization order (#499)
|
||||||
- Fixed count and none bug (#498)
|
- Fixed count and none bug (#498)
|
||||||
@@ -42,18 +465,15 @@ Changes in 0.8.4
|
|||||||
|
|
||||||
Changes in 0.8.3
|
Changes in 0.8.3
|
||||||
================
|
================
|
||||||
- Fixed EmbeddedDocuments with `id` also storing `_id` (#402)
|
- Fixed EmbeddedDocuments with ``id`` also storing ``_id`` (#402)
|
||||||
- Added get_proxy_object helper to filefields (#391)
|
- Added get_proxy_object helper to filefields (#391)
|
||||||
- Added QuerySetNoCache and QuerySet.no_cache() for lower memory consumption (#365)
|
- Added QuerySetNoCache and QuerySet.no_cache() for lower memory consumption (#365)
|
||||||
- Fixed sum and average mapreduce dot notation support (#375, #376, #393)
|
- Fixed sum and average mapreduce dot notation support (#375, #376, #393)
|
||||||
- Fixed as_pymongo to return the id (#386)
|
- Fixed as_pymongo to return the id (#386)
|
||||||
- Document.select_related() now respects `db_alias` (#377)
|
- Document.select_related() now respects ``db_alias`` (#377)
|
||||||
- Reload uses shard_key if applicable (#384)
|
- Reload uses shard_key if applicable (#384)
|
||||||
- Dynamic fields are ordered based on creation and stored in _fields_ordered (#396)
|
- Dynamic fields are ordered based on creation and stored in _fields_ordered (#396)
|
||||||
|
- Fixed pickling dynamic documents ``_dynamic_fields`` (#387)
|
||||||
**Potential breaking change:** http://docs.mongoengine.org/en/latest/upgrade.html#to-0-8-3
|
|
||||||
|
|
||||||
- Fixed pickling dynamic documents `_dynamic_fields` (#387)
|
|
||||||
- Fixed ListField setslice and delslice dirty tracking (#390)
|
- Fixed ListField setslice and delslice dirty tracking (#390)
|
||||||
- Added Django 1.5 PY3 support (#392)
|
- Added Django 1.5 PY3 support (#392)
|
||||||
- Added match ($elemMatch) support for EmbeddedDocuments (#379)
|
- Added match ($elemMatch) support for EmbeddedDocuments (#379)
|
||||||
@@ -94,10 +514,10 @@ Changes in 0.8.0
|
|||||||
================
|
================
|
||||||
- Fixed querying ReferenceField custom_id (#317)
|
- Fixed querying ReferenceField custom_id (#317)
|
||||||
- Fixed pickle issues with collections (#316)
|
- Fixed pickle issues with collections (#316)
|
||||||
- Added `get_next_value` preview for SequenceFields (#319)
|
- Added ``get_next_value`` preview for SequenceFields (#319)
|
||||||
- Added no_sub_classes context manager and queryset helper (#312)
|
- Added no_sub_classes context manager and queryset helper (#312)
|
||||||
- Querysets now utilise a local cache
|
- Querysets now utilise a local cache
|
||||||
- Changed __len__ behavour in the queryset (#247, #311)
|
- Changed __len__ behaviour in the queryset (#247, #311)
|
||||||
- Fixed querying string versions of ObjectIds issue with ReferenceField (#307)
|
- Fixed querying string versions of ObjectIds issue with ReferenceField (#307)
|
||||||
- Added $setOnInsert support for upserts (#308)
|
- Added $setOnInsert support for upserts (#308)
|
||||||
- Upserts now possible with just query parameters (#309)
|
- Upserts now possible with just query parameters (#309)
|
||||||
@@ -123,7 +543,7 @@ Changes in 0.8.0
|
|||||||
- Updated connection to use MongoClient (#262, #274)
|
- Updated connection to use MongoClient (#262, #274)
|
||||||
- Fixed db_alias and inherited Documents (#143)
|
- Fixed db_alias and inherited Documents (#143)
|
||||||
- Documentation update for document errors (#124)
|
- Documentation update for document errors (#124)
|
||||||
- Deprecated `get_or_create` (#35)
|
- Deprecated ``get_or_create`` (#35)
|
||||||
- Updated inheritable objects created by upsert now contain _cls (#118)
|
- Updated inheritable objects created by upsert now contain _cls (#118)
|
||||||
- Added support for creating documents with embedded documents in a single operation (#6)
|
- Added support for creating documents with embedded documents in a single operation (#6)
|
||||||
- Added to_json and from_json to Document (#1)
|
- Added to_json and from_json to Document (#1)
|
||||||
@@ -148,7 +568,7 @@ Changes in 0.8.0
|
|||||||
- Uses getlasterror to test created on updated saves (#163)
|
- Uses getlasterror to test created on updated saves (#163)
|
||||||
- Fixed inheritance and unique index creation (#140)
|
- Fixed inheritance and unique index creation (#140)
|
||||||
- Fixed reverse delete rule with inheritance (#197)
|
- Fixed reverse delete rule with inheritance (#197)
|
||||||
- Fixed validation for GenericReferences which havent been dereferenced
|
- Fixed validation for GenericReferences which haven't been dereferenced
|
||||||
- Added switch_db context manager (#106)
|
- Added switch_db context manager (#106)
|
||||||
- Added switch_db method to document instances (#106)
|
- Added switch_db method to document instances (#106)
|
||||||
- Added no_dereference context manager (#82) (#61)
|
- Added no_dereference context manager (#82) (#61)
|
||||||
@@ -230,11 +650,11 @@ Changes in 0.7.2
|
|||||||
- Update index spec generation so its not destructive (#113)
|
- Update index spec generation so its not destructive (#113)
|
||||||
|
|
||||||
Changes in 0.7.1
|
Changes in 0.7.1
|
||||||
=================
|
================
|
||||||
- Fixed index spec inheritance (#111)
|
- Fixed index spec inheritance (#111)
|
||||||
|
|
||||||
Changes in 0.7.0
|
Changes in 0.7.0
|
||||||
=================
|
================
|
||||||
- Updated queryset.delete so you can use with skip / limit (#107)
|
- Updated queryset.delete so you can use with skip / limit (#107)
|
||||||
- Updated index creation allows kwargs to be passed through refs (#104)
|
- Updated index creation allows kwargs to be passed through refs (#104)
|
||||||
- Fixed Q object merge edge case (#109)
|
- Fixed Q object merge edge case (#109)
|
||||||
@@ -244,7 +664,7 @@ Changes in 0.7.0
|
|||||||
- Fixed UnboundLocalError in composite index with pk field (#88)
|
- Fixed UnboundLocalError in composite index with pk field (#88)
|
||||||
- Updated ReferenceField's to optionally store ObjectId strings
|
- Updated ReferenceField's to optionally store ObjectId strings
|
||||||
this will become the default in 0.8 (#89)
|
this will become the default in 0.8 (#89)
|
||||||
- Added FutureWarning - save will default to `cascade=False` in 0.8
|
- Added FutureWarning - save will default to ``cascade=False`` in 0.8
|
||||||
- Added example of indexing embedded document fields (#75)
|
- Added example of indexing embedded document fields (#75)
|
||||||
- Fixed ImageField resizing when forcing size (#80)
|
- Fixed ImageField resizing when forcing size (#80)
|
||||||
- Add flexibility for fields handling bad data (#78)
|
- Add flexibility for fields handling bad data (#78)
|
||||||
@@ -315,7 +735,7 @@ Changes in 0.6.12
|
|||||||
- Fixes error with _delta handling DBRefs
|
- Fixes error with _delta handling DBRefs
|
||||||
|
|
||||||
Changes in 0.6.11
|
Changes in 0.6.11
|
||||||
==================
|
=================
|
||||||
- Fixed inconsistency handling None values field attrs
|
- Fixed inconsistency handling None values field attrs
|
||||||
- Fixed map_field embedded db_field issue
|
- Fixed map_field embedded db_field issue
|
||||||
- Fixed .save() _delta issue with DbRefs
|
- Fixed .save() _delta issue with DbRefs
|
||||||
@@ -340,7 +760,7 @@ Changes in 0.6.8
|
|||||||
================
|
================
|
||||||
- Fixed FileField losing reference when no default set
|
- Fixed FileField losing reference when no default set
|
||||||
- Removed possible race condition from FileField (grid_file)
|
- Removed possible race condition from FileField (grid_file)
|
||||||
- Added assignment to save, can now do: `b = MyDoc(**kwargs).save()`
|
- Added assignment to save, can now do: ``b = MyDoc(**kwargs).save()``
|
||||||
- Added support for pull operations on nested EmbeddedDocuments
|
- Added support for pull operations on nested EmbeddedDocuments
|
||||||
- Added support for choices with GenericReferenceFields
|
- Added support for choices with GenericReferenceFields
|
||||||
- Added support for choices with GenericEmbeddedDocumentFields
|
- Added support for choices with GenericEmbeddedDocumentFields
|
||||||
@@ -355,7 +775,7 @@ Changes in 0.6.7
|
|||||||
- Fixed indexing on '_id' or 'pk' or 'id'
|
- Fixed indexing on '_id' or 'pk' or 'id'
|
||||||
- Invalid data from the DB now raises a InvalidDocumentError
|
- Invalid data from the DB now raises a InvalidDocumentError
|
||||||
- Cleaned up the Validation Error - docs and code
|
- Cleaned up the Validation Error - docs and code
|
||||||
- Added meta `auto_create_index` so you can disable index creation
|
- Added meta ``auto_create_index`` so you can disable index creation
|
||||||
- Added write concern options to inserts
|
- Added write concern options to inserts
|
||||||
- Fixed typo in meta for index options
|
- Fixed typo in meta for index options
|
||||||
- Bug fix Read preference now passed correctly
|
- Bug fix Read preference now passed correctly
|
||||||
@@ -395,8 +815,7 @@ Changes in 0.6.1
|
|||||||
- Fix for replicaSet connections
|
- Fix for replicaSet connections
|
||||||
|
|
||||||
Changes in 0.6
|
Changes in 0.6
|
||||||
================
|
==============
|
||||||
|
|
||||||
- Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7
|
- Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7
|
||||||
- Added support for covered indexes when inheritance is off
|
- Added support for covered indexes when inheritance is off
|
||||||
- No longer always upsert on save for items with a '_id'
|
- No longer always upsert on save for items with a '_id'
|
||||||
@@ -483,8 +902,8 @@ Changes in v0.5
|
|||||||
- Updated default collection naming convention
|
- Updated default collection naming convention
|
||||||
- Added Document Mixin support
|
- Added Document Mixin support
|
||||||
- Fixed queryset __repr__ mid iteration
|
- Fixed queryset __repr__ mid iteration
|
||||||
- Added hint() support, so cantell Mongo the proper index to use for the query
|
- Added hint() support, so can tell Mongo the proper index to use for the query
|
||||||
- Fixed issue with inconsitent setting of _cls breaking inherited referencing
|
- Fixed issue with inconsistent setting of _cls breaking inherited referencing
|
||||||
- Added help_text and verbose_name to fields to help with some form libs
|
- Added help_text and verbose_name to fields to help with some form libs
|
||||||
- Updated item_frequencies to handle embedded document lookups
|
- Updated item_frequencies to handle embedded document lookups
|
||||||
- Added delta tracking now only sets / unsets explicitly changed fields
|
- Added delta tracking now only sets / unsets explicitly changed fields
|
||||||
@@ -621,7 +1040,6 @@ Changes in v0.1.3
|
|||||||
querying takes place
|
querying takes place
|
||||||
- A few minor bugfixes
|
- A few minor bugfixes
|
||||||
|
|
||||||
|
|
||||||
Changes in v0.1.2
|
Changes in v0.1.2
|
||||||
=================
|
=================
|
||||||
- Query values may be processed before being used in queries
|
- Query values may be processed before being used in queries
|
||||||
@@ -630,7 +1048,6 @@ Changes in v0.1.2
|
|||||||
- Added ``BooleanField``
|
- Added ``BooleanField``
|
||||||
- Added ``Document.reload()`` method
|
- Added ``Document.reload()`` method
|
||||||
|
|
||||||
|
|
||||||
Changes in v0.1.1
|
Changes in v0.1.1
|
||||||
=================
|
=================
|
||||||
- Documents may now use capped collections
|
- Documents may now use capped collections
|
||||||
|
|||||||
@@ -1,66 +1,77 @@
|
|||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
|
|
||||||
connect('tumblelog')
|
connect("tumblelog")
|
||||||
|
|
||||||
|
|
||||||
class Comment(EmbeddedDocument):
|
class Comment(EmbeddedDocument):
|
||||||
content = StringField()
|
content = StringField()
|
||||||
name = StringField(max_length=120)
|
name = StringField(max_length=120)
|
||||||
|
|
||||||
|
|
||||||
class User(Document):
|
class User(Document):
|
||||||
email = StringField(required=True)
|
email = StringField(required=True)
|
||||||
first_name = StringField(max_length=50)
|
first_name = StringField(max_length=50)
|
||||||
last_name = StringField(max_length=50)
|
last_name = StringField(max_length=50)
|
||||||
|
|
||||||
|
|
||||||
class Post(Document):
|
class Post(Document):
|
||||||
title = StringField(max_length=120, required=True)
|
title = StringField(max_length=120, required=True)
|
||||||
author = ReferenceField(User)
|
author = ReferenceField(User)
|
||||||
tags = ListField(StringField(max_length=30))
|
tags = ListField(StringField(max_length=30))
|
||||||
comments = ListField(EmbeddedDocumentField(Comment))
|
comments = ListField(EmbeddedDocumentField(Comment))
|
||||||
|
|
||||||
|
# bugfix
|
||||||
|
meta = {"allow_inheritance": True}
|
||||||
|
|
||||||
|
|
||||||
class TextPost(Post):
|
class TextPost(Post):
|
||||||
content = StringField()
|
content = StringField()
|
||||||
|
|
||||||
|
|
||||||
class ImagePost(Post):
|
class ImagePost(Post):
|
||||||
image_path = StringField()
|
image_path = StringField()
|
||||||
|
|
||||||
|
|
||||||
class LinkPost(Post):
|
class LinkPost(Post):
|
||||||
link_url = StringField()
|
link_url = StringField()
|
||||||
|
|
||||||
|
|
||||||
Post.drop_collection()
|
Post.drop_collection()
|
||||||
|
|
||||||
john = User(email='jdoe@example.com', first_name='John', last_name='Doe')
|
john = User(email="jdoe@example.com", first_name="John", last_name="Doe")
|
||||||
john.save()
|
john.save()
|
||||||
|
|
||||||
post1 = TextPost(title='Fun with MongoEngine', author=john)
|
post1 = TextPost(title="Fun with MongoEngine", author=john)
|
||||||
post1.content = 'Took a look at MongoEngine today, looks pretty cool.'
|
post1.content = "Took a look at MongoEngine today, looks pretty cool."
|
||||||
post1.tags = ['mongodb', 'mongoengine']
|
post1.tags = ["mongodb", "mongoengine"]
|
||||||
post1.save()
|
post1.save()
|
||||||
|
|
||||||
post2 = LinkPost(title='MongoEngine Documentation', author=john)
|
post2 = LinkPost(title="MongoEngine Documentation", author=john)
|
||||||
post2.link_url = 'http://tractiondigital.com/labs/mongoengine/docs'
|
post2.link_url = "http://tractiondigital.com/labs/mongoengine/docs"
|
||||||
post2.tags = ['mongoengine']
|
post2.tags = ["mongoengine"]
|
||||||
post2.save()
|
post2.save()
|
||||||
|
|
||||||
print 'ALL POSTS'
|
print("ALL POSTS")
|
||||||
print
|
print()
|
||||||
for post in Post.objects:
|
for post in Post.objects:
|
||||||
print post.title
|
print(post.title)
|
||||||
print '=' * post.title.count()
|
# print '=' * post.title.count()
|
||||||
|
print("=" * 20)
|
||||||
|
|
||||||
if isinstance(post, TextPost):
|
if isinstance(post, TextPost):
|
||||||
print post.content
|
print(post.content)
|
||||||
|
|
||||||
if isinstance(post, LinkPost):
|
if isinstance(post, LinkPost):
|
||||||
print 'Link:', post.link_url
|
print("Link:", post.link_url)
|
||||||
|
|
||||||
print
|
print()
|
||||||
print
|
print()
|
||||||
|
|
||||||
print 'POSTS TAGGED \'MONGODB\''
|
print("POSTS TAGGED 'MONGODB'")
|
||||||
print
|
print()
|
||||||
for post in Post.objects(tags='mongodb'):
|
for post in Post.objects(tags="mongodb"):
|
||||||
print post.title
|
print(post.title)
|
||||||
print
|
print()
|
||||||
|
|
||||||
num_posts = Post.objects(tags='mongodb').count()
|
num_posts = Post.objects(tags="mongodb").count()
|
||||||
print 'Found %d posts with tag "mongodb"' % num_posts
|
print('Found %d posts with tag "mongodb"' % num_posts)
|
||||||
|
|||||||
105
docs/conf.py
105
docs/conf.py
@@ -1,4 +1,3 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
#
|
#
|
||||||
# MongoEngine documentation build configuration file, created by
|
# MongoEngine documentation build configuration file, created by
|
||||||
# sphinx-quickstart on Sun Nov 22 18:14:13 2009.
|
# sphinx-quickstart on Sun Nov 22 18:14:13 2009.
|
||||||
@@ -11,40 +10,44 @@
|
|||||||
# All configuration values have a default; values that are commented out
|
# All configuration values have a default; values that are commented out
|
||||||
# serve to show the default.
|
# serve to show the default.
|
||||||
|
|
||||||
import sys, os
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
import sphinx_rtd_theme
|
||||||
|
|
||||||
|
import mongoengine
|
||||||
|
|
||||||
# If extensions (or modules to document with autodoc) are in another directory,
|
# If extensions (or modules to document with autodoc) are in another directory,
|
||||||
# add these directories to sys.path here. If the directory is relative to the
|
# add these directories to sys.path here. If the directory is relative to the
|
||||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||||
sys.path.insert(0, os.path.abspath('..'))
|
sys.path.insert(0, os.path.abspath(".."))
|
||||||
|
|
||||||
# -- General configuration -----------------------------------------------------
|
# -- General configuration -----------------------------------------------------
|
||||||
|
|
||||||
# Add any Sphinx extension module names here, as strings. They can be extensions
|
# Add any Sphinx extension module names here, as strings. They can be extensions
|
||||||
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
||||||
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo']
|
extensions = ["sphinx.ext.autodoc", "sphinx.ext.todo"]
|
||||||
|
|
||||||
# Add any paths that contain templates here, relative to this directory.
|
# Add any paths that contain templates here, relative to this directory.
|
||||||
templates_path = ['_templates']
|
templates_path = ["_templates"]
|
||||||
|
|
||||||
# The suffix of source filenames.
|
# The suffix of source filenames.
|
||||||
source_suffix = '.rst'
|
source_suffix = ".rst"
|
||||||
|
|
||||||
# The encoding of source files.
|
# The encoding of source files.
|
||||||
#source_encoding = 'utf-8'
|
# source_encoding = 'utf-8'
|
||||||
|
|
||||||
# The master toctree document.
|
# The master toctree document.
|
||||||
master_doc = 'index'
|
master_doc = "index"
|
||||||
|
|
||||||
# General information about the project.
|
# General information about the project.
|
||||||
project = u'MongoEngine'
|
project = u"MongoEngine"
|
||||||
copyright = u'2009, MongoEngine Authors'
|
copyright = u"2009, MongoEngine Authors"
|
||||||
|
|
||||||
# The version info for the project you're documenting, acts as replacement for
|
# The version info for the project you're documenting, acts as replacement for
|
||||||
# |version| and |release|, also used in various other places throughout the
|
# |version| and |release|, also used in various other places throughout the
|
||||||
# built documents.
|
# built documents.
|
||||||
#
|
#
|
||||||
import mongoengine
|
|
||||||
# The short X.Y version.
|
# The short X.Y version.
|
||||||
version = mongoengine.get_version()
|
version = mongoengine.get_version()
|
||||||
# The full version, including alpha/beta/rc tags.
|
# The full version, including alpha/beta/rc tags.
|
||||||
@@ -52,80 +55,80 @@ release = mongoengine.get_version()
|
|||||||
|
|
||||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||||
# for a list of supported languages.
|
# for a list of supported languages.
|
||||||
#language = None
|
# language = None
|
||||||
|
|
||||||
# There are two options for replacing |today|: either, you set today to some
|
# There are two options for replacing |today|: either, you set today to some
|
||||||
# non-false value, then it is used:
|
# non-false value, then it is used:
|
||||||
#today = ''
|
# today = ''
|
||||||
# Else, today_fmt is used as the format for a strftime call.
|
# Else, today_fmt is used as the format for a strftime call.
|
||||||
#today_fmt = '%B %d, %Y'
|
# today_fmt = '%B %d, %Y'
|
||||||
|
|
||||||
# List of documents that shouldn't be included in the build.
|
# List of documents that shouldn't be included in the build.
|
||||||
#unused_docs = []
|
# unused_docs = []
|
||||||
|
|
||||||
# List of directories, relative to source directory, that shouldn't be searched
|
# List of directories, relative to source directory, that shouldn't be searched
|
||||||
# for source files.
|
# for source files.
|
||||||
exclude_trees = ['_build']
|
exclude_trees = ["_build"]
|
||||||
|
|
||||||
# The reST default role (used for this markup: `text`) to use for all documents.
|
# The reST default role (used for this markup: `text`) to use for all documents.
|
||||||
#default_role = None
|
# default_role = None
|
||||||
|
|
||||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||||
#add_function_parentheses = True
|
# add_function_parentheses = True
|
||||||
|
|
||||||
# If true, the current module name will be prepended to all description
|
# If true, the current module name will be prepended to all description
|
||||||
# unit titles (such as .. function::).
|
# unit titles (such as .. function::).
|
||||||
#add_module_names = True
|
# add_module_names = True
|
||||||
|
|
||||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||||
# output. They are ignored by default.
|
# output. They are ignored by default.
|
||||||
#show_authors = False
|
# show_authors = False
|
||||||
|
|
||||||
# The name of the Pygments (syntax highlighting) style to use.
|
# The name of the Pygments (syntax highlighting) style to use.
|
||||||
pygments_style = 'sphinx'
|
pygments_style = "sphinx"
|
||||||
|
|
||||||
# A list of ignored prefixes for module index sorting.
|
# A list of ignored prefixes for module index sorting.
|
||||||
#modindex_common_prefix = []
|
# modindex_common_prefix = []
|
||||||
|
|
||||||
|
|
||||||
# -- Options for HTML output ---------------------------------------------------
|
# -- Options for HTML output ---------------------------------------------------
|
||||||
|
|
||||||
# The theme to use for HTML and HTML Help pages. Major themes that come with
|
# The theme to use for HTML and HTML Help pages. Major themes that come with
|
||||||
# Sphinx are currently 'default' and 'sphinxdoc'.
|
# Sphinx are currently 'default' and 'sphinxdoc'.
|
||||||
html_theme = 'nature'
|
html_theme = "sphinx_rtd_theme"
|
||||||
|
|
||||||
# Theme options are theme-specific and customize the look and feel of a theme
|
# Theme options are theme-specific and customize the look and feel of a theme
|
||||||
# further. For a list of options available for each theme, see the
|
# further. For a list of options available for each theme, see the
|
||||||
# documentation.
|
# documentation.
|
||||||
#html_theme_options = {}
|
html_theme_options = {"canonical_url": "http://docs.mongoengine.org/en/latest/"}
|
||||||
|
|
||||||
# Add any paths that contain custom themes here, relative to this directory.
|
# Add any paths that contain custom themes here, relative to this directory.
|
||||||
html_theme_path = ['_themes']
|
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
|
||||||
|
|
||||||
# The name for this set of Sphinx documents. If None, it defaults to
|
# The name for this set of Sphinx documents. If None, it defaults to
|
||||||
# "<project> v<release> documentation".
|
# "<project> v<release> documentation".
|
||||||
#html_title = None
|
# html_title = None
|
||||||
|
|
||||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||||
#html_short_title = None
|
# html_short_title = None
|
||||||
|
|
||||||
# The name of an image file (relative to this directory) to place at the top
|
# The name of an image file (relative to this directory) to place at the top
|
||||||
# of the sidebar.
|
# of the sidebar.
|
||||||
#html_logo = None
|
# html_logo = None
|
||||||
|
|
||||||
# The name of an image file (within the static path) to use as favicon of the
|
# The name of an image file (within the static path) to use as favicon of the
|
||||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||||
# pixels large.
|
# pixels large.
|
||||||
#html_favicon = None
|
html_favicon = "favicon.ico"
|
||||||
|
|
||||||
# Add any paths that contain custom static files (such as style sheets) here,
|
# Add any paths that contain custom static files (such as style sheets) here,
|
||||||
# relative to this directory. They are copied after the builtin static files,
|
# relative to this directory. They are copied after the builtin static files,
|
||||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||||
#html_static_path = ['_static']
|
# html_static_path = ['_static']
|
||||||
|
|
||||||
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||||
# using the given strftime format.
|
# using the given strftime format.
|
||||||
#html_last_updated_fmt = '%b %d, %Y'
|
# html_last_updated_fmt = '%b %d, %Y'
|
||||||
|
|
||||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||||
# typographically correct entities.
|
# typographically correct entities.
|
||||||
@@ -133,70 +136,68 @@ html_use_smartypants = True
|
|||||||
|
|
||||||
# Custom sidebar templates, maps document names to template names.
|
# Custom sidebar templates, maps document names to template names.
|
||||||
html_sidebars = {
|
html_sidebars = {
|
||||||
'index': ['globaltoc.html', 'searchbox.html'],
|
"index": ["globaltoc.html", "searchbox.html"],
|
||||||
'**': ['localtoc.html', 'relations.html', 'searchbox.html']
|
"**": ["localtoc.html", "relations.html", "searchbox.html"],
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
# Additional templates that should be rendered to pages, maps page names to
|
# Additional templates that should be rendered to pages, maps page names to
|
||||||
# template names.
|
# template names.
|
||||||
#html_additional_pages = {}
|
# html_additional_pages = {}
|
||||||
|
|
||||||
# If false, no module index is generated.
|
# If false, no module index is generated.
|
||||||
#html_use_modindex = True
|
# html_use_modindex = True
|
||||||
|
|
||||||
# If false, no index is generated.
|
# If false, no index is generated.
|
||||||
#html_use_index = True
|
# html_use_index = True
|
||||||
|
|
||||||
# If true, the index is split into individual pages for each letter.
|
# If true, the index is split into individual pages for each letter.
|
||||||
#html_split_index = False
|
# html_split_index = False
|
||||||
|
|
||||||
# If true, links to the reST sources are added to the pages.
|
# If true, links to the reST sources are added to the pages.
|
||||||
#html_show_sourcelink = True
|
# html_show_sourcelink = True
|
||||||
|
|
||||||
# If true, an OpenSearch description file will be output, and all pages will
|
# If true, an OpenSearch description file will be output, and all pages will
|
||||||
# contain a <link> tag referring to it. The value of this option must be the
|
# contain a <link> tag referring to it. The value of this option must be the
|
||||||
# base URL from which the finished HTML is served.
|
# base URL from which the finished HTML is served.
|
||||||
#html_use_opensearch = ''
|
# html_use_opensearch = ''
|
||||||
|
|
||||||
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
|
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
|
||||||
#html_file_suffix = ''
|
# html_file_suffix = ''
|
||||||
|
|
||||||
# Output file base name for HTML help builder.
|
# Output file base name for HTML help builder.
|
||||||
htmlhelp_basename = 'MongoEnginedoc'
|
htmlhelp_basename = "MongoEnginedoc"
|
||||||
|
|
||||||
|
|
||||||
# -- Options for LaTeX output --------------------------------------------------
|
# -- Options for LaTeX output --------------------------------------------------
|
||||||
|
|
||||||
# The paper size ('letter' or 'a4').
|
# The paper size ('letter' or 'a4').
|
||||||
latex_paper_size = 'a4'
|
latex_paper_size = "a4"
|
||||||
|
|
||||||
# The font size ('10pt', '11pt' or '12pt').
|
# The font size ('10pt', '11pt' or '12pt').
|
||||||
#latex_font_size = '10pt'
|
# latex_font_size = '10pt'
|
||||||
|
|
||||||
# Grouping the document tree into LaTeX files. List of tuples
|
# Grouping the document tree into LaTeX files. List of tuples
|
||||||
# (source start file, target name, title, author, documentclass [howto/manual]).
|
# (source start file, target name, title, author, documentclass [howto/manual]).
|
||||||
latex_documents = [
|
latex_documents = [
|
||||||
('index', 'MongoEngine.tex', 'MongoEngine Documentation',
|
("index", "MongoEngine.tex", "MongoEngine Documentation", "Ross Lawley", "manual")
|
||||||
'Ross Lawley', 'manual'),
|
|
||||||
]
|
]
|
||||||
|
|
||||||
# The name of an image file (relative to this directory) to place at the top of
|
# The name of an image file (relative to this directory) to place at the top of
|
||||||
# the title page.
|
# the title page.
|
||||||
#latex_logo = None
|
# latex_logo = None
|
||||||
|
|
||||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||||
# not chapters.
|
# not chapters.
|
||||||
#latex_use_parts = False
|
# latex_use_parts = False
|
||||||
|
|
||||||
# Additional stuff for the LaTeX preamble.
|
# Additional stuff for the LaTeX preamble.
|
||||||
#latex_preamble = ''
|
# latex_preamble = ''
|
||||||
|
|
||||||
# Documents to append as an appendix to all manuals.
|
# Documents to append as an appendix to all manuals.
|
||||||
#latex_appendices = []
|
# latex_appendices = []
|
||||||
|
|
||||||
# If false, no module index is generated.
|
# If false, no module index is generated.
|
||||||
#latex_use_modindex = True
|
# latex_use_modindex = True
|
||||||
|
|
||||||
autoclass_content = 'both'
|
|
||||||
|
|
||||||
|
autoclass_content = "both"
|
||||||
|
|||||||
180
docs/django.rst
180
docs/django.rst
@@ -2,176 +2,18 @@
|
|||||||
Django Support
|
Django Support
|
||||||
==============
|
==============
|
||||||
|
|
||||||
.. note:: Updated to support Django 1.5
|
.. note:: Django support has been split from the main MongoEngine
|
||||||
|
repository. The *legacy* Django extension may be found bundled with the
|
||||||
Connecting
|
0.9 release of MongoEngine.
|
||||||
==========
|
|
||||||
In your **settings.py** file, ignore the standard database settings (unless you
|
|
||||||
also plan to use the ORM in your project), and instead call
|
|
||||||
:func:`~mongoengine.connect` somewhere in the settings module.
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
If you are not using another Database backend you may need to add a dummy
|
|
||||||
database backend to ``settings.py`` eg::
|
|
||||||
|
|
||||||
DATABASES = {
|
|
||||||
'default': {
|
|
||||||
'ENGINE': 'django.db.backends.dummy'
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Authentication
|
|
||||||
==============
|
|
||||||
MongoEngine includes a Django authentication backend, which uses MongoDB. The
|
|
||||||
:class:`~mongoengine.django.auth.User` model is a MongoEngine
|
|
||||||
:class:`~mongoengine.Document`, but implements most of the methods and
|
|
||||||
attributes that the standard Django :class:`User` model does - so the two are
|
|
||||||
moderately compatible. Using this backend will allow you to store users in
|
|
||||||
MongoDB but still use many of the Django authentication infrastructure (such as
|
|
||||||
the :func:`login_required` decorator and the :func:`authenticate` function). To
|
|
||||||
enable the MongoEngine auth backend, add the following to your **settings.py**
|
|
||||||
file::
|
|
||||||
|
|
||||||
AUTHENTICATION_BACKENDS = (
|
|
||||||
'mongoengine.django.auth.MongoEngineBackend',
|
|
||||||
)
|
|
||||||
|
|
||||||
The :mod:`~mongoengine.django.auth` module also contains a
|
|
||||||
:func:`~mongoengine.django.auth.get_user` helper function, that takes a user's
|
|
||||||
:attr:`id` and returns a :class:`~mongoengine.django.auth.User` object.
|
|
||||||
|
|
||||||
.. versionadded:: 0.1.3
|
|
||||||
|
|
||||||
Custom User model
|
|
||||||
=================
|
|
||||||
Django 1.5 introduced `Custom user Models
|
|
||||||
<https://docs.djangoproject.com/en/dev/topics/auth/customizing/#auth-custom-user>`_
|
|
||||||
which can be used as an alternative to the MongoEngine authentication backend.
|
|
||||||
|
|
||||||
The main advantage of this option is that other components relying on
|
|
||||||
:mod:`django.contrib.auth` and supporting the new swappable user model are more
|
|
||||||
likely to work. For example, you can use the ``createsuperuser`` management
|
|
||||||
command as usual.
|
|
||||||
|
|
||||||
To enable the custom User model in Django, add ``'mongoengine.django.mongo_auth'``
|
|
||||||
in your ``INSTALLED_APPS`` and set ``'mongo_auth.MongoUser'`` as the custom user
|
|
||||||
user model to use. In your **settings.py** file you will have::
|
|
||||||
|
|
||||||
INSTALLED_APPS = (
|
|
||||||
...
|
|
||||||
'django.contrib.auth',
|
|
||||||
'mongoengine.django.mongo_auth',
|
|
||||||
...
|
|
||||||
)
|
|
||||||
|
|
||||||
AUTH_USER_MODEL = 'mongo_auth.MongoUser'
|
|
||||||
|
|
||||||
An additional ``MONGOENGINE_USER_DOCUMENT`` setting enables you to replace the
|
|
||||||
:class:`~mongoengine.django.auth.User` class with another class of your choice::
|
|
||||||
|
|
||||||
MONGOENGINE_USER_DOCUMENT = 'mongoengine.django.auth.User'
|
|
||||||
|
|
||||||
The custom :class:`User` must be a :class:`~mongoengine.Document` class, but
|
|
||||||
otherwise has the same requirements as a standard custom user model,
|
|
||||||
as specified in the `Django Documentation
|
|
||||||
<https://docs.djangoproject.com/en/dev/topics/auth/customizing/>`_.
|
|
||||||
In particular, the custom class must define :attr:`USERNAME_FIELD` and
|
|
||||||
:attr:`REQUIRED_FIELDS` attributes.
|
|
||||||
|
|
||||||
Sessions
|
|
||||||
========
|
|
||||||
Django allows the use of different backend stores for its sessions. MongoEngine
|
|
||||||
provides a MongoDB-based session backend for Django, which allows you to use
|
|
||||||
sessions in your Django application with just MongoDB. To enable the MongoEngine
|
|
||||||
session backend, ensure that your settings module has
|
|
||||||
``'django.contrib.sessions.middleware.SessionMiddleware'`` in the
|
|
||||||
``MIDDLEWARE_CLASSES`` field and ``'django.contrib.sessions'`` in your
|
|
||||||
``INSTALLED_APPS``. From there, all you need to do is add the following line
|
|
||||||
into your settings module::
|
|
||||||
|
|
||||||
SESSION_ENGINE = 'mongoengine.django.sessions'
|
|
||||||
SESSION_SERIALIZER = 'mongoengine.django.sessions.BSONSerializer'
|
|
||||||
|
|
||||||
Django provides session cookie, which expires after ```SESSION_COOKIE_AGE``` seconds, but doesn't delete cookie at sessions backend, so ``'mongoengine.django.sessions'`` supports `mongodb TTL
|
|
||||||
<http://docs.mongodb.org/manual/tutorial/expire-data/>`_.
|
|
||||||
|
|
||||||
.. note:: ``SESSION_SERIALIZER`` is only necessary in Django 1.6 as the default
|
|
||||||
serializer is based around JSON and doesn't know how to convert
|
|
||||||
``bson.objectid.ObjectId`` instances to strings.
|
|
||||||
|
|
||||||
.. versionadded:: 0.2.1
|
|
||||||
|
|
||||||
Storage
|
|
||||||
=======
|
|
||||||
With MongoEngine's support for GridFS via the :class:`~mongoengine.fields.FileField`,
|
|
||||||
it is useful to have a Django file storage backend that wraps this. The new
|
|
||||||
storage module is called :class:`~mongoengine.django.storage.GridFSStorage`.
|
|
||||||
Using it is very similar to using the default FileSystemStorage.::
|
|
||||||
|
|
||||||
from mongoengine.django.storage import GridFSStorage
|
|
||||||
fs = GridFSStorage()
|
|
||||||
|
|
||||||
filename = fs.save('hello.txt', 'Hello, World!')
|
|
||||||
|
|
||||||
All of the `Django Storage API methods
|
|
||||||
<http://docs.djangoproject.com/en/dev/ref/files/storage/>`_ have been
|
|
||||||
implemented except :func:`path`. If the filename provided already exists, an
|
|
||||||
underscore and a number (before # the file extension, if one exists) will be
|
|
||||||
appended to the filename until the generated filename doesn't exist. The
|
|
||||||
:func:`save` method will return the new filename.::
|
|
||||||
|
|
||||||
>>> fs.exists('hello.txt')
|
|
||||||
True
|
|
||||||
>>> fs.open('hello.txt').read()
|
|
||||||
'Hello, World!'
|
|
||||||
>>> fs.size('hello.txt')
|
|
||||||
13
|
|
||||||
>>> fs.url('hello.txt')
|
|
||||||
'http://your_media_url/hello.txt'
|
|
||||||
>>> fs.open('hello.txt').name
|
|
||||||
'hello.txt'
|
|
||||||
>>> fs.listdir()
|
|
||||||
([], [u'hello.txt'])
|
|
||||||
|
|
||||||
All files will be saved and retrieved in GridFS via the :class:`FileDocument`
|
|
||||||
document, allowing easy access to the files without the GridFSStorage
|
|
||||||
backend.::
|
|
||||||
|
|
||||||
>>> from mongoengine.django.storage import FileDocument
|
|
||||||
>>> FileDocument.objects()
|
|
||||||
[<FileDocument: FileDocument object>]
|
|
||||||
|
|
||||||
.. versionadded:: 0.4
|
|
||||||
|
|
||||||
Shortcuts
|
|
||||||
=========
|
|
||||||
Inspired by the `Django shortcut get_object_or_404
|
|
||||||
<https://docs.djangoproject.com/en/dev/topics/http/shortcuts/#get-object-or-404>`_,
|
|
||||||
the :func:`~mongoengine.django.shortcuts.get_document_or_404` method returns
|
|
||||||
a document or raises an Http404 exception if the document does not exist::
|
|
||||||
|
|
||||||
from mongoengine.django.shortcuts import get_document_or_404
|
|
||||||
|
|
||||||
admin_user = get_document_or_404(User, username='root')
|
|
||||||
|
|
||||||
The first argument may be a Document or QuerySet object. All other passed arguments
|
|
||||||
and keyword arguments are used in the query::
|
|
||||||
|
|
||||||
foo_email = get_document_or_404(User.objects.only('email'), username='foo', is_active=True).email
|
|
||||||
|
|
||||||
.. note:: Like with :func:`get`, a MultipleObjectsReturned will be raised if more than one
|
|
||||||
object is found.
|
|
||||||
|
|
||||||
|
|
||||||
Also inspired by the `Django shortcut get_list_or_404
|
|
||||||
<https://docs.djangoproject.com/en/dev/topics/http/shortcuts/#get-list-or-404>`_,
|
|
||||||
the :func:`~mongoengine.django.shortcuts.get_list_or_404` method returns a list of
|
|
||||||
documents or raises an Http404 exception if the list is empty::
|
|
||||||
|
|
||||||
from mongoengine.django.shortcuts import get_list_or_404
|
Help Wanted!
|
||||||
|
------------
|
||||||
active_users = get_list_or_404(User, is_active=True)
|
|
||||||
|
|
||||||
The first argument may be a Document or QuerySet object. All other passed
|
|
||||||
arguments and keyword arguments are used to filter the query.
|
|
||||||
|
|
||||||
|
The MongoEngine team is looking for help contributing and maintaining a new
|
||||||
|
Django extension for MongoEngine! If you have Django experience and would like
|
||||||
|
to help contribute to the project, please get in touch on the
|
||||||
|
`mailing list <http://groups.google.com/group/mongoengine-users>`_ or by
|
||||||
|
simply contributing on
|
||||||
|
`GitHub <https://github.com/MongoEngine/django-mongoengine>`_.
|
||||||
|
|||||||
12
docs/faq.rst
Normal file
12
docs/faq.rst
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
==========================
|
||||||
|
Frequently Asked Questions
|
||||||
|
==========================
|
||||||
|
|
||||||
|
Does MongoEngine support asynchronous drivers (Motor, TxMongo)?
|
||||||
|
---------------------------------------------------------------
|
||||||
|
|
||||||
|
No, MongoEngine is exclusively based on PyMongo and isn't designed to support other driver.
|
||||||
|
If this is a requirement for your project, check the alternative: `uMongo`_ and `MotorEngine`_.
|
||||||
|
|
||||||
|
.. _uMongo: https://umongo.readthedocs.io/
|
||||||
|
.. _MotorEngine: https://motorengine.readthedocs.io/
|
||||||
@@ -4,9 +4,11 @@
|
|||||||
Connecting to MongoDB
|
Connecting to MongoDB
|
||||||
=====================
|
=====================
|
||||||
|
|
||||||
To connect to a running instance of :program:`mongod`, use the
|
Connections in MongoEngine are registered globally and are identified with aliases.
|
||||||
:func:`~mongoengine.connect` function. The first argument is the name of the
|
If no `alias` is provided during the connection, it will use "default" as alias.
|
||||||
database to connect to::
|
|
||||||
|
To connect to a running instance of :program:`mongod`, use the :func:`~mongoengine.connect`
|
||||||
|
function. The first argument is the name of the database to connect to::
|
||||||
|
|
||||||
from mongoengine import connect
|
from mongoengine import connect
|
||||||
connect('project1')
|
connect('project1')
|
||||||
@@ -18,26 +20,47 @@ provide the :attr:`host` and :attr:`port` arguments to
|
|||||||
|
|
||||||
connect('project1', host='192.168.1.35', port=12345)
|
connect('project1', host='192.168.1.35', port=12345)
|
||||||
|
|
||||||
If the database requires authentication, :attr:`username` and :attr:`password`
|
If the database requires authentication, :attr:`username`, :attr:`password`
|
||||||
arguments should be provided::
|
and :attr:`authentication_source` arguments should be provided::
|
||||||
|
|
||||||
connect('project1', username='webapp', password='pwd123')
|
connect('project1', username='webapp', password='pwd123', authentication_source='admin')
|
||||||
|
|
||||||
Uri style connections are also supported - just supply the uri as
|
URI style connections are also supported -- just supply the URI as
|
||||||
the :attr:`host` to
|
the :attr:`host` to
|
||||||
:func:`~mongoengine.connect`::
|
:func:`~mongoengine.connect`::
|
||||||
|
|
||||||
connect('project1', host='mongodb://localhost/database_name')
|
connect('project1', host='mongodb://localhost/database_name')
|
||||||
|
|
||||||
Note that database name from uri has priority over name
|
.. note:: URI containing SRV records (e.g mongodb+srv://server.example.com/) can be used as well as the :attr:`host`
|
||||||
in ::func:`~mongoengine.connect`
|
|
||||||
|
|
||||||
ReplicaSets
|
.. note:: Database, username and password from URI string overrides
|
||||||
===========
|
corresponding parameters in :func:`~mongoengine.connect`: ::
|
||||||
|
|
||||||
MongoEngine supports :class:`~pymongo.mongo_replica_set_client.MongoReplicaSetClient`
|
connect(
|
||||||
to use them please use a URI style connection and provide the `replicaSet` name in the
|
db='test',
|
||||||
connection kwargs.
|
username='user',
|
||||||
|
password='12345',
|
||||||
|
host='mongodb://admin:qwerty@localhost/production'
|
||||||
|
)
|
||||||
|
|
||||||
|
will establish connection to ``production`` database using
|
||||||
|
``admin`` username and ``qwerty`` password.
|
||||||
|
|
||||||
|
.. note:: Calling :func:`~mongoengine.connect` without argument will establish
|
||||||
|
a connection to the "test" database by default
|
||||||
|
|
||||||
|
Replica Sets
|
||||||
|
============
|
||||||
|
|
||||||
|
MongoEngine supports connecting to replica sets::
|
||||||
|
|
||||||
|
from mongoengine import connect
|
||||||
|
|
||||||
|
# Regular connect
|
||||||
|
connect('dbname', replicaset='rs-name')
|
||||||
|
|
||||||
|
# MongoDB URI-style connect
|
||||||
|
connect(host='mongodb://localhost/dbname?replicaSet=rs-name')
|
||||||
|
|
||||||
Read preferences are supported through the connection or via individual
|
Read preferences are supported through the connection or via individual
|
||||||
queries by passing the read_preference ::
|
queries by passing the read_preference ::
|
||||||
@@ -48,55 +71,109 @@ queries by passing the read_preference ::
|
|||||||
Multiple Databases
|
Multiple Databases
|
||||||
==================
|
==================
|
||||||
|
|
||||||
Multiple database support was added in MongoEngine 0.6. To use multiple
|
To use multiple databases you can use :func:`~mongoengine.connect` and provide
|
||||||
databases you can use :func:`~mongoengine.connect` and provide an `alias` name
|
an `alias` name for the connection - if no `alias` is provided then "default"
|
||||||
for the connection - if no `alias` is provided then "default" is used.
|
is used.
|
||||||
|
|
||||||
In the background this uses :func:`~mongoengine.register_connection` to
|
In the background this uses :func:`~mongoengine.register_connection` to
|
||||||
store the data and you can register all aliases up front if required.
|
store the data and you can register all aliases up front if required.
|
||||||
|
|
||||||
Individual documents can also support multiple databases by providing a
|
Documents defined in different database
|
||||||
`db_alias` in their meta data. This allows :class:`~pymongo.dbref.DBRef` objects
|
---------------------------------------
|
||||||
to point across databases and collections. Below is an example schema, using
|
Individual documents can be attached to different databases by providing a
|
||||||
3 different databases to store data::
|
`db_alias` in their meta data. This allows :class:`~pymongo.dbref.DBRef`
|
||||||
|
objects to point across databases and collections. Below is an example schema,
|
||||||
|
using 3 different databases to store data::
|
||||||
|
|
||||||
|
connect(alias='user-db-alias', db='user-db')
|
||||||
|
connect(alias='book-db-alias', db='book-db')
|
||||||
|
connect(alias='users-books-db-alias', db='users-books-db')
|
||||||
|
|
||||||
class User(Document):
|
class User(Document):
|
||||||
name = StringField()
|
name = StringField()
|
||||||
|
|
||||||
meta = {"db_alias": "user-db"}
|
meta = {'db_alias': 'user-db-alias'}
|
||||||
|
|
||||||
class Book(Document):
|
class Book(Document):
|
||||||
name = StringField()
|
name = StringField()
|
||||||
|
|
||||||
meta = {"db_alias": "book-db"}
|
meta = {'db_alias': 'book-db-alias'}
|
||||||
|
|
||||||
class AuthorBooks(Document):
|
class AuthorBooks(Document):
|
||||||
author = ReferenceField(User)
|
author = ReferenceField(User)
|
||||||
book = ReferenceField(Book)
|
book = ReferenceField(Book)
|
||||||
|
|
||||||
meta = {"db_alias": "users-books-db"}
|
meta = {'db_alias': 'users-books-db-alias'}
|
||||||
|
|
||||||
|
|
||||||
Switch Database Context Manager
|
Disconnecting an existing connection
|
||||||
===============================
|
------------------------------------
|
||||||
|
The function :func:`~mongoengine.disconnect` can be used to
|
||||||
|
disconnect a particular connection. This can be used to change a
|
||||||
|
connection globally::
|
||||||
|
|
||||||
Sometimes you may want to switch the database to query against for a class
|
from mongoengine import connect, disconnect
|
||||||
for example, archiving older data into a separate database for performance
|
connect('a_db', alias='db1')
|
||||||
reasons.
|
|
||||||
|
|
||||||
|
class User(Document):
|
||||||
|
name = StringField()
|
||||||
|
meta = {'db_alias': 'db1'}
|
||||||
|
|
||||||
|
disconnect(alias='db1')
|
||||||
|
|
||||||
|
connect('another_db', alias='db1')
|
||||||
|
|
||||||
|
.. note:: Calling :func:`~mongoengine.disconnect` without argument
|
||||||
|
will disconnect the "default" connection
|
||||||
|
|
||||||
|
.. note:: Since connections gets registered globally, it is important
|
||||||
|
to use the `disconnect` function from MongoEngine and not the
|
||||||
|
`disconnect()` method of an existing connection (pymongo.MongoClient)
|
||||||
|
|
||||||
|
.. note:: :class:`~mongoengine.Document` are caching the pymongo collection.
|
||||||
|
using `disconnect` ensures that it gets cleaned as well
|
||||||
|
|
||||||
|
Context Managers
|
||||||
|
================
|
||||||
|
Sometimes you may want to switch the database or collection to query against.
|
||||||
|
For example, archiving older data into a separate database for performance
|
||||||
|
reasons or writing functions that dynamically choose collections to write
|
||||||
|
a document to.
|
||||||
|
|
||||||
|
Switch Database
|
||||||
|
---------------
|
||||||
The :class:`~mongoengine.context_managers.switch_db` context manager allows
|
The :class:`~mongoengine.context_managers.switch_db` context manager allows
|
||||||
you to change the database alias for a given class allowing quick and easy
|
you to change the database alias for a given class allowing quick and easy
|
||||||
access to the same User document across databases::
|
access to the same User document across databases::
|
||||||
|
|
||||||
from mongoengine.context_managers import switch_db
|
from mongoengine.context_managers import switch_db
|
||||||
|
|
||||||
class User(Document):
|
class User(Document):
|
||||||
|
name = StringField()
|
||||||
|
|
||||||
|
meta = {'db_alias': 'user-db'}
|
||||||
|
|
||||||
|
with switch_db(User, 'archive-user-db') as User:
|
||||||
|
User(name='Ross').save() # Saves the 'archive-user-db'
|
||||||
|
|
||||||
|
|
||||||
|
Switch Collection
|
||||||
|
-----------------
|
||||||
|
The :func:`~mongoengine.context_managers.switch_collection` context manager
|
||||||
|
allows you to change the collection for a given class allowing quick and easy
|
||||||
|
access to the same Group document across collection::
|
||||||
|
|
||||||
|
from mongoengine.context_managers import switch_collection
|
||||||
|
|
||||||
|
class Group(Document):
|
||||||
name = StringField()
|
name = StringField()
|
||||||
|
|
||||||
meta = {"db_alias": "user-db"}
|
Group(name='test').save() # Saves in the default db
|
||||||
|
|
||||||
|
with switch_collection(Group, 'group2000') as Group:
|
||||||
|
Group(name='hello Group 2000 collection!').save() # Saves in group2000 collection
|
||||||
|
|
||||||
with switch_db(User, 'archive-user-db') as User:
|
|
||||||
User(name="Ross").save() # Saves the 'archive-user-db'
|
|
||||||
|
|
||||||
.. note:: Make sure any aliases have been registered with
|
.. note:: Make sure any aliases have been registered with
|
||||||
:func:`~mongoengine.register_connection` before using the context manager.
|
:func:`~mongoengine.register_connection` or :func:`~mongoengine.connect`
|
||||||
|
before using the context manager.
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ Defining documents
|
|||||||
In MongoDB, a **document** is roughly equivalent to a **row** in an RDBMS. When
|
In MongoDB, a **document** is roughly equivalent to a **row** in an RDBMS. When
|
||||||
working with relational databases, rows are stored in **tables**, which have a
|
working with relational databases, rows are stored in **tables**, which have a
|
||||||
strict **schema** that the rows follow. MongoDB stores documents in
|
strict **schema** that the rows follow. MongoDB stores documents in
|
||||||
**collections** rather than tables - the principle difference is that no schema
|
**collections** rather than tables --- the principal difference is that no schema
|
||||||
is enforced at a database level.
|
is enforced at a database level.
|
||||||
|
|
||||||
Defining a document's schema
|
Defining a document's schema
|
||||||
@@ -22,14 +22,14 @@ objects** as class attributes to the document class::
|
|||||||
|
|
||||||
class Page(Document):
|
class Page(Document):
|
||||||
title = StringField(max_length=200, required=True)
|
title = StringField(max_length=200, required=True)
|
||||||
date_modified = DateTimeField(default=datetime.datetime.now)
|
date_modified = DateTimeField(default=datetime.datetime.utcnow)
|
||||||
|
|
||||||
As BSON (the binary format for storing data in mongodb) is order dependent,
|
As BSON (the binary format for storing data in mongodb) is order dependent,
|
||||||
documents are serialized based on their field order.
|
documents are serialized based on their field order.
|
||||||
|
|
||||||
Dynamic document schemas
|
Dynamic document schemas
|
||||||
========================
|
========================
|
||||||
One of the benefits of MongoDb is dynamic schemas for a collection, whilst data
|
One of the benefits of MongoDB is dynamic schemas for a collection, whilst data
|
||||||
should be planned and organised (after all explicit is better than implicit!)
|
should be planned and organised (after all explicit is better than implicit!)
|
||||||
there are scenarios where having dynamic / expando style documents is desirable.
|
there are scenarios where having dynamic / expando style documents is desirable.
|
||||||
|
|
||||||
@@ -75,22 +75,33 @@ are as follows:
|
|||||||
* :class:`~mongoengine.fields.DynamicField`
|
* :class:`~mongoengine.fields.DynamicField`
|
||||||
* :class:`~mongoengine.fields.EmailField`
|
* :class:`~mongoengine.fields.EmailField`
|
||||||
* :class:`~mongoengine.fields.EmbeddedDocumentField`
|
* :class:`~mongoengine.fields.EmbeddedDocumentField`
|
||||||
|
* :class:`~mongoengine.fields.EmbeddedDocumentListField`
|
||||||
|
* :class:`~mongoengine.fields.EnumField`
|
||||||
* :class:`~mongoengine.fields.FileField`
|
* :class:`~mongoengine.fields.FileField`
|
||||||
* :class:`~mongoengine.fields.FloatField`
|
* :class:`~mongoengine.fields.FloatField`
|
||||||
* :class:`~mongoengine.fields.GenericEmbeddedDocumentField`
|
* :class:`~mongoengine.fields.GenericEmbeddedDocumentField`
|
||||||
* :class:`~mongoengine.fields.GenericReferenceField`
|
* :class:`~mongoengine.fields.GenericReferenceField`
|
||||||
|
* :class:`~mongoengine.fields.GenericLazyReferenceField`
|
||||||
* :class:`~mongoengine.fields.GeoPointField`
|
* :class:`~mongoengine.fields.GeoPointField`
|
||||||
* :class:`~mongoengine.fields.ImageField`
|
* :class:`~mongoengine.fields.ImageField`
|
||||||
* :class:`~mongoengine.fields.IntField`
|
* :class:`~mongoengine.fields.IntField`
|
||||||
* :class:`~mongoengine.fields.ListField`
|
* :class:`~mongoengine.fields.ListField`
|
||||||
|
* :class:`~mongoengine.fields.LongField`
|
||||||
* :class:`~mongoengine.fields.MapField`
|
* :class:`~mongoengine.fields.MapField`
|
||||||
* :class:`~mongoengine.fields.ObjectIdField`
|
* :class:`~mongoengine.fields.ObjectIdField`
|
||||||
* :class:`~mongoengine.fields.ReferenceField`
|
* :class:`~mongoengine.fields.ReferenceField`
|
||||||
|
* :class:`~mongoengine.fields.LazyReferenceField`
|
||||||
* :class:`~mongoengine.fields.SequenceField`
|
* :class:`~mongoengine.fields.SequenceField`
|
||||||
* :class:`~mongoengine.fields.SortedListField`
|
* :class:`~mongoengine.fields.SortedListField`
|
||||||
* :class:`~mongoengine.fields.StringField`
|
* :class:`~mongoengine.fields.StringField`
|
||||||
* :class:`~mongoengine.fields.URLField`
|
* :class:`~mongoengine.fields.URLField`
|
||||||
* :class:`~mongoengine.fields.UUIDField`
|
* :class:`~mongoengine.fields.UUIDField`
|
||||||
|
* :class:`~mongoengine.fields.PointField`
|
||||||
|
* :class:`~mongoengine.fields.LineStringField`
|
||||||
|
* :class:`~mongoengine.fields.PolygonField`
|
||||||
|
* :class:`~mongoengine.fields.MultiPointField`
|
||||||
|
* :class:`~mongoengine.fields.MultiLineStringField`
|
||||||
|
* :class:`~mongoengine.fields.MultiPolygonField`
|
||||||
|
|
||||||
Field arguments
|
Field arguments
|
||||||
---------------
|
---------------
|
||||||
@@ -108,7 +119,7 @@ arguments can be set on all fields:
|
|||||||
:attr:`default` (Default: None)
|
:attr:`default` (Default: None)
|
||||||
A value to use when no value is set for this field.
|
A value to use when no value is set for this field.
|
||||||
|
|
||||||
The definion of default parameters follow `the general rules on Python
|
The definition of default parameters follow `the general rules on Python
|
||||||
<http://docs.python.org/reference/compound_stmts.html#function-definitions>`__,
|
<http://docs.python.org/reference/compound_stmts.html#function-definitions>`__,
|
||||||
which means that some care should be taken when dealing with default mutable objects
|
which means that some care should be taken when dealing with default mutable objects
|
||||||
(like in :class:`~mongoengine.fields.ListField` or :class:`~mongoengine.fields.DictField`)::
|
(like in :class:`~mongoengine.fields.ListField` or :class:`~mongoengine.fields.DictField`)::
|
||||||
@@ -140,11 +151,13 @@ arguments can be set on all fields:
|
|||||||
When True, use this field as a primary key for the collection. `DictField`
|
When True, use this field as a primary key for the collection. `DictField`
|
||||||
and `EmbeddedDocuments` both support being the primary key for a document.
|
and `EmbeddedDocuments` both support being the primary key for a document.
|
||||||
|
|
||||||
|
.. note:: If set, this field is also accessible through the `pk` field.
|
||||||
|
|
||||||
:attr:`choices` (Default: None)
|
:attr:`choices` (Default: None)
|
||||||
An iterable (e.g. a list or tuple) of choices to which the value of this
|
An iterable (e.g. list, tuple or set) of choices to which the value of this
|
||||||
field should be limited.
|
field should be limited.
|
||||||
|
|
||||||
Can be either be a nested tuples of value (stored in mongo) and a
|
Can either be nested tuples of value (stored in mongo) and a
|
||||||
human readable key ::
|
human readable key ::
|
||||||
|
|
||||||
SIZE = (('S', 'Small'),
|
SIZE = (('S', 'Small'),
|
||||||
@@ -164,16 +177,31 @@ arguments can be set on all fields:
|
|||||||
class Shirt(Document):
|
class Shirt(Document):
|
||||||
size = StringField(max_length=3, choices=SIZE)
|
size = StringField(max_length=3, choices=SIZE)
|
||||||
|
|
||||||
:attr:`help_text` (Default: None)
|
:attr:`validation` (Optional)
|
||||||
Optional help text to output with the field - used by form libraries
|
A callable to validate the value of the field.
|
||||||
|
The callable takes the value as a parameter and should raise a ValidationError
|
||||||
|
if validation fails.
|
||||||
|
|
||||||
:attr:`verbose_name` (Default: None)
|
e.g.::
|
||||||
Optional human-readable name for the field - used by form libraries
|
|
||||||
|
def _not_empty(val):
|
||||||
|
if not val:
|
||||||
|
raise ValidationError('value can not be empty')
|
||||||
|
|
||||||
|
class Person(Document):
|
||||||
|
name = StringField(validation=_not_empty)
|
||||||
|
|
||||||
|
|
||||||
|
:attr:`**kwargs` (Optional)
|
||||||
|
You can supply additional metadata as arbitrary additional keyword
|
||||||
|
arguments. You can not override existing attributes, however. Common
|
||||||
|
choices include `help_text` and `verbose_name`, commonly used by form and
|
||||||
|
widget libraries.
|
||||||
|
|
||||||
|
|
||||||
List fields
|
List fields
|
||||||
-----------
|
-----------
|
||||||
MongoDB allows the storage of lists of items. To add a list of items to a
|
MongoDB allows storing lists of items. To add a list of items to a
|
||||||
:class:`~mongoengine.Document`, use the :class:`~mongoengine.fields.ListField` field
|
:class:`~mongoengine.Document`, use the :class:`~mongoengine.fields.ListField` field
|
||||||
type. :class:`~mongoengine.fields.ListField` takes another field object as its first
|
type. :class:`~mongoengine.fields.ListField` takes another field object as its first
|
||||||
argument, which specifies which type elements may be stored within the list::
|
argument, which specifies which type elements may be stored within the list::
|
||||||
@@ -205,9 +233,9 @@ document class as the first argument::
|
|||||||
|
|
||||||
Dictionary Fields
|
Dictionary Fields
|
||||||
-----------------
|
-----------------
|
||||||
Often, an embedded document may be used instead of a dictionary -- generally
|
Often, an embedded document may be used instead of a dictionary – generally
|
||||||
this is recommended as dictionaries don't support validation or custom field
|
embedded documents are recommended as dictionaries don’t support validation
|
||||||
types. However, sometimes you will not know the structure of what you want to
|
or custom field types. However, sometimes you will not know the structure of what you want to
|
||||||
store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate::
|
store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate::
|
||||||
|
|
||||||
class SurveyResponse(Document):
|
class SurveyResponse(Document):
|
||||||
@@ -215,7 +243,7 @@ store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate
|
|||||||
user = ReferenceField(User)
|
user = ReferenceField(User)
|
||||||
answers = DictField()
|
answers = DictField()
|
||||||
|
|
||||||
survey_response = SurveyResponse(date=datetime.now(), user=request.user)
|
survey_response = SurveyResponse(date=datetime.utcnow(), user=request.user)
|
||||||
response_form = ResponseForm(request.POST)
|
response_form = ResponseForm(request.POST)
|
||||||
survey_response.answers = response_form.cleaned_data()
|
survey_response.answers = response_form.cleaned_data()
|
||||||
survey_response.save()
|
survey_response.save()
|
||||||
@@ -307,12 +335,12 @@ reference with a delete rule specification. A delete rule is specified by
|
|||||||
supplying the :attr:`reverse_delete_rule` attributes on the
|
supplying the :attr:`reverse_delete_rule` attributes on the
|
||||||
:class:`ReferenceField` definition, like this::
|
:class:`ReferenceField` definition, like this::
|
||||||
|
|
||||||
class Employee(Document):
|
class ProfilePage(Document):
|
||||||
...
|
...
|
||||||
profile_page = ReferenceField('ProfilePage', reverse_delete_rule=mongoengine.NULLIFY)
|
employee = ReferenceField('Employee', reverse_delete_rule=mongoengine.CASCADE)
|
||||||
|
|
||||||
The declaration in this example means that when an :class:`Employee` object is
|
The declaration in this example means that when an :class:`Employee` object is
|
||||||
removed, the :class:`ProfilePage` that belongs to that employee is removed as
|
removed, the :class:`ProfilePage` that references that employee is removed as
|
||||||
well. If a whole batch of employees is removed, all profile pages that are
|
well. If a whole batch of employees is removed, all profile pages that are
|
||||||
linked are removed as well.
|
linked are removed as well.
|
||||||
|
|
||||||
@@ -325,10 +353,10 @@ Its value can take any of the following constants:
|
|||||||
Deletion is denied if there still exist references to the object being
|
Deletion is denied if there still exist references to the object being
|
||||||
deleted.
|
deleted.
|
||||||
:const:`mongoengine.NULLIFY`
|
:const:`mongoengine.NULLIFY`
|
||||||
Any object's fields still referring to the object being deleted are removed
|
Any object's fields still referring to the object being deleted are set to None
|
||||||
(using MongoDB's "unset" operation), effectively nullifying the relationship.
|
(using MongoDB's "unset" operation), effectively nullifying the relationship.
|
||||||
:const:`mongoengine.CASCADE`
|
:const:`mongoengine.CASCADE`
|
||||||
Any object containing fields that are refererring to the object being deleted
|
Any object containing fields that are referring to the object being deleted
|
||||||
are deleted first.
|
are deleted first.
|
||||||
:const:`mongoengine.PULL`
|
:const:`mongoengine.PULL`
|
||||||
Removes the reference to the object (using MongoDB's "pull" operation)
|
Removes the reference to the object (using MongoDB's "pull" operation)
|
||||||
@@ -352,11 +380,6 @@ Its value can take any of the following constants:
|
|||||||
In Django, be sure to put all apps that have such delete rule declarations in
|
In Django, be sure to put all apps that have such delete rule declarations in
|
||||||
their :file:`models.py` in the :const:`INSTALLED_APPS` tuple.
|
their :file:`models.py` in the :const:`INSTALLED_APPS` tuple.
|
||||||
|
|
||||||
|
|
||||||
.. warning::
|
|
||||||
Signals are not triggered when doing cascading updates / deletes - if this
|
|
||||||
is required you must manually handle the update / delete.
|
|
||||||
|
|
||||||
Generic reference fields
|
Generic reference fields
|
||||||
''''''''''''''''''''''''
|
''''''''''''''''''''''''
|
||||||
A second kind of reference field also exists,
|
A second kind of reference field also exists,
|
||||||
@@ -395,7 +418,7 @@ MongoEngine allows you to specify that a field should be unique across a
|
|||||||
collection by providing ``unique=True`` to a :class:`~mongoengine.fields.Field`\ 's
|
collection by providing ``unique=True`` to a :class:`~mongoengine.fields.Field`\ 's
|
||||||
constructor. If you try to save a document that has the same value for a unique
|
constructor. If you try to save a document that has the same value for a unique
|
||||||
field as a document that is already in the database, a
|
field as a document that is already in the database, a
|
||||||
:class:`~mongoengine.OperationError` will be raised. You may also specify
|
:class:`~mongoengine.NotUniqueError` will be raised. You may also specify
|
||||||
multi-field uniqueness constraints by using :attr:`unique_with`, which may be
|
multi-field uniqueness constraints by using :attr:`unique_with`, which may be
|
||||||
either a single field name, or a list or tuple of field names::
|
either a single field name, or a list or tuple of field names::
|
||||||
|
|
||||||
@@ -404,25 +427,12 @@ either a single field name, or a list or tuple of field names::
|
|||||||
first_name = StringField()
|
first_name = StringField()
|
||||||
last_name = StringField(unique_with='first_name')
|
last_name = StringField(unique_with='first_name')
|
||||||
|
|
||||||
Skipping Document validation on save
|
|
||||||
------------------------------------
|
|
||||||
You can also skip the whole document validation process by setting
|
|
||||||
``validate=False`` when calling the :meth:`~mongoengine.document.Document.save`
|
|
||||||
method::
|
|
||||||
|
|
||||||
class Recipient(Document):
|
|
||||||
name = StringField()
|
|
||||||
email = EmailField()
|
|
||||||
|
|
||||||
recipient = Recipient(name='admin', email='root@localhost')
|
|
||||||
recipient.save() # will raise a ValidationError while
|
|
||||||
recipient.save(validate=False) # won't
|
|
||||||
|
|
||||||
Document collections
|
Document collections
|
||||||
====================
|
====================
|
||||||
Document classes that inherit **directly** from :class:`~mongoengine.Document`
|
Document classes that inherit **directly** from :class:`~mongoengine.Document`
|
||||||
will have their own **collection** in the database. The name of the collection
|
will have their own **collection** in the database. The name of the collection
|
||||||
is by default the name of the class, coverted to lowercase (so in the example
|
is by default the name of the class, converted to lowercase (so in the example
|
||||||
above, the collection would be called `page`). If you need to change the name
|
above, the collection would be called `page`). If you need to change the name
|
||||||
of the collection (e.g. to use MongoEngine with an existing database), then
|
of the collection (e.g. to use MongoEngine with an existing database), then
|
||||||
create a class dictionary attribute called :attr:`meta` on your document, and
|
create a class dictionary attribute called :attr:`meta` on your document, and
|
||||||
@@ -439,8 +449,10 @@ A :class:`~mongoengine.Document` may use a **Capped Collection** by specifying
|
|||||||
:attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary.
|
:attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary.
|
||||||
:attr:`max_documents` is the maximum number of documents that is allowed to be
|
:attr:`max_documents` is the maximum number of documents that is allowed to be
|
||||||
stored in the collection, and :attr:`max_size` is the maximum size of the
|
stored in the collection, and :attr:`max_size` is the maximum size of the
|
||||||
collection in bytes. If :attr:`max_size` is not specified and
|
collection in bytes. :attr:`max_size` is rounded up to the next multiple of 256
|
||||||
:attr:`max_documents` is, :attr:`max_size` defaults to 10000000 bytes (10MB).
|
by MongoDB internally (and previously by mongoengine as well). Use a multiple of 256 to
|
||||||
|
avoid confusion. If :attr:`max_size` is not specified and
|
||||||
|
:attr:`max_documents` is, :attr:`max_size` defaults to 10485760 bytes (10MB).
|
||||||
The following example shows a :class:`Log` document that will be limited to
|
The following example shows a :class:`Log` document that will be limited to
|
||||||
1000 entries and 2MB of disk space::
|
1000 entries and 2MB of disk space::
|
||||||
|
|
||||||
@@ -457,18 +469,36 @@ You can specify indexes on collections to make querying faster. This is done
|
|||||||
by creating a list of index specifications called :attr:`indexes` in the
|
by creating a list of index specifications called :attr:`indexes` in the
|
||||||
:attr:`~mongoengine.Document.meta` dictionary, where an index specification may
|
:attr:`~mongoengine.Document.meta` dictionary, where an index specification may
|
||||||
either be a single field name, a tuple containing multiple field names, or a
|
either be a single field name, a tuple containing multiple field names, or a
|
||||||
dictionary containing a full index definition. A direction may be specified on
|
dictionary containing a full index definition.
|
||||||
fields by prefixing the field name with a **+** (for ascending) or a **-** sign
|
|
||||||
(for descending). Note that direction only matters on multi-field indexes. ::
|
A direction may be specified on fields by prefixing the field name with a
|
||||||
|
**+** (for ascending) or a **-** sign (for descending). Note that direction
|
||||||
|
only matters on multi-field indexes. Text indexes may be specified by prefixing
|
||||||
|
the field name with a **$**. Hashed indexes may be specified by prefixing
|
||||||
|
the field name with a **#**::
|
||||||
|
|
||||||
class Page(Document):
|
class Page(Document):
|
||||||
|
category = IntField()
|
||||||
title = StringField()
|
title = StringField()
|
||||||
rating = StringField()
|
rating = StringField()
|
||||||
|
created = DateTimeField()
|
||||||
meta = {
|
meta = {
|
||||||
'indexes': ['title', ('title', '-rating')]
|
'indexes': [
|
||||||
|
'title',
|
||||||
|
'$title', # text index
|
||||||
|
'#title', # hashed index
|
||||||
|
('title', '-rating'),
|
||||||
|
('category', '_cls'),
|
||||||
|
{
|
||||||
|
'fields': ['created'],
|
||||||
|
'expireAfterSeconds': 3600
|
||||||
|
}
|
||||||
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
If a dictionary is passed then the following options are available:
|
If a dictionary is passed then additional options become available. Valid options include,
|
||||||
|
but are not limited to:
|
||||||
|
|
||||||
|
|
||||||
:attr:`fields` (Default: None)
|
:attr:`fields` (Default: None)
|
||||||
The fields to index. Specified in the same format as described above.
|
The fields to index. Specified in the same format as described above.
|
||||||
@@ -489,8 +519,15 @@ If a dictionary is passed then the following options are available:
|
|||||||
Allows you to automatically expire data from a collection by setting the
|
Allows you to automatically expire data from a collection by setting the
|
||||||
time in seconds after which the field expires.
|
time in seconds after which the field expires.
|
||||||
|
|
||||||
|
:attr:`name` (Optional)
|
||||||
|
Allows you to specify a name for the index
|
||||||
|
|
||||||
|
:attr:`collation` (Optional)
|
||||||
|
Allows to create case insensitive indexes (MongoDB v3.4+ only)
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
|
Additional options are forwarded as **kwargs to pymongo's create_index method.
|
||||||
Inheritance adds extra fields indices see: :ref:`document-inheritance`.
|
Inheritance adds extra fields indices see: :ref:`document-inheritance`.
|
||||||
|
|
||||||
Global index default options
|
Global index default options
|
||||||
@@ -502,25 +539,28 @@ There are a few top level defaults for all indexes that can be set::
|
|||||||
title = StringField()
|
title = StringField()
|
||||||
rating = StringField()
|
rating = StringField()
|
||||||
meta = {
|
meta = {
|
||||||
'index_options': {},
|
'index_opts': {},
|
||||||
'index_background': True,
|
'index_background': True,
|
||||||
'index_drop_dups': True,
|
'index_cls': False,
|
||||||
'index_cls': False
|
'auto_create_index': True,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
:attr:`index_options` (Optional)
|
:attr:`index_opts` (Optional)
|
||||||
Set any default index options - see the `full options list <http://docs.mongodb.org/manual/reference/method/db.collection.ensureIndex/#db.collection.ensureIndex>`_
|
Set any default index options - see the `full options list <https://docs.mongodb.com/manual/reference/method/db.collection.createIndex/#db.collection.createIndex>`_
|
||||||
|
|
||||||
:attr:`index_background` (Optional)
|
:attr:`index_background` (Optional)
|
||||||
Set the default value for if an index should be indexed in the background
|
Set the default value for if an index should be indexed in the background
|
||||||
|
|
||||||
:attr:`index_drop_dups` (Optional)
|
|
||||||
Set the default value for if an index should drop duplicates
|
|
||||||
|
|
||||||
:attr:`index_cls` (Optional)
|
:attr:`index_cls` (Optional)
|
||||||
A way to turn off a specific index for _cls.
|
A way to turn off a specific index for _cls.
|
||||||
|
|
||||||
|
:attr:`auto_create_index` (Optional)
|
||||||
|
When this is True (default), MongoEngine will ensure that the correct
|
||||||
|
indexes exist in MongoDB each time a command is run. This can be disabled
|
||||||
|
in systems where indexes are managed separately. Disabling this will improve
|
||||||
|
performance.
|
||||||
|
|
||||||
|
|
||||||
Compound Indexes and Indexing sub documents
|
Compound Indexes and Indexing sub documents
|
||||||
-------------------------------------------
|
-------------------------------------------
|
||||||
@@ -531,6 +571,8 @@ field name to the index definition.
|
|||||||
Sometimes it's more efficient to index parts of Embedded / dictionary fields,
|
Sometimes it's more efficient to index parts of Embedded / dictionary fields,
|
||||||
in this case use 'dot' notation to identify the value to index eg: `rank.title`
|
in this case use 'dot' notation to identify the value to index eg: `rank.title`
|
||||||
|
|
||||||
|
.. _geospatial-indexes:
|
||||||
|
|
||||||
Geospatial indexes
|
Geospatial indexes
|
||||||
------------------
|
------------------
|
||||||
|
|
||||||
@@ -541,6 +583,9 @@ The following fields will explicitly add a "2dsphere" index:
|
|||||||
- :class:`~mongoengine.fields.PointField`
|
- :class:`~mongoengine.fields.PointField`
|
||||||
- :class:`~mongoengine.fields.LineStringField`
|
- :class:`~mongoengine.fields.LineStringField`
|
||||||
- :class:`~mongoengine.fields.PolygonField`
|
- :class:`~mongoengine.fields.PolygonField`
|
||||||
|
- :class:`~mongoengine.fields.MultiPointField`
|
||||||
|
- :class:`~mongoengine.fields.MultiLineStringField`
|
||||||
|
- :class:`~mongoengine.fields.MultiPolygonField`
|
||||||
|
|
||||||
As "2dsphere" indexes can be part of a compound index, you may not want the
|
As "2dsphere" indexes can be part of a compound index, you may not want the
|
||||||
automatic index but would prefer a compound index. In this example we turn off
|
automatic index but would prefer a compound index. In this example we turn off
|
||||||
@@ -588,7 +633,7 @@ collection after a given period. See the official
|
|||||||
documentation for more information. A common usecase might be session data::
|
documentation for more information. A common usecase might be session data::
|
||||||
|
|
||||||
class Session(Document):
|
class Session(Document):
|
||||||
created = DateTimeField(default=datetime.now)
|
created = DateTimeField(default=datetime.utcnow)
|
||||||
meta = {
|
meta = {
|
||||||
'indexes': [
|
'indexes': [
|
||||||
{'fields': ['created'], 'expireAfterSeconds': 3600}
|
{'fields': ['created'], 'expireAfterSeconds': 3600}
|
||||||
@@ -651,12 +696,17 @@ subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. ::
|
|||||||
Shard keys
|
Shard keys
|
||||||
==========
|
==========
|
||||||
|
|
||||||
If your collection is sharded, then you need to specify the shard key as a tuple,
|
If your collection is sharded by multiple keys, then you can improve shard
|
||||||
using the :attr:`shard_key` attribute of :attr:`-mongoengine.Document.meta`.
|
routing (and thus the performance of your application) by specifying the shard
|
||||||
This ensures that the shard key is sent with the query when calling the
|
key, using the :attr:`shard_key` attribute of
|
||||||
:meth:`~mongoengine.document.Document.save` or
|
:attr:`~mongoengine.Document.meta`. The shard key should be defined as a tuple.
|
||||||
:meth:`~mongoengine.document.Document.update` method on an existing
|
|
||||||
:class:`-mongoengine.Document` instance::
|
This ensures that the full shard key is sent with the query when calling
|
||||||
|
methods such as :meth:`~mongoengine.document.Document.save`,
|
||||||
|
:meth:`~mongoengine.document.Document.update`,
|
||||||
|
:meth:`~mongoengine.document.Document.modify`, or
|
||||||
|
:meth:`~mongoengine.document.Document.delete` on an existing
|
||||||
|
:class:`~mongoengine.Document` instance::
|
||||||
|
|
||||||
class LogEntry(Document):
|
class LogEntry(Document):
|
||||||
machine = StringField()
|
machine = StringField()
|
||||||
@@ -665,7 +715,8 @@ This ensures that the shard key is sent with the query when calling the
|
|||||||
data = StringField()
|
data = StringField()
|
||||||
|
|
||||||
meta = {
|
meta = {
|
||||||
'shard_key': ('machine', 'timestamp',)
|
'shard_key': ('machine', 'timestamp'),
|
||||||
|
'indexes': ('machine', 'timestamp'),
|
||||||
}
|
}
|
||||||
|
|
||||||
.. _document-inheritance:
|
.. _document-inheritance:
|
||||||
@@ -675,10 +726,10 @@ Document inheritance
|
|||||||
|
|
||||||
To create a specialised type of a :class:`~mongoengine.Document` you have
|
To create a specialised type of a :class:`~mongoengine.Document` you have
|
||||||
defined, you may subclass it and add any extra fields or methods you may need.
|
defined, you may subclass it and add any extra fields or methods you may need.
|
||||||
As this is new class is not a direct subclass of
|
As this new class is not a direct subclass of
|
||||||
:class:`~mongoengine.Document`, it will not be stored in its own collection; it
|
:class:`~mongoengine.Document`, it will not be stored in its own collection; it
|
||||||
will use the same collection as its superclass uses. This allows for more
|
will use the same collection as its superclass uses. This allows for more
|
||||||
convenient and efficient retrieval of related documents - all you need do is
|
convenient and efficient retrieval of related documents -- all you need do is
|
||||||
set :attr:`allow_inheritance` to True in the :attr:`meta` data for a
|
set :attr:`allow_inheritance` to True in the :attr:`meta` data for a
|
||||||
document.::
|
document.::
|
||||||
|
|
||||||
@@ -692,12 +743,36 @@ document.::
|
|||||||
class DatedPage(Page):
|
class DatedPage(Page):
|
||||||
date = DateTimeField()
|
date = DateTimeField()
|
||||||
|
|
||||||
.. note:: From 0.8 onwards you must declare :attr:`allow_inheritance` defaults
|
.. note:: From 0.8 onwards :attr:`allow_inheritance` defaults
|
||||||
to False, meaning you must set it to True to use inheritance.
|
to False, meaning you must set it to True to use inheritance.
|
||||||
|
|
||||||
|
Setting :attr:`allow_inheritance` to True should also be used in
|
||||||
|
:class:`~mongoengine.EmbeddedDocument` class in case you need to subclass it
|
||||||
|
|
||||||
|
When it comes to querying with :attr:`.objects()`, `Page.objects()` will query
|
||||||
|
both `Page` and `DatedPage` whereas querying `DatedPage` will only query the `DatedPage` documents.
|
||||||
|
Behind the scenes, MongoEngine deals with inheritance by adding a :attr:`_cls` attribute that contains
|
||||||
|
the class name in every document. When a document is loaded, MongoEngine checks
|
||||||
|
its :attr:`_cls` attribute and uses that class to construct the instance::
|
||||||
|
|
||||||
|
Page(title='a funky title').save()
|
||||||
|
DatedPage(title='another title', date=datetime.utcnow()).save()
|
||||||
|
|
||||||
|
print(Page.objects().count()) # 2
|
||||||
|
print(DatedPage.objects().count()) # 1
|
||||||
|
|
||||||
|
# print documents in their native form
|
||||||
|
# we remove 'id' to avoid polluting the output with unnecessary detail
|
||||||
|
qs = Page.objects.exclude('id').as_pymongo()
|
||||||
|
print(list(qs))
|
||||||
|
# [
|
||||||
|
# {'_cls': u'Page', 'title': 'a funky title'},
|
||||||
|
# {'_cls': u'Page.DatedPage', 'title': u'another title', 'date': datetime.datetime(2019, 12, 13, 20, 16, 59, 993000)}
|
||||||
|
# ]
|
||||||
|
|
||||||
Working with existing data
|
Working with existing data
|
||||||
--------------------------
|
--------------------------
|
||||||
As MongoEngine no longer defaults to needing :attr:`_cls` you can quickly and
|
As MongoEngine no longer defaults to needing :attr:`_cls`, you can quickly and
|
||||||
easily get working with existing data. Just define the document to match
|
easily get working with existing data. Just define the document to match
|
||||||
the expected schema in your database ::
|
the expected schema in your database ::
|
||||||
|
|
||||||
@@ -720,7 +795,7 @@ Abstract classes
|
|||||||
|
|
||||||
If you want to add some extra functionality to a group of Document classes but
|
If you want to add some extra functionality to a group of Document classes but
|
||||||
you don't need or want the overhead of inheritance you can use the
|
you don't need or want the overhead of inheritance you can use the
|
||||||
:attr:`abstract` attribute of :attr:`-mongoengine.Document.meta`.
|
:attr:`abstract` attribute of :attr:`~mongoengine.Document.meta`.
|
||||||
This won't turn on :ref:`document-inheritance` but will allow you to keep your
|
This won't turn on :ref:`document-inheritance` but will allow you to keep your
|
||||||
code DRY::
|
code DRY::
|
||||||
|
|
||||||
|
|||||||
@@ -2,7 +2,7 @@
|
|||||||
Documents instances
|
Documents instances
|
||||||
===================
|
===================
|
||||||
To create a new document object, create an instance of the relevant document
|
To create a new document object, create an instance of the relevant document
|
||||||
class, providing values for its fields as its constructor keyword arguments.
|
class, providing values for its fields as constructor keyword arguments.
|
||||||
You may provide values for any of the fields on the document::
|
You may provide values for any of the fields on the document::
|
||||||
|
|
||||||
>>> page = Page(title="Test Page")
|
>>> page = Page(title="Test Page")
|
||||||
@@ -32,49 +32,21 @@ already exist, then any changes will be updated atomically. For example::
|
|||||||
|
|
||||||
Changes to documents are tracked and on the whole perform ``set`` operations.
|
Changes to documents are tracked and on the whole perform ``set`` operations.
|
||||||
|
|
||||||
* ``list_field.push(0)`` - *sets* the resulting list
|
* ``list_field.push(0)`` --- *sets* the resulting list
|
||||||
* ``del(list_field)`` - *unsets* whole list
|
* ``del(list_field)`` --- *unsets* whole list
|
||||||
|
|
||||||
With lists it's preferable to use ``Doc.update(push__list_field=0)`` as
|
With lists it's preferable to use ``Doc.update(push__list_field=0)`` as
|
||||||
this stops the whole list being updated - stopping any race conditions.
|
this stops the whole list being updated --- stopping any race conditions.
|
||||||
|
|
||||||
.. seealso::
|
.. seealso::
|
||||||
:ref:`guide-atomic-updates`
|
:ref:`guide-atomic-updates`
|
||||||
|
|
||||||
Pre save data validation and cleaning
|
|
||||||
-------------------------------------
|
|
||||||
MongoEngine allows you to create custom cleaning rules for your documents when
|
|
||||||
calling :meth:`~mongoengine.Document.save`. By providing a custom
|
|
||||||
:meth:`~mongoengine.Document.clean` method you can do any pre validation / data
|
|
||||||
cleaning.
|
|
||||||
|
|
||||||
This might be useful if you want to ensure a default value based on other
|
|
||||||
document values for example::
|
|
||||||
|
|
||||||
class Essay(Document):
|
|
||||||
status = StringField(choices=('Published', 'Draft'), required=True)
|
|
||||||
pub_date = DateTimeField()
|
|
||||||
|
|
||||||
def clean(self):
|
|
||||||
"""Ensures that only published essays have a `pub_date` and
|
|
||||||
automatically sets the pub_date if published and not set"""
|
|
||||||
if self.status == 'Draft' and self.pub_date is not None:
|
|
||||||
msg = 'Draft entries should not have a publication date.'
|
|
||||||
raise ValidationError(msg)
|
|
||||||
# Set the pub_date for published items if not set.
|
|
||||||
if self.status == 'Published' and self.pub_date is None:
|
|
||||||
self.pub_date = datetime.now()
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
Cleaning is only called if validation is turned on and when calling
|
|
||||||
:meth:`~mongoengine.Document.save`.
|
|
||||||
|
|
||||||
Cascading Saves
|
Cascading Saves
|
||||||
---------------
|
---------------
|
||||||
If your document contains :class:`~mongoengine.fields.ReferenceField` or
|
If your document contains :class:`~mongoengine.fields.ReferenceField` or
|
||||||
:class:`~mongoengine.fields.GenericReferenceField` objects, then by default the
|
:class:`~mongoengine.fields.GenericReferenceField` objects, then by default the
|
||||||
:meth:`~mongoengine.Document.save` method will not save any changes to
|
:meth:`~mongoengine.Document.save` method will not save any changes to
|
||||||
those objects. If you want all references to also be saved also, noting each
|
those objects. If you want all references to be saved as well, noting each
|
||||||
save is a separate query, then passing :attr:`cascade` as True
|
save is a separate query, then passing :attr:`cascade` as True
|
||||||
to the save method will cascade any saves.
|
to the save method will cascade any saves.
|
||||||
|
|
||||||
@@ -113,12 +85,13 @@ you may still use :attr:`id` to access the primary key if you want::
|
|||||||
>>> bob.id == bob.email == 'bob@example.com'
|
>>> bob.id == bob.email == 'bob@example.com'
|
||||||
True
|
True
|
||||||
|
|
||||||
You can also access the document's "primary key" using the :attr:`pk` field; in
|
You can also access the document's "primary key" using the :attr:`pk` field,
|
||||||
is an alias to :attr:`id`::
|
it's an alias to :attr:`id`::
|
||||||
|
|
||||||
>>> page = Page(title="Another Test Page")
|
>>> page = Page(title="Another Test Page")
|
||||||
>>> page.save()
|
>>> page.save()
|
||||||
>>> page.id == page.pk
|
>>> page.id == page.pk
|
||||||
|
True
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
|
|||||||
@@ -10,8 +10,9 @@ Writing
|
|||||||
GridFS support comes in the form of the :class:`~mongoengine.fields.FileField` field
|
GridFS support comes in the form of the :class:`~mongoengine.fields.FileField` field
|
||||||
object. This field acts as a file-like object and provides a couple of
|
object. This field acts as a file-like object and provides a couple of
|
||||||
different ways of inserting and retrieving data. Arbitrary metadata such as
|
different ways of inserting and retrieving data. Arbitrary metadata such as
|
||||||
content type can also be stored alongside the files. In the following example,
|
content type can also be stored alongside the files. The object returned when accessing a
|
||||||
a document is created to store details about animals, including a photo::
|
FileField is a proxy to `Pymongo's GridFS <https://api.mongodb.com/python/current/examples/gridfs.html#gridfs-example>`_
|
||||||
|
In the following example, a document is created to store details about animals, including a photo::
|
||||||
|
|
||||||
class Animal(Document):
|
class Animal(Document):
|
||||||
genus = StringField()
|
genus = StringField()
|
||||||
@@ -20,8 +21,8 @@ a document is created to store details about animals, including a photo::
|
|||||||
|
|
||||||
marmot = Animal(genus='Marmota', family='Sciuridae')
|
marmot = Animal(genus='Marmota', family='Sciuridae')
|
||||||
|
|
||||||
marmot_photo = open('marmot.jpg', 'r')
|
with open('marmot.jpg', 'rb') as fd:
|
||||||
marmot.photo.put(marmot_photo, content_type = 'image/jpeg')
|
marmot.photo.put(fd, content_type = 'image/jpeg')
|
||||||
marmot.save()
|
marmot.save()
|
||||||
|
|
||||||
Retrieval
|
Retrieval
|
||||||
@@ -34,6 +35,20 @@ field. The file can also be retrieved just as easily::
|
|||||||
photo = marmot.photo.read()
|
photo = marmot.photo.read()
|
||||||
content_type = marmot.photo.content_type
|
content_type = marmot.photo.content_type
|
||||||
|
|
||||||
|
.. note:: If you need to read() the content of a file multiple times, you'll need to "rewind"
|
||||||
|
the file-like object using `seek`::
|
||||||
|
|
||||||
|
marmot = Animal.objects(genus='Marmota').first()
|
||||||
|
content1 = marmot.photo.read()
|
||||||
|
assert content1 != ""
|
||||||
|
|
||||||
|
content2 = marmot.photo.read() # will be empty
|
||||||
|
assert content2 == ""
|
||||||
|
|
||||||
|
marmot.photo.seek(0) # rewind the file by setting the current position of the cursor in the file to 0
|
||||||
|
content3 = marmot.photo.read()
|
||||||
|
assert content3 == content1
|
||||||
|
|
||||||
Streaming
|
Streaming
|
||||||
---------
|
---------
|
||||||
|
|
||||||
@@ -46,14 +61,15 @@ slightly different manner. First, a new file must be created by calling the
|
|||||||
marmot.photo.write('some_more_image_data')
|
marmot.photo.write('some_more_image_data')
|
||||||
marmot.photo.close()
|
marmot.photo.close()
|
||||||
|
|
||||||
marmot.photo.save()
|
marmot.save()
|
||||||
|
|
||||||
Deletion
|
Deletion
|
||||||
--------
|
--------
|
||||||
|
|
||||||
Deleting stored files is achieved with the :func:`delete` method::
|
Deleting stored files is achieved with the :func:`delete` method::
|
||||||
|
|
||||||
marmot.photo.delete()
|
marmot.photo.delete() # Deletes the GridFS document
|
||||||
|
marmot.save() # Saves the GridFS reference (being None) contained in the marmot instance
|
||||||
|
|
||||||
.. warning::
|
.. warning::
|
||||||
|
|
||||||
@@ -70,5 +86,6 @@ Replacing files
|
|||||||
Files can be replaced with the :func:`replace` method. This works just like
|
Files can be replaced with the :func:`replace` method. This works just like
|
||||||
the :func:`put` method so even metadata can (and should) be replaced::
|
the :func:`put` method so even metadata can (and should) be replaced::
|
||||||
|
|
||||||
another_marmot = open('another_marmot.png', 'r')
|
another_marmot = open('another_marmot.png', 'rb')
|
||||||
marmot.photo.replace(another_marmot, content_type='image/png')
|
marmot.photo.replace(another_marmot, content_type='image/png') # Replaces the GridFS document
|
||||||
|
marmot.save() # Replaces the GridFS reference contained in marmot instance
|
||||||
|
|||||||
@@ -10,5 +10,10 @@ User Guide
|
|||||||
defining-documents
|
defining-documents
|
||||||
document-instances
|
document-instances
|
||||||
querying
|
querying
|
||||||
|
validation
|
||||||
gridfs
|
gridfs
|
||||||
signals
|
signals
|
||||||
|
text-indexes
|
||||||
|
migration
|
||||||
|
logging-monitoring
|
||||||
|
mongomock
|
||||||
|
|||||||
@@ -2,17 +2,17 @@
|
|||||||
Installing MongoEngine
|
Installing MongoEngine
|
||||||
======================
|
======================
|
||||||
|
|
||||||
To use MongoEngine, you will need to download `MongoDB <http://mongodb.org/>`_
|
To use MongoEngine, you will need to download `MongoDB <http://mongodb.com/>`_
|
||||||
and ensure it is running in an accessible location. You will also need
|
and ensure it is running in an accessible location. You will also need
|
||||||
`PyMongo <http://api.mongodb.org/python>`_ to use MongoEngine, but if you
|
`PyMongo <http://api.mongodb.org/python>`_ to use MongoEngine, but if you
|
||||||
install MongoEngine using setuptools, then the dependencies will be handled for
|
install MongoEngine using setuptools, then the dependencies will be handled for
|
||||||
you.
|
you.
|
||||||
|
|
||||||
MongoEngine is available on PyPI, so to use it you can use :program:`pip`:
|
MongoEngine is available on PyPI, so you can use :program:`pip`:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ pip install mongoengine
|
$ python -m pip install mongoengine
|
||||||
|
|
||||||
Alternatively, if you don't have setuptools installed, `download it from PyPi
|
Alternatively, if you don't have setuptools installed, `download it from PyPi
|
||||||
<http://pypi.python.org/pypi/mongoengine/>`_ and run
|
<http://pypi.python.org/pypi/mongoengine/>`_ and run
|
||||||
|
|||||||
80
docs/guide/logging-monitoring.rst
Normal file
80
docs/guide/logging-monitoring.rst
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
==================
|
||||||
|
Logging/Monitoring
|
||||||
|
==================
|
||||||
|
|
||||||
|
It is possible to use `pymongo.monitoring <https://api.mongodb.com/python/current/api/pymongo/monitoring.html>`_ to monitor
|
||||||
|
the driver events (e.g: queries, connections, etc). This can be handy if you want to monitor the queries issued by
|
||||||
|
MongoEngine to the driver.
|
||||||
|
|
||||||
|
To use `pymongo.monitoring` with MongoEngine, you need to make sure that you are registering the listeners
|
||||||
|
**before** establishing the database connection (i.e. calling `connect`):
|
||||||
|
|
||||||
|
The following snippet provides a basic logging of all command events:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from pymongo import monitoring
|
||||||
|
from mongoengine import *
|
||||||
|
|
||||||
|
log = logging.getLogger()
|
||||||
|
log.setLevel(logging.DEBUG)
|
||||||
|
logging.basicConfig(level=logging.DEBUG)
|
||||||
|
|
||||||
|
|
||||||
|
class CommandLogger(monitoring.CommandListener):
|
||||||
|
|
||||||
|
def started(self, event):
|
||||||
|
log.debug("Command {0.command_name} with request id "
|
||||||
|
"{0.request_id} started on server "
|
||||||
|
"{0.connection_id}".format(event))
|
||||||
|
|
||||||
|
def succeeded(self, event):
|
||||||
|
log.debug("Command {0.command_name} with request id "
|
||||||
|
"{0.request_id} on server {0.connection_id} "
|
||||||
|
"succeeded in {0.duration_micros} "
|
||||||
|
"microseconds".format(event))
|
||||||
|
|
||||||
|
def failed(self, event):
|
||||||
|
log.debug("Command {0.command_name} with request id "
|
||||||
|
"{0.request_id} on server {0.connection_id} "
|
||||||
|
"failed in {0.duration_micros} "
|
||||||
|
"microseconds".format(event))
|
||||||
|
|
||||||
|
monitoring.register(CommandLogger())
|
||||||
|
|
||||||
|
|
||||||
|
class Jedi(Document):
|
||||||
|
name = StringField()
|
||||||
|
|
||||||
|
|
||||||
|
connect()
|
||||||
|
|
||||||
|
|
||||||
|
log.info('GO!')
|
||||||
|
|
||||||
|
log.info('Saving an item through MongoEngine...')
|
||||||
|
Jedi(name='Obi-Wan Kenobii').save()
|
||||||
|
|
||||||
|
log.info('Querying through MongoEngine...')
|
||||||
|
obiwan = Jedi.objects.first()
|
||||||
|
|
||||||
|
log.info('Updating through MongoEngine...')
|
||||||
|
obiwan.name = 'Obi-Wan Kenobi'
|
||||||
|
obiwan.save()
|
||||||
|
|
||||||
|
|
||||||
|
Executing this prints the following output::
|
||||||
|
|
||||||
|
INFO:root:GO!
|
||||||
|
INFO:root:Saving an item through MongoEngine...
|
||||||
|
DEBUG:root:Command insert with request id 1681692777 started on server ('localhost', 27017)
|
||||||
|
DEBUG:root:Command insert with request id 1681692777 on server ('localhost', 27017) succeeded in 562 microseconds
|
||||||
|
INFO:root:Querying through MongoEngine...
|
||||||
|
DEBUG:root:Command find with request id 1714636915 started on server ('localhost', 27017)
|
||||||
|
DEBUG:root:Command find with request id 1714636915 on server ('localhost', 27017) succeeded in 341 microseconds
|
||||||
|
INFO:root:Updating through MongoEngine...
|
||||||
|
DEBUG:root:Command update with request id 1957747793 started on server ('localhost', 27017)
|
||||||
|
DEBUG:root:Command update with request id 1957747793 on server ('localhost', 27017) succeeded in 455 microseconds
|
||||||
|
|
||||||
|
More details can of course be obtained by checking the `event` argument from the `CommandListener`.
|
||||||
267
docs/guide/migration.rst
Normal file
267
docs/guide/migration.rst
Normal file
@@ -0,0 +1,267 @@
|
|||||||
|
===================
|
||||||
|
Documents migration
|
||||||
|
===================
|
||||||
|
|
||||||
|
The structure of your documents and their associated mongoengine schemas are likely
|
||||||
|
to change over the lifetime of an application. This section provides guidance and
|
||||||
|
recommendations on how to deal with migrations.
|
||||||
|
|
||||||
|
Due to the very flexible nature of mongodb, migrations of models aren't trivial and
|
||||||
|
for people that know about `alembic` for `sqlalchemy`, there is unfortunately no equivalent
|
||||||
|
library that will manage the migration in an automatic fashion for mongoengine.
|
||||||
|
|
||||||
|
Example 1: Addition of a field
|
||||||
|
==============================
|
||||||
|
|
||||||
|
Let's start by taking a simple example of a model change and review the different option you
|
||||||
|
have to deal with the migration.
|
||||||
|
|
||||||
|
Let's assume we start with the following schema and save an instance:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
class User(Document):
|
||||||
|
name = StringField()
|
||||||
|
|
||||||
|
User(name="John Doe").save()
|
||||||
|
|
||||||
|
# print the objects as they exist in mongodb
|
||||||
|
print(User.objects().as_pymongo()) # [{u'_id': ObjectId('5d06b9c3d7c1f18db3e7c874'), u'name': u'John Doe'}]
|
||||||
|
|
||||||
|
On the next version of your application, let's now assume that a new field `enabled` gets added to the
|
||||||
|
existing ``User`` model with a `default=True`. Thus you simply update the ``User`` class to the following:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
class User(Document):
|
||||||
|
name = StringField(required=True)
|
||||||
|
            enabled = BooleanField(default=True)
|
||||||
|
|
||||||
|
Without applying any migration, we now reload an object from the database into the ``User`` class
|
||||||
|
and checks its `enabled` attribute:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
assert User.objects.count() == 1
|
||||||
|
user = User.objects().first()
|
||||||
|
assert user.enabled is True
|
||||||
|
assert User.objects(enabled=True).count() == 0 # uh?
|
||||||
|
assert User.objects(enabled=False).count() == 0 # uh?
|
||||||
|
|
||||||
|
# this is consistent with what we have in the database
|
||||||
|
# in fact, 'enabled' does not exist
|
||||||
|
        print(User.objects().as_pymongo().first()) # {u'_id': ObjectId('5d06b9c3d7c1f18db3e7c874'), u'name': u'John Doe'}
|
||||||
|
assert User.objects(enabled=None).count() == 1
|
||||||
|
|
||||||
|
As you can see, even if the document wasn't updated, mongoengine applies the default value seamlessly when it
|
||||||
|
loads the pymongo dict into a ``User`` instance. At first sight it looks like you don't need to migrate the
|
||||||
|
existing documents when adding new fields but this actually leads to inconsistencies when it comes to querying.
|
||||||
|
|
||||||
|
In fact, when querying, mongoengine isn't trying to account for the default value of the new field and so
|
||||||
|
if you don't actually migrate the existing documents, you are taking a risk that querying/updating
|
||||||
|
will be missing relevant record.
|
||||||
|
|
||||||
|
When adding fields/modifying default values, you can use any of the following to do the migration
|
||||||
|
as a standalone script:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
# Use mongoengine to set a default value for a given field
|
||||||
|
User.objects().update(enabled=True)
|
||||||
|
# or use pymongo
|
||||||
|
user_coll = User._get_collection()
|
||||||
|
user_coll.update_many({}, {'$set': {'enabled': True}})
|
||||||
|
|
||||||
|
|
||||||
|
Example 2: Inheritance change
|
||||||
|
=============================
|
||||||
|
|
||||||
|
Let's consider the following example:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
class Human(Document):
|
||||||
|
name = StringField()
|
||||||
|
meta = {"allow_inheritance": True}
|
||||||
|
|
||||||
|
class Jedi(Human):
|
||||||
|
dark_side = BooleanField()
|
||||||
|
light_saber_color = StringField()
|
||||||
|
|
||||||
|
Jedi(name="Darth Vader", dark_side=True, light_saber_color="red").save()
|
||||||
|
Jedi(name="Obi Wan Kenobi", dark_side=False, light_saber_color="blue").save()
|
||||||
|
|
||||||
|
assert Human.objects.count() == 2
|
||||||
|
assert Jedi.objects.count() == 2
|
||||||
|
|
||||||
|
# Let's check how these documents got stored in mongodb
|
||||||
|
print(Jedi.objects.as_pymongo())
|
||||||
|
# [
|
||||||
|
# {'_id': ObjectId('5fac4aaaf61d7fb06046e0f9'), '_cls': 'Human.Jedi', 'name': 'Darth Vader', 'dark_side': True, 'light_saber_color': 'red'},
|
||||||
|
# {'_id': ObjectId('5fac4ac4f61d7fb06046e0fa'), '_cls': 'Human.Jedi', 'name': 'Obi Wan Kenobi', 'dark_side': False, 'light_saber_color': 'blue'}
|
||||||
|
# ]
|
||||||
|
|
||||||
|
As you can observe, when you use inheritance, MongoEngine stores a field named '_cls' behind the scene to keep
|
||||||
|
track of the Document class.
|
||||||
|
|
||||||
|
Let's now take the scenario that you want to refactor the inheritance schema and:
|
||||||
|
- Have the Jedi's with dark_side=True/False become GoodJedi's/DarkSith
|
||||||
|
- get rid of the 'dark_side' field
|
||||||
|
|
||||||
|
move to the following schemas:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
# unchanged
|
||||||
|
class Human(Document):
|
||||||
|
name = StringField()
|
||||||
|
meta = {"allow_inheritance": True}
|
||||||
|
|
||||||
|
# attribute 'dark_side' removed
|
||||||
|
class GoodJedi(Human):
|
||||||
|
light_saber_color = StringField()
|
||||||
|
|
||||||
|
# new class
|
||||||
|
class BadSith(Human):
|
||||||
|
light_saber_color = StringField()
|
||||||
|
|
||||||
|
MongoEngine doesn't know about the change or how to map them with the existing data
|
||||||
|
so if you don't apply any migration, you will observe a strange behavior, as if the collection was suddenly
|
||||||
|
empty.
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
# As a reminder, the documents that we inserted
|
||||||
|
# have the _cls field = 'Human.Jedi'
|
||||||
|
|
||||||
|
# Following has no match
|
||||||
|
# because the query that is used behind the scene is
|
||||||
|
# filtering on {'_cls': 'Human.GoodJedi'}
|
||||||
|
assert GoodJedi.objects().count() == 0
|
||||||
|
|
||||||
|
# Following has also no match
|
||||||
|
# because it is filtering on {'_cls': {'$in': ('Human', 'Human.GoodJedi', 'Human.BadSith')}}
|
||||||
|
# which has no match
|
||||||
|
assert Human.objects.count() == 0
|
||||||
|
assert Human.objects.first() is None
|
||||||
|
|
||||||
|
# If we bypass MongoEngine and make use of underlying driver (PyMongo)
|
||||||
|
# we can see that the documents are there
|
||||||
|
humans_coll = Human._get_collection()
|
||||||
|
assert humans_coll.count_documents({}) == 2
|
||||||
|
# print first document
|
||||||
|
print(humans_coll.find_one())
|
||||||
|
# {'_id': ObjectId('5fac4aaaf61d7fb06046e0f9'), '_cls': 'Human.Jedi', 'name': 'Darth Vader', 'dark_side': True, 'light_saber_color': 'red'}
|
||||||
|
|
||||||
|
As you can see, first obvious problem is that we need to modify '_cls' values based on existing values of
|
||||||
|
'dark_side' documents.
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
humans_coll = Human._get_collection()
|
||||||
|
old_class = 'Human.Jedi'
|
||||||
|
good_jedi_class = 'Human.GoodJedi'
|
||||||
|
bad_sith_class = 'Human.BadSith'
|
||||||
|
humans_coll.update_many({'_cls': old_class, 'dark_side': False}, {'$set': {'_cls': good_jedi_class}})
|
||||||
|
humans_coll.update_many({'_cls': old_class, 'dark_side': True}, {'$set': {'_cls': bad_sith_class}})
|
||||||
|
|
||||||
|
Let's now check if querying improved in MongoEngine:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
assert GoodJedi.objects().count() == 1 # Hoorah!
|
||||||
|
assert BadSith.objects().count() == 1 # Hoorah!
|
||||||
|
assert Human.objects.count() == 2 # Hoorah!
|
||||||
|
|
||||||
|
# let's now check that documents load correctly
|
||||||
|
jedi = GoodJedi.objects().first()
|
||||||
|
# raises FieldDoesNotExist: The fields "{'dark_side'}" do not exist on the document "Human.GoodJedi"
|
||||||
|
|
||||||
|
In fact we only took care of renaming the _cls values but we haven't removed the 'dark_side' field
|
||||||
|
which does not exist anymore on the GoodJedi's and BadSith's models.
|
||||||
|
Let's remove the field from the collections:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
humans_coll = Human._get_collection()
|
||||||
|
humans_coll.update_many({}, {'$unset': {'dark_side': 1}})
|
||||||
|
|
||||||
|
.. note:: We did this migration in 2 different steps for the sake of example but it could have been combined
|
||||||
|
with the migration of the _cls fields: ::
|
||||||
|
|
||||||
|
humans_coll.update_many(
|
||||||
|
{'_cls': old_class, 'dark_side': False},
|
||||||
|
{
|
||||||
|
'$set': {'_cls': good_jedi_class},
|
||||||
|
'$unset': {'dark_side': 1}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
And verify that the documents now load correctly:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
jedi = GoodJedi.objects().first()
|
||||||
|
assert jedi.name == "Obi Wan Kenobi"
|
||||||
|
|
||||||
|
sith = BadSith.objects().first()
|
||||||
|
assert sith.name == "Darth Vader"
|
||||||
|
|
||||||
|
|
||||||
|
Another way of dealing with this migration is to iterate over
|
||||||
|
the documents and update/replace them one by one. This is way slower but
|
||||||
|
it is often useful for complex migrations of Document models.
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
for doc in humans_coll.find():
|
||||||
|
if doc['_cls'] == 'Human.Jedi':
|
||||||
|
doc['_cls'] = 'Human.BadSith' if doc['dark_side'] else 'Human.GoodJedi'
|
||||||
|
doc.pop('dark_side')
|
||||||
|
humans_coll.replace_one({'_id': doc['_id']}, doc)
|
||||||
|
|
||||||
|
.. warning:: Be aware of this `flaw <https://groups.google.com/g/mongodb-user/c/AFC1ia7MHzk>`_ if you modify documents while iterating
|
||||||
|
|
||||||
|
Recommendations
|
||||||
|
===============
|
||||||
|
|
||||||
|
- Write migration scripts whenever you do changes to the model schemas
|
||||||
|
- Using :class:`~mongoengine.DynamicDocument` or ``meta = {"strict": False}`` may help to avoid some migrations or to have the 2 versions of your application to co-exist.
|
||||||
|
- Write post-processing checks to verify that migration scripts worked. See below
|
||||||
|
|
||||||
|
Post-processing checks
|
||||||
|
======================
|
||||||
|
|
||||||
|
The following recipe can be used to sanity check a Document collection after you applied migration.
|
||||||
|
It does not make any assumption on what was migrated, it will fetch 1000 objects randomly and
|
||||||
|
run some quick checks on the documents to make sure the document looks OK. As it is, it will fail
|
||||||
|
on the first occurrence of an error but this is something that can be adapted based on your needs.
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
def get_random_oids(collection, sample_size):
|
||||||
|
pipeline = [{"$project": {'_id': 1}}, {"$sample": {"size": sample_size}}]
|
||||||
|
return [s['_id'] for s in collection.aggregate(pipeline)]
|
||||||
|
|
||||||
|
def get_random_documents(DocCls, sample_size):
|
||||||
|
doc_collection = DocCls._get_collection()
|
||||||
|
random_oids = get_random_oids(doc_collection, sample_size)
|
||||||
|
return DocCls.objects(id__in=random_oids)
|
||||||
|
|
||||||
|
def check_documents(DocCls, sample_size):
|
||||||
|
for doc in get_random_documents(DocCls, sample_size):
|
||||||
|
# general validation (types and values)
|
||||||
|
doc.validate()
|
||||||
|
|
||||||
|
# load all subfields,
|
||||||
|
# this may trigger additional queries if you have ReferenceFields
|
||||||
|
# so it may be slow
|
||||||
|
for field in doc._fields:
|
||||||
|
try:
|
||||||
|
getattr(doc, field)
|
||||||
|
except Exception:
|
||||||
|
LOG.warning(f"Could not load field {field} in Document {doc.id}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
check_documents(Human, sample_size=1000)
|
||||||
48
docs/guide/mongomock.rst
Normal file
48
docs/guide/mongomock.rst
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
==============================
|
||||||
|
Use mongomock for testing
|
||||||
|
==============================
|
||||||
|
|
||||||
|
`mongomock <https://github.com/vmalloc/mongomock/>`_ is a package to do just
|
||||||
|
what the name implies, mocking a mongo database.
|
||||||
|
|
||||||
|
To use with mongoengine, simply specify mongomock when connecting with
|
||||||
|
mongoengine:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
connect('mongoenginetest', host='mongomock://localhost')
|
||||||
|
conn = get_connection()
|
||||||
|
|
||||||
|
or with an alias:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
connect('mongoenginetest', host='mongomock://localhost', alias='testdb')
|
||||||
|
conn = get_connection('testdb')
|
||||||
|
|
||||||
|
Example of test file:
|
||||||
|
---------------------
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
from mongoengine import connect, disconnect
|
||||||
|
|
||||||
|
class Person(Document):
|
||||||
|
name = StringField()
|
||||||
|
|
||||||
|
class TestPerson(unittest.TestCase):
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def setUpClass(cls):
|
||||||
|
connect('mongoenginetest', host='mongomock://localhost')
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def tearDownClass(cls):
|
||||||
|
disconnect()
|
||||||
|
|
||||||
|
def test_thing(self):
|
||||||
|
pers = Person(name='John')
|
||||||
|
pers.save()
|
||||||
|
|
||||||
|
fresh_pers = Person.objects().first()
|
||||||
|
assert fresh_pers.name == 'John'
|
||||||
@@ -17,7 +17,7 @@ fetch documents from the database::
|
|||||||
|
|
||||||
As of MongoEngine 0.8 the querysets utilise a local cache. So iterating
|
As of MongoEngine 0.8 the querysets utilise a local cache. So iterating
|
||||||
it multiple times will only cause a single query. If this is not the
|
it multiple times will only cause a single query. If this is not the
|
||||||
desired behavour you can call :class:`~mongoengine.QuerySet.no_cache`
|
desired behaviour you can call :class:`~mongoengine.QuerySet.no_cache`
|
||||||
(version **0.8.3+**) to return a non-caching queryset.
|
(version **0.8.3+**) to return a non-caching queryset.
|
||||||
|
|
||||||
Filtering queries
|
Filtering queries
|
||||||
@@ -39,10 +39,18 @@ syntax::
|
|||||||
# been written by a user whose 'country' field is set to 'uk'
|
# been written by a user whose 'country' field is set to 'uk'
|
||||||
uk_pages = Page.objects(author__country='uk')
|
uk_pages = Page.objects(author__country='uk')
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
    (version **0.9.1+**) if your field name is like a mongodb operator name (for example
|
||||||
|
type, lte, lt...) and you want to place it at the end of lookup keyword
|
||||||
|
    mongoengine automatically prepends $ to it. To avoid this use __ at the end of
|
||||||
|
your lookup keyword. For example if your field name is ``type`` and you want to
|
||||||
|
query by this field you must use ``.objects(user__type__="admin")`` instead of
|
||||||
|
``.objects(user__type="admin")``
|
||||||
|
|
||||||
Query operators
|
Query operators
|
||||||
===============
|
===============
|
||||||
Operators other than equality may also be used in queries; just attach the
|
Operators other than equality may also be used in queries --- just attach the
|
||||||
operator name to a key with a double-underscore::
|
operator name to a key with a double-underscore::
|
||||||
|
|
||||||
# Only find users whose age is 18 or less
|
# Only find users whose age is 18 or less
|
||||||
@@ -56,7 +64,7 @@ Available operators are as follows:
|
|||||||
* ``gt`` -- greater than
|
* ``gt`` -- greater than
|
||||||
* ``gte`` -- greater than or equal to
|
* ``gte`` -- greater than or equal to
|
||||||
* ``not`` -- negate a standard check, may be used before other operators (e.g.
|
* ``not`` -- negate a standard check, may be used before other operators (e.g.
|
||||||
``Q(age__not__mod=5)``)
|
``Q(age__not__mod=(5, 0))``)
|
||||||
* ``in`` -- value is in list (a list of values should be provided)
|
* ``in`` -- value is in list (a list of values should be provided)
|
||||||
* ``nin`` -- value is not in list (a list of values should be provided)
|
* ``nin`` -- value is not in list (a list of values should be provided)
|
||||||
* ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values
|
* ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values
|
||||||
@@ -84,19 +92,20 @@ expressions:
|
|||||||
Geo queries
|
Geo queries
|
||||||
-----------
|
-----------
|
||||||
|
|
||||||
There are a few special operators for performing geographical queries. The following
|
There are a few special operators for performing geographical queries.
|
||||||
were added in 0.8 for: :class:`~mongoengine.fields.PointField`,
|
The following were added in MongoEngine 0.8 for
|
||||||
|
:class:`~mongoengine.fields.PointField`,
|
||||||
:class:`~mongoengine.fields.LineStringField` and
|
:class:`~mongoengine.fields.LineStringField` and
|
||||||
:class:`~mongoengine.fields.PolygonField`:
|
:class:`~mongoengine.fields.PolygonField`:
|
||||||
|
|
||||||
* ``geo_within`` -- Check if a geometry is within a polygon. For ease of use
|
* ``geo_within`` -- check if a geometry is within a polygon. For ease of use
|
||||||
it accepts either a geojson geometry or just the polygon coordinates eg::
|
it accepts either a geojson geometry or just the polygon coordinates eg::
|
||||||
|
|
||||||
loc.objects(point__geo_with=[[[40, 5], [40, 6], [41, 6], [40, 5]]])
|
loc.objects(point__geo_within=[[[40, 5], [40, 6], [41, 6], [40, 5]]])
|
||||||
loc.objects(point__geo_with={"type": "Polygon",
|
loc.objects(point__geo_within={"type": "Polygon",
|
||||||
"coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]})
|
"coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]})
|
||||||
|
|
||||||
* ``geo_within_box`` - simplified geo_within searching with a box eg::
|
* ``geo_within_box`` -- simplified geo_within searching with a box eg::
|
||||||
|
|
||||||
loc.objects(point__geo_within_box=[(-125.0, 35.0), (-100.0, 40.0)])
|
loc.objects(point__geo_within_box=[(-125.0, 35.0), (-100.0, 40.0)])
|
||||||
loc.objects(point__geo_within_box=[<bottom left coordinates>, <upper right coordinates>])
|
loc.objects(point__geo_within_box=[<bottom left coordinates>, <upper right coordinates>])
|
||||||
@@ -132,23 +141,22 @@ were added in 0.8 for: :class:`~mongoengine.fields.PointField`,
|
|||||||
loc.objects(poly__geo_intersects={"type": "Polygon",
|
loc.objects(poly__geo_intersects={"type": "Polygon",
|
||||||
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]})
|
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]})
|
||||||
|
|
||||||
* ``near`` -- Find all the locations near a given point::
|
* ``near`` -- find all the locations near a given point::
|
||||||
|
|
||||||
loc.objects(point__near=[40, 5])
|
loc.objects(point__near=[40, 5])
|
||||||
loc.objects(point__near={"type": "Point", "coordinates": [40, 5]})
|
loc.objects(point__near={"type": "Point", "coordinates": [40, 5]})
|
||||||
|
|
||||||
|
You can also set the maximum and/or the minimum distance in meters as well::
|
||||||
You can also set the maximum distance in meters as well::
|
|
||||||
|
|
||||||
loc.objects(point__near=[40, 5], point__max_distance=1000)
|
loc.objects(point__near=[40, 5], point__max_distance=1000)
|
||||||
|
loc.objects(point__near=[40, 5], point__min_distance=100)
|
||||||
|
|
||||||
The older 2D indexes are still supported with the
|
The older 2D indexes are still supported with the
|
||||||
:class:`~mongoengine.fields.GeoPointField`:
|
:class:`~mongoengine.fields.GeoPointField`:
|
||||||
|
|
||||||
* ``within_distance`` -- provide a list containing a point and a maximum
|
* ``within_distance`` -- provide a list containing a point and a maximum
|
||||||
distance (e.g. [(41.342, -87.653), 5])
|
distance (e.g. [(41.342, -87.653), 5])
|
||||||
* ``within_spherical_distance`` -- Same as above but using the spherical geo model
|
* ``within_spherical_distance`` -- same as above but using the spherical geo model
|
||||||
(e.g. [(41.342, -87.653), 5/earth_radius])
|
(e.g. [(41.342, -87.653), 5/earth_radius])
|
||||||
* ``near`` -- order the documents by how close they are to a given point
|
* ``near`` -- order the documents by how close they are to a given point
|
||||||
* ``near_sphere`` -- Same as above but using the spherical geo model
|
* ``near_sphere`` -- Same as above but using the spherical geo model
|
||||||
@@ -161,7 +169,8 @@ The older 2D indexes are still supported with the
|
|||||||
|
|
||||||
* ``max_distance`` -- can be added to your location queries to set a maximum
|
* ``max_distance`` -- can be added to your location queries to set a maximum
|
||||||
distance.
|
distance.
|
||||||
|
* ``min_distance`` -- can be added to your location queries to set a minimum
|
||||||
|
distance.
|
||||||
|
|
||||||
Querying lists
|
Querying lists
|
||||||
--------------
|
--------------
|
||||||
@@ -198,12 +207,14 @@ However, this doesn't map well to the syntax so you can also use a capital S ins
|
|||||||
|
|
||||||
Post.objects(comments__by="joe").update(inc__comments__S__votes=1)
|
Post.objects(comments__by="joe").update(inc__comments__S__votes=1)
|
||||||
|
|
||||||
.. note:: Due to Mongo currently the $ operator only applies to the first matched item in the query.
|
.. note::
|
||||||
|
Due to :program:`Mongo`, currently the $ operator only applies to the
|
||||||
|
first matched item in the query.
|
||||||
|
|
||||||
|
|
||||||
Raw queries
|
Raw queries
|
||||||
-----------
|
-----------
|
||||||
It is possible to provide a raw PyMongo query as a query parameter, which will
|
It is possible to provide a raw :mod:`PyMongo` query as a query parameter, which will
|
||||||
be integrated directly into the query. This is done using the ``__raw__``
|
be integrated directly into the query. This is done using the ``__raw__``
|
||||||
keyword argument::
|
keyword argument::
|
||||||
|
|
||||||
@@ -211,14 +222,26 @@ keyword argument::
|
|||||||
|
|
||||||
.. versionadded:: 0.4
|
.. versionadded:: 0.4
|
||||||
|
|
||||||
|
Sorting/Ordering results
|
||||||
|
========================
|
||||||
|
It is possible to order the results by 1 or more keys using :meth:`~mongoengine.queryset.QuerySet.order_by`.
|
||||||
|
The order may be specified by prepending each of the keys by "+" or "-". Ascending order is assumed if there's no prefix.::
|
||||||
|
|
||||||
|
# Order by ascending date
|
||||||
|
blogs = BlogPost.objects().order_by('date') # equivalent to .order_by('+date')
|
||||||
|
|
||||||
|
# Order by ascending date first, then descending title
|
||||||
|
blogs = BlogPost.objects().order_by('+date', '-title')
|
||||||
|
|
||||||
|
|
||||||
Limiting and skipping results
|
Limiting and skipping results
|
||||||
=============================
|
=============================
|
||||||
Just as with traditional ORMs, you may limit the number of results returned, or
|
Just as with traditional ORMs, you may limit the number of results returned or
|
||||||
skip a number of results in your query.
|
skip a number of results in your query.
|
||||||
:meth:`~mongoengine.queryset.QuerySet.limit` and
|
:meth:`~mongoengine.queryset.QuerySet.limit` and
|
||||||
:meth:`~mongoengine.queryset.QuerySet.skip` and methods are available on
|
:meth:`~mongoengine.queryset.QuerySet.skip` and methods are available on
|
||||||
:class:`~mongoengine.queryset.QuerySet` objects, but the prefered syntax for
|
:class:`~mongoengine.queryset.QuerySet` objects, but the `array-slicing` syntax
|
||||||
achieving this is using array-slicing syntax::
|
is preferred for achieving this::
|
||||||
|
|
||||||
# Only the first 5 people
|
# Only the first 5 people
|
||||||
users = User.objects[:5]
|
users = User.objects[:5]
|
||||||
@@ -226,7 +249,7 @@ achieving this is using array-slicing syntax::
|
|||||||
# All except for the first 5 people
|
# All except for the first 5 people
|
||||||
users = User.objects[5:]
|
users = User.objects[5:]
|
||||||
|
|
||||||
# 5 users, starting from the 10th user found
|
# 5 users, starting from the 11th user found
|
||||||
users = User.objects[10:15]
|
users = User.objects[10:15]
|
||||||
|
|
||||||
You may also index the query to retrieve a single result. If an item at that
|
You may also index the query to retrieve a single result. If an item at that
|
||||||
@@ -252,23 +275,17 @@ To retrieve a result that should be unique in the collection, use
|
|||||||
no document matches the query, and
|
no document matches the query, and
|
||||||
:class:`~mongoengine.queryset.MultipleObjectsReturned`
|
:class:`~mongoengine.queryset.MultipleObjectsReturned`
|
||||||
if more than one document matched the query. These exceptions are merged into
|
if more than one document matched the query. These exceptions are merged into
|
||||||
your document defintions eg: `MyDoc.DoesNotExist`
|
your document definitions eg: `MyDoc.DoesNotExist`
|
||||||
|
|
||||||
A variation of this method exists,
|
A variation of this method, get_or_create() existed, but it was unsafe. It
|
||||||
:meth:`~mongoengine.queryset.Queryset.get_or_create`, that will create a new
|
could not be made safe, because there are no transactions in mongoDB. Other
|
||||||
document with the query arguments if no documents match the query. An
|
approaches should be investigated, to ensure you don't accidentally duplicate
|
||||||
additional keyword argument, :attr:`defaults` may be provided, which will be
|
data when using something similar to this method. Therefore it was deprecated
|
||||||
used as default values for the new document, in the case that it should need
|
in 0.8 and removed in 0.10.
|
||||||
to be created::
|
|
||||||
|
|
||||||
>>> a, created = User.objects.get_or_create(name='User A', defaults={'age': 30})
|
|
||||||
>>> b, created = User.objects.get_or_create(name='User A', defaults={'age': 40})
|
|
||||||
>>> a.name == b.name and a.age == b.age
|
|
||||||
True
|
|
||||||
|
|
||||||
Default Document queries
|
Default Document queries
|
||||||
========================
|
========================
|
||||||
By default, the objects :attr:`~mongoengine.Document.objects` attribute on a
|
By default, the objects :attr:`~Document.objects` attribute on a
|
||||||
document returns a :class:`~mongoengine.queryset.QuerySet` that doesn't filter
|
document returns a :class:`~mongoengine.queryset.QuerySet` that doesn't filter
|
||||||
the collection -- it returns all objects. This may be changed by defining a
|
the collection -- it returns all objects. This may be changed by defining a
|
||||||
method on a document that modifies a queryset. The method should accept two
|
method on a document that modifies a queryset. The method should accept two
|
||||||
@@ -311,7 +328,7 @@ Should you want to add custom methods for interacting with or filtering
|
|||||||
documents, extending the :class:`~mongoengine.queryset.QuerySet` class may be
|
documents, extending the :class:`~mongoengine.queryset.QuerySet` class may be
|
||||||
the way to go. To use a custom :class:`~mongoengine.queryset.QuerySet` class on
|
the way to go. To use a custom :class:`~mongoengine.queryset.QuerySet` class on
|
||||||
a document, set ``queryset_class`` to the custom class in a
|
a document, set ``queryset_class`` to the custom class in a
|
||||||
:class:`~mongoengine.Document`\ s ``meta`` dictionary::
|
:class:`~mongoengine.Document`'s ``meta`` dictionary::
|
||||||
|
|
||||||
class AwesomerQuerySet(QuerySet):
|
class AwesomerQuerySet(QuerySet):
|
||||||
|
|
||||||
@@ -335,12 +352,19 @@ Javascript code that is executed on the database server.
|
|||||||
|
|
||||||
Counting results
|
Counting results
|
||||||
----------------
|
----------------
|
||||||
Just as with limiting and skipping results, there is a method on
|
Just as with limiting and skipping results, there is a method on a
|
||||||
:class:`~mongoengine.queryset.QuerySet` objects --
|
:class:`~mongoengine.queryset.QuerySet` object --
|
||||||
:meth:`~mongoengine.queryset.QuerySet.count`, but there is also a more Pythonic
|
:meth:`~mongoengine.queryset.QuerySet.count`::
|
||||||
way of achieving this::
|
|
||||||
|
|
||||||
num_users = len(User.objects)
|
num_users = User.objects.count()
|
||||||
|
|
||||||
|
You could technically use ``len(User.objects)`` to get the same result, but it
|
||||||
|
would be significantly slower than :meth:`~mongoengine.queryset.QuerySet.count`.
|
||||||
|
When you execute a server-side count query, you let MongoDB do the heavy
|
||||||
|
lifting and you receive a single integer over the wire. Meanwhile, ``len()``
|
||||||
|
retrieves all the results, places them in a local cache, and finally counts
|
||||||
|
them. If we compare the performance of the two operations, ``len()`` is much slower
|
||||||
|
than :meth:`~mongoengine.queryset.QuerySet.count`.
|
||||||
|
|
||||||
Further aggregation
|
Further aggregation
|
||||||
-------------------
|
-------------------
|
||||||
@@ -374,6 +398,25 @@ would be generating "tag-clouds"::
|
|||||||
top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10]
|
top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10]
|
||||||
|
|
||||||
|
|
||||||
|
MongoDB aggregation API
|
||||||
|
-----------------------
|
||||||
|
If you need to run aggregation pipelines, MongoEngine provides an entry point to `Pymongo's aggregation framework <https://api.mongodb.com/python/current/examples/aggregation.html#aggregation-framework>`_
|
||||||
|
through :meth:`~mongoengine.queryset.QuerySet.aggregate`. Check out Pymongo's documentation for the syntax and pipeline.
|
||||||
|
An example of its use would be::
|
||||||
|
|
||||||
|
class Person(Document):
|
||||||
|
name = StringField()
|
||||||
|
|
||||||
|
Person(name='John').save()
|
||||||
|
Person(name='Bob').save()
|
||||||
|
|
||||||
|
pipeline = [
|
||||||
|
{"$sort" : {"name" : -1}},
|
||||||
|
{"$project": {"_id": 0, "name": {"$toUpper": "$name"}}}
|
||||||
|
]
|
||||||
|
data = Person.objects().aggregate(pipeline)
|
||||||
|
assert data == [{'name': 'BOB'}, {'name': 'JOHN'}]
|
||||||
|
|
||||||
Query efficiency and performance
|
Query efficiency and performance
|
||||||
================================
|
================================
|
||||||
|
|
||||||
@@ -444,14 +487,14 @@ data. To turn off dereferencing of the results of a query use
|
|||||||
:func:`~mongoengine.queryset.QuerySet.no_dereference` on the queryset like so::
|
:func:`~mongoengine.queryset.QuerySet.no_dereference` on the queryset like so::
|
||||||
|
|
||||||
post = Post.objects.no_dereference().first()
|
post = Post.objects.no_dereference().first()
|
||||||
assert(isinstance(post.author, ObjectId))
|
assert(isinstance(post.author, DBRef))
|
||||||
|
|
||||||
You can also turn off all dereferencing for a fixed period by using the
|
You can also turn off all dereferencing for a fixed period by using the
|
||||||
:class:`~mongoengine.context_managers.no_dereference` context manager::
|
:class:`~mongoengine.context_managers.no_dereference` context manager::
|
||||||
|
|
||||||
with no_dereference(Post) as Post:
|
with no_dereference(Post) as Post:
|
||||||
post = Post.objects.first()
|
post = Post.objects.first()
|
||||||
assert(isinstance(post.author, ObjectId))
|
assert(isinstance(post.author, DBRef))
|
||||||
|
|
||||||
# Outside the context manager dereferencing occurs.
|
# Outside the context manager dereferencing occurs.
|
||||||
assert(isinstance(post.author, User))
|
assert(isinstance(post.author, User))
|
||||||
@@ -472,6 +515,8 @@ operators. To use a :class:`~mongoengine.queryset.Q` object, pass it in as the
|
|||||||
first positional argument to :attr:`Document.objects` when you filter it by
|
first positional argument to :attr:`Document.objects` when you filter it by
|
||||||
calling it with keyword arguments::
|
calling it with keyword arguments::
|
||||||
|
|
||||||
|
from mongoengine.queryset.visitor import Q
|
||||||
|
|
||||||
# Get published posts
|
# Get published posts
|
||||||
Post.objects(Q(published=True) | Q(publish_date__lte=datetime.now()))
|
Post.objects(Q(published=True) | Q(publish_date__lte=datetime.now()))
|
||||||
|
|
||||||
@@ -488,22 +533,28 @@ calling it with keyword arguments::
|
|||||||
Atomic updates
|
Atomic updates
|
||||||
==============
|
==============
|
||||||
Documents may be updated atomically by using the
|
Documents may be updated atomically by using the
|
||||||
:meth:`~mongoengine.queryset.QuerySet.update_one` and
|
:meth:`~mongoengine.queryset.QuerySet.update_one`,
|
||||||
:meth:`~mongoengine.queryset.QuerySet.update` methods on a
|
:meth:`~mongoengine.queryset.QuerySet.update` and
|
||||||
:meth:`~mongoengine.queryset.QuerySet`. There are several different "modifiers"
|
:meth:`~mongoengine.queryset.QuerySet.modify` methods on a
|
||||||
that you may use with these methods:
|
:class:`~mongoengine.queryset.QuerySet` or
|
||||||
|
:meth:`~mongoengine.Document.modify` and
|
||||||
|
:meth:`~mongoengine.Document.save` (with :attr:`save_condition` argument) on a
|
||||||
|
:class:`~mongoengine.Document`.
|
||||||
|
There are several different "modifiers" that you may use with these methods:
|
||||||
|
|
||||||
* ``set`` -- set a particular value
|
* ``set`` -- set a particular value
|
||||||
* ``unset`` -- delete a particular value (since MongoDB v1.3+)
|
* ``unset`` -- delete a particular value (since MongoDB v1.3)
|
||||||
* ``inc`` -- increment a value by a given amount
|
* ``inc`` -- increment a value by a given amount
|
||||||
* ``dec`` -- decrement a value by a given amount
|
* ``dec`` -- decrement a value by a given amount
|
||||||
* ``push`` -- append a value to a list
|
* ``push`` -- append a value to a list
|
||||||
* ``push_all`` -- append several values to a list
|
* ``push_all`` -- append several values to a list
|
||||||
* ``pop`` -- remove the first or last element of a list
|
* ``pop`` -- remove the first or last element of a list `depending on the value`_
|
||||||
* ``pull`` -- remove a value from a list
|
* ``pull`` -- remove a value from a list
|
||||||
* ``pull_all`` -- remove several values from a list
|
* ``pull_all`` -- remove several values from a list
|
||||||
* ``add_to_set`` -- add value to a list only if its not in the list already
|
* ``add_to_set`` -- add value to a list only if its not in the list already
|
||||||
|
|
||||||
|
.. _depending on the value: http://docs.mongodb.org/manual/reference/operator/update/pop/
|
||||||
|
|
||||||
The syntax for atomic updates is similar to the querying syntax, but the
|
The syntax for atomic updates is similar to the querying syntax, but the
|
||||||
modifier comes before the field, not after it::
|
modifier comes before the field, not after it::
|
||||||
|
|
||||||
@@ -522,6 +573,13 @@ modifier comes before the field, not after it::
|
|||||||
>>> post.tags
|
>>> post.tags
|
||||||
['database', 'nosql']
|
['database', 'nosql']
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
If no modifier operator is specified the default will be ``$set``. So the following sentences are identical::
|
||||||
|
|
||||||
|
>>> BlogPost.objects(id=post.id).update(title='Example Post')
|
||||||
|
>>> BlogPost.objects(id=post.id).update(set__title='Example Post')
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
In version 0.5 the :meth:`~mongoengine.Document.save` runs atomic updates
|
In version 0.5 the :meth:`~mongoengine.Document.save` runs atomic updates
|
||||||
@@ -538,10 +596,20 @@ cannot use the `$` syntax in keyword arguments it has been mapped to `S`::
|
|||||||
>>> post.tags
|
>>> post.tags
|
||||||
['database', 'mongodb']
|
['database', 'mongodb']
|
||||||
|
|
||||||
|
From MongoDB version 2.6, push operator supports $position value which allows
|
||||||
|
to push values with index::
|
||||||
|
|
||||||
|
>>> post = BlogPost(title="Test", tags=["mongo"])
|
||||||
|
>>> post.save()
|
||||||
|
>>> post.update(push__tags__0=["database", "code"])
|
||||||
|
>>> post.reload()
|
||||||
|
>>> post.tags
|
||||||
|
['database', 'code', 'mongo']
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
Currently only top level lists are handled, future versions of mongodb /
|
Currently only top level lists are handled, future versions of mongodb /
|
||||||
pymongo plan to support nested positional operators. See `The $ positional
|
pymongo plan to support nested positional operators. See `The $ positional
|
||||||
operator <http://www.mongodb.org/display/DOCS/Updating#Updating-The%24positionaloperator>`_.
|
operator <https://docs.mongodb.com/manual/tutorial/update-documents/#Updating-The%24positionaloperator>`_.
|
||||||
|
|
||||||
Server-side javascript execution
|
Server-side javascript execution
|
||||||
================================
|
================================
|
||||||
@@ -580,7 +648,7 @@ Some variables are made available in the scope of the Javascript function:
|
|||||||
|
|
||||||
The following example demonstrates the intended usage of
|
The following example demonstrates the intended usage of
|
||||||
:meth:`~mongoengine.queryset.QuerySet.exec_js` by defining a function that sums
|
:meth:`~mongoengine.queryset.QuerySet.exec_js` by defining a function that sums
|
||||||
over a field on a document (this functionality is already available throught
|
over a field on a document (this functionality is already available through
|
||||||
:meth:`~mongoengine.queryset.QuerySet.sum` but is shown here for sake of
|
:meth:`~mongoengine.queryset.QuerySet.sum` but is shown here for sake of
|
||||||
example)::
|
example)::
|
||||||
|
|
||||||
|
|||||||
@@ -35,25 +35,25 @@ Available signals include:
|
|||||||
:class:`~mongoengine.EmbeddedDocument` instance has been completed.
|
:class:`~mongoengine.EmbeddedDocument` instance has been completed.
|
||||||
|
|
||||||
`pre_save`
|
`pre_save`
|
||||||
Called within :meth:`~mongoengine.document.Document.save` prior to performing
|
Called within :meth:`~mongoengine.Document.save` prior to performing
|
||||||
any actions.
|
any actions.
|
||||||
|
|
||||||
`pre_save_post_validation`
|
`pre_save_post_validation`
|
||||||
Called within :meth:`~mongoengine.document.Document.save` after validation
|
Called within :meth:`~mongoengine.Document.save` after validation
|
||||||
has taken place but before saving.
|
has taken place but before saving.
|
||||||
|
|
||||||
`post_save`
|
`post_save`
|
||||||
Called within :meth:`~mongoengine.document.Document.save` after all actions
|
Called within :meth:`~mongoengine.Document.save` after most actions
|
||||||
(validation, insert/update, cascades, clearing dirty flags) have completed
|
(validation, insert/update, and cascades, but not clearing dirty flags) have
|
||||||
successfully. Passed the additional boolean keyword argument `created` to
|
completed successfully. Passed the additional boolean keyword argument
|
||||||
indicate if the save was an insert or an update.
|
`created` to indicate if the save was an insert or an update.
|
||||||
|
|
||||||
`pre_delete`
|
`pre_delete`
|
||||||
Called within :meth:`~mongoengine.document.Document.delete` prior to
|
Called within :meth:`~mongoengine.Document.delete` prior to
|
||||||
attempting the delete operation.
|
attempting the delete operation.
|
||||||
|
|
||||||
`post_delete`
|
`post_delete`
|
||||||
Called within :meth:`~mongoengine.document.Document.delete` upon successful
|
Called within :meth:`~mongoengine.Document.delete` upon successful
|
||||||
deletion of the record.
|
deletion of the record.
|
||||||
|
|
||||||
`pre_bulk_insert`
|
`pre_bulk_insert`
|
||||||
@@ -113,6 +113,10 @@ handlers within your subclass::
|
|||||||
signals.pre_save.connect(Author.pre_save, sender=Author)
|
signals.pre_save.connect(Author.pre_save, sender=Author)
|
||||||
signals.post_save.connect(Author.post_save, sender=Author)
|
signals.post_save.connect(Author.post_save, sender=Author)
|
||||||
|
|
||||||
|
.. warning::
|
||||||
|
|
||||||
|
Note that EmbeddedDocument only supports pre/post_init signals. pre/post_save, etc should be attached to Document's class only. Attaching pre_save to an EmbeddedDocument is ignored silently.
|
||||||
|
|
||||||
Finally, you can also use this small decorator to quickly create a number of
|
Finally, you can also use this small decorator to quickly create a number of
|
||||||
signals and attach them to your :class:`~mongoengine.Document` or
|
signals and attach them to your :class:`~mongoengine.Document` or
|
||||||
:class:`~mongoengine.EmbeddedDocument` subclasses as class decorators::
|
:class:`~mongoengine.EmbeddedDocument` subclasses as class decorators::
|
||||||
@@ -142,11 +146,4 @@ cleaner looking while still allowing manual execution of the callback::
|
|||||||
modified = DateTimeField()
|
modified = DateTimeField()
|
||||||
|
|
||||||
|
|
||||||
ReferenceFields and Signals
|
|
||||||
---------------------------
|
|
||||||
|
|
||||||
Currently `reverse_delete_rules` do not trigger signals on the other part of
|
|
||||||
the relationship. If this is required you must manually handle the
|
|
||||||
reverse deletion.
|
|
||||||
|
|
||||||
.. _blinker: http://pypi.python.org/pypi/blinker
|
.. _blinker: http://pypi.python.org/pypi/blinker
|
||||||
|
|||||||
51
docs/guide/text-indexes.rst
Normal file
51
docs/guide/text-indexes.rst
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
===========
|
||||||
|
Text Search
|
||||||
|
===========
|
||||||
|
|
||||||
|
After MongoDB 2.4 version, supports search documents by text indexes.
|
||||||
|
|
||||||
|
|
||||||
|
Defining a Document with text index
|
||||||
|
===================================
|
||||||
|
Use the *$* prefix to set a text index, Look the declaration::
|
||||||
|
|
||||||
|
class News(Document):
|
||||||
|
title = StringField()
|
||||||
|
content = StringField()
|
||||||
|
is_active = BooleanField()
|
||||||
|
|
||||||
|
meta = {'indexes': [
|
||||||
|
{'fields': ['$title', "$content"],
|
||||||
|
'default_language': 'english',
|
||||||
|
'weights': {'title': 10, 'content': 2}
|
||||||
|
}
|
||||||
|
]}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
Querying
|
||||||
|
========
|
||||||
|
|
||||||
|
Saving a document::
|
||||||
|
|
||||||
|
News(title="Using mongodb text search",
|
||||||
|
content="Testing text search").save()
|
||||||
|
|
||||||
|
News(title="MongoEngine 0.9 released",
|
||||||
|
content="Various improvements").save()
|
||||||
|
|
||||||
|
Next, start a text search using :attr:`QuerySet.search_text` method::
|
||||||
|
|
||||||
|
document = News.objects.search_text('testing').first()
|
||||||
|
document.title # may be: "Using mongodb text search"
|
||||||
|
|
||||||
|
document = News.objects.search_text('released').first()
|
||||||
|
document.title # may be: "MongoEngine 0.9 released"
|
||||||
|
|
||||||
|
|
||||||
|
Ordering by text score
|
||||||
|
======================
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
objects = News.objects.search_text('mongo').order_by('$text_score')
|
||||||
123
docs/guide/validation.rst
Normal file
123
docs/guide/validation.rst
Normal file
@@ -0,0 +1,123 @@
|
|||||||
|
====================
|
||||||
|
Document Validation
|
||||||
|
====================
|
||||||
|
|
||||||
|
By design, MongoEngine strictly validates the documents right before they are inserted in MongoDB
|
||||||
|
and makes sure they are consistent with the fields defined in your models.
|
||||||
|
|
||||||
|
MongoEngine makes the assumption that the documents that exists in the DB are compliant with the schema.
|
||||||
|
This means that Mongoengine will not validate a document when an object is loaded from the DB into an instance
|
||||||
|
of your model but this operation may fail under some circumstances (e.g. if there is a field in
|
||||||
|
the document fetched from the database that is not defined in your model).
|
||||||
|
|
||||||
|
|
||||||
|
Built-in validation
|
||||||
|
===================
|
||||||
|
|
||||||
|
Mongoengine provides different fields that encapsulate the corresponding validation
|
||||||
|
out of the box. Validation runs when calling `.validate()` or `.save()`
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
from mongoengine import Document, EmailField
|
||||||
|
|
||||||
|
class User(Document):
|
||||||
|
email = EmailField()
|
||||||
|
age = IntField(min_value=0, max_value=99)
|
||||||
|
|
||||||
|
user = User(email='invalid@', age=24)
|
||||||
|
user.validate() # raises ValidationError (Invalid email address: ['email'])
|
||||||
|
user.save() # raises ValidationError (Invalid email address: ['email'])
|
||||||
|
|
||||||
|
user2 = User(email='john.doe@garbage.com', age=1000)
|
||||||
|
user2.save() # raises ValidationError (Integer value is too large: ['age'])
|
||||||
|
|
||||||
|
Custom validation
|
||||||
|
=================
|
||||||
|
|
||||||
|
The following feature can be used to customize the validation:
|
||||||
|
|
||||||
|
* Field `validation` parameter
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
def not_john_doe(name):
|
||||||
|
if name == 'John Doe':
|
||||||
|
raise ValidationError("John Doe is not a valid name")
|
||||||
|
|
||||||
|
class Person(Document):
|
||||||
|
full_name = StringField(validation=not_john_doe)
|
||||||
|
|
||||||
|
Person(full_name='Billy Doe').save()
|
||||||
|
Person(full_name='John Doe').save() # raises ValidationError (John Doe is not a valid name)
|
||||||
|
|
||||||
|
|
||||||
|
* Document `clean` method
|
||||||
|
|
||||||
|
This method is called as part of :meth:`~mongoengine.document.Document.save` and should be used to provide
|
||||||
|
custom model validation and/or to modify some of the field values prior to validation.
|
||||||
|
For instance, you could use it to automatically provide a value for a field, or to do validation
|
||||||
|
that requires access to more than a single field.
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
class Essay(Document):
|
||||||
|
status = StringField(choices=('Published', 'Draft'), required=True)
|
||||||
|
pub_date = DateTimeField()
|
||||||
|
|
||||||
|
def clean(self):
|
||||||
|
# Validate that only published essays have a `pub_date`
|
||||||
|
if self.status == 'Draft' and self.pub_date is not None:
|
||||||
|
raise ValidationError('Draft entries should not have a publication date.')
|
||||||
|
# Set the pub_date for published items if not set.
|
||||||
|
if self.status == 'Published' and self.pub_date is None:
|
||||||
|
self.pub_date = datetime.now()
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
Cleaning is only called if validation is turned on and when calling
|
||||||
|
:meth:`~mongoengine.Document.save`.
|
||||||
|
|
||||||
|
* Adding custom Field classes
|
||||||
|
|
||||||
|
We recommend as much as possible to use fields provided by MongoEngine. However, it is also possible
|
||||||
|
to subclass a Field and encapsulate some validation by overriding the `validate` method
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
class AgeField(IntField):
|
||||||
|
|
||||||
|
def validate(self, value):
|
||||||
|
super(AgeField, self).validate(value) # let IntField.validate run first
|
||||||
|
if value == 60:
|
||||||
|
self.error('60 is not allowed')
|
||||||
|
|
||||||
|
class Person(Document):
|
||||||
|
age = AgeField(min_value=0, max_value=99)
|
||||||
|
|
||||||
|
Person(age=20).save() # passes
|
||||||
|
Person(age=1000).save() # raises ValidationError (Integer value is too large: ['age'])
|
||||||
|
Person(age=60).save() # raises ValidationError (Person:None) (60 is not allowed: ['age'])
|
||||||
|
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
When overriding `validate`, use `self.error("your-custom-error")` instead of raising ValidationError explicitly,
|
||||||
|
it will provide a better context with the error message
|
||||||
|
|
||||||
|
Skipping validation
|
||||||
|
====================
|
||||||
|
|
||||||
|
Although discouraged as it allows to violate fields constraints, if for some reason you need to disable
|
||||||
|
the validation and cleaning of a document when you call :meth:`~mongoengine.document.Document.save`, you can use `.save(validate=False)`.
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
class Person(Document):
|
||||||
|
age = IntField(max_value=100)
|
||||||
|
|
||||||
|
Person(age=1000).save() # raises ValidationError (Integer value is too large)
|
||||||
|
|
||||||
|
Person(age=1000).save(validate=False)
|
||||||
|
person = Person.objects.first()
|
||||||
|
assert person.age == 1000
|
||||||
|
|
||||||
@@ -7,14 +7,14 @@ MongoDB. To install it, simply run
|
|||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ pip install -U mongoengine
|
$ python -m pip install -U mongoengine
|
||||||
|
|
||||||
:doc:`tutorial`
|
:doc:`tutorial`
|
||||||
A quick tutorial building a tumblelog to get you up and running with
|
A quick tutorial building a tumblelog to get you up and running with
|
||||||
MongoEngine.
|
MongoEngine.
|
||||||
|
|
||||||
:doc:`guide/index`
|
:doc:`guide/index`
|
||||||
The Full guide to MongoEngine - from modeling documents to storing files,
|
The Full guide to MongoEngine --- from modeling documents to storing files,
|
||||||
from querying for data to firing signals and *everything* between.
|
from querying for data to firing signals and *everything* between.
|
||||||
|
|
||||||
:doc:`apireference`
|
:doc:`apireference`
|
||||||
@@ -23,9 +23,18 @@ MongoDB. To install it, simply run
|
|||||||
:doc:`upgrade`
|
:doc:`upgrade`
|
||||||
How to upgrade MongoEngine.
|
How to upgrade MongoEngine.
|
||||||
|
|
||||||
|
:doc:`faq`
|
||||||
|
Frequently Asked Questions
|
||||||
|
|
||||||
:doc:`django`
|
:doc:`django`
|
||||||
Using MongoEngine and Django
|
Using MongoEngine and Django
|
||||||
|
|
||||||
|
MongoDB and driver support
|
||||||
|
--------------------------
|
||||||
|
|
||||||
|
MongoEngine is based on the PyMongo driver and tested against multiple versions of MongoDB.
|
||||||
|
For further details, please refer to the `readme <https://github.com/MongoEngine/mongoengine#mongoengine>`_.
|
||||||
|
|
||||||
Community
|
Community
|
||||||
---------
|
---------
|
||||||
|
|
||||||
@@ -73,6 +82,7 @@ formats for offline reading.
|
|||||||
apireference
|
apireference
|
||||||
changelog
|
changelog
|
||||||
upgrade
|
upgrade
|
||||||
|
faq
|
||||||
django
|
django
|
||||||
|
|
||||||
Indices and tables
|
Indices and tables
|
||||||
@@ -81,4 +91,3 @@ Indices and tables
|
|||||||
* :ref:`genindex`
|
* :ref:`genindex`
|
||||||
* :ref:`modindex`
|
* :ref:`modindex`
|
||||||
* :ref:`search`
|
* :ref:`search`
|
||||||
|
|
||||||
|
|||||||
3
docs/requirements.txt
Normal file
3
docs/requirements.txt
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
pymongo>=3.11
|
||||||
|
Sphinx==3.2.1
|
||||||
|
sphinx-rtd-theme==0.5.0
|
||||||
@@ -3,11 +3,10 @@ Tutorial
|
|||||||
========
|
========
|
||||||
|
|
||||||
This tutorial introduces **MongoEngine** by means of example --- we will walk
|
This tutorial introduces **MongoEngine** by means of example --- we will walk
|
||||||
through how to create a simple **Tumblelog** application. A Tumblelog is a type
|
through how to create a simple **Tumblelog** application. A tumblelog is a
|
||||||
of blog where posts are not constrained to being conventional text-based posts.
|
blog that supports mixed media content, including text, images, links, video,
|
||||||
As well as text-based entries, users may post images, links, videos, etc. For
|
audio, etc. For simplicity's sake, we'll stick to text, image, and link
|
||||||
simplicity's sake, we'll stick to text, image and link entries in our
|
entries. As the purpose of this tutorial is to introduce MongoEngine, we'll
|
||||||
application. As the purpose of this tutorial is to introduce MongoEngine, we'll
|
|
||||||
focus on the data-modelling side of the application, leaving out a user
|
focus on the data-modelling side of the application, leaving out a user
|
||||||
interface.
|
interface.
|
||||||
|
|
||||||
@@ -16,14 +15,14 @@ Getting started
|
|||||||
|
|
||||||
Before we start, make sure that a copy of MongoDB is running in an accessible
|
Before we start, make sure that a copy of MongoDB is running in an accessible
|
||||||
location --- running it locally will be easier, but if that is not an option
|
location --- running it locally will be easier, but if that is not an option
|
||||||
then it may be run on a remote server. If you haven't installed mongoengine,
|
then it may be run on a remote server. If you haven't installed MongoEngine,
|
||||||
simply use pip to install it like so::
|
simply use pip to install it like so::
|
||||||
|
|
||||||
$ pip install mongoengine
|
$ python -m pip install mongoengine
|
||||||
|
|
||||||
Before we can start using MongoEngine, we need to tell it how to connect to our
|
Before we can start using MongoEngine, we need to tell it how to connect to our
|
||||||
instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect`
|
instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect`
|
||||||
function. If running locally the only argument we need to provide is the name
|
function. If running locally, the only argument we need to provide is the name
|
||||||
of the MongoDB database to use::
|
of the MongoDB database to use::
|
||||||
|
|
||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
@@ -39,18 +38,18 @@ Defining our documents
|
|||||||
MongoDB is *schemaless*, which means that no schema is enforced by the database
|
MongoDB is *schemaless*, which means that no schema is enforced by the database
|
||||||
--- we may add and remove fields however we want and MongoDB won't complain.
|
--- we may add and remove fields however we want and MongoDB won't complain.
|
||||||
This makes life a lot easier in many regards, especially when there is a change
|
This makes life a lot easier in many regards, especially when there is a change
|
||||||
to the data model. However, defining schemata for our documents can help to
|
to the data model. However, defining schemas for our documents can help to iron
|
||||||
iron out bugs involving incorrect types or missing fields, and also allow us to
|
out bugs involving incorrect types or missing fields, and also allow us to
|
||||||
define utility methods on our documents in the same way that traditional
|
define utility methods on our documents in the same way that traditional
|
||||||
:abbr:`ORMs (Object-Relational Mappers)` do.
|
:abbr:`ORMs (Object-Relational Mappers)` do.
|
||||||
|
|
||||||
In our Tumblelog application we need to store several different types of
|
In our Tumblelog application we need to store several different types of
|
||||||
information. We will need to have a collection of **users**, so that we may
|
information. We will need to have a collection of **users**, so that we may
|
||||||
link posts to an individual. We also need to store our different types of
|
link posts to an individual. We also need to store our different types of
|
||||||
**posts** (eg: text, image and link) in the database. To aid navigation of our
|
**posts** (eg: text, image and link) in the database. To aid navigation of our
|
||||||
Tumblelog, posts may have **tags** associated with them, so that the list of
|
Tumblelog, posts may have **tags** associated with them, so that the list of
|
||||||
posts shown to the user may be limited to posts that have been assigned a
|
posts shown to the user may be limited to posts that have been assigned a
|
||||||
specific tag. Finally, it would be nice if **comments** could be added to
|
specific tag. Finally, it would be nice if **comments** could be added to
|
||||||
posts. We'll start with **users**, as the other document models are slightly
|
posts. We'll start with **users**, as the other document models are slightly
|
||||||
more involved.
|
more involved.
|
||||||
|
|
||||||
@@ -65,7 +64,7 @@ which fields a :class:`User` may have, and what types of data they might store::
|
|||||||
first_name = StringField(max_length=50)
|
first_name = StringField(max_length=50)
|
||||||
last_name = StringField(max_length=50)
|
last_name = StringField(max_length=50)
|
||||||
|
|
||||||
This looks similar to how a the structure of a table would be defined in a
|
This looks similar to how the structure of a table would be defined in a
|
||||||
regular ORM. The key difference is that this schema will never be passed on to
|
regular ORM. The key difference is that this schema will never be passed on to
|
||||||
MongoDB --- this will only be enforced at the application level, making future
|
MongoDB --- this will only be enforced at the application level, making future
|
||||||
changes easy to manage. Also, the User documents will be stored in a
|
changes easy to manage. Also, the User documents will be stored in a
|
||||||
@@ -78,7 +77,7 @@ Now we'll think about how to store the rest of the information. If we were
|
|||||||
using a relational database, we would most likely have a table of **posts**, a
|
using a relational database, we would most likely have a table of **posts**, a
|
||||||
table of **comments** and a table of **tags**. To associate the comments with
|
table of **comments** and a table of **tags**. To associate the comments with
|
||||||
individual posts, we would put a column in the comments table that contained a
|
individual posts, we would put a column in the comments table that contained a
|
||||||
foreign key to the posts table. We'd also need a link table to provide the
|
foreign key to the posts table. We'd also need a link table to provide the
|
||||||
many-to-many relationship between posts and tags. Then we'd need to address the
|
many-to-many relationship between posts and tags. Then we'd need to address the
|
||||||
problem of storing the specialised post-types (text, image and link). There are
|
problem of storing the specialised post-types (text, image and link). There are
|
||||||
several ways we can achieve this, but each of them have their problems --- none
|
several ways we can achieve this, but each of them have their problems --- none
|
||||||
@@ -87,7 +86,7 @@ of them stand out as particularly intuitive solutions.
|
|||||||
Posts
|
Posts
|
||||||
^^^^^
|
^^^^^
|
||||||
|
|
||||||
Happily mongoDB *isn't* a relational database, so we're not going to do it that
|
Happily MongoDB *isn't* a relational database, so we're not going to do it that
|
||||||
way. As it turns out, we can use MongoDB's schemaless nature to provide us with
|
way. As it turns out, we can use MongoDB's schemaless nature to provide us with
|
||||||
a much nicer solution. We will store all of the posts in *one collection* and
|
a much nicer solution. We will store all of the posts in *one collection* and
|
||||||
each post type will only store the fields it needs. If we later want to add
|
each post type will only store the fields it needs. If we later want to add
|
||||||
@@ -96,7 +95,7 @@ using* the new fields we need to support video posts. This fits with the
|
|||||||
Object-Oriented principle of *inheritance* nicely. We can think of
|
Object-Oriented principle of *inheritance* nicely. We can think of
|
||||||
:class:`Post` as a base class, and :class:`TextPost`, :class:`ImagePost` and
|
:class:`Post` as a base class, and :class:`TextPost`, :class:`ImagePost` and
|
||||||
:class:`LinkPost` as subclasses of :class:`Post`. In fact, MongoEngine supports
|
:class:`LinkPost` as subclasses of :class:`Post`. In fact, MongoEngine supports
|
||||||
this kind of modelling out of the box --- all you need do is turn on inheritance
|
this kind of modeling out of the box --- all you need do is turn on inheritance
|
||||||
by setting :attr:`allow_inheritance` to True in the :attr:`meta`::
|
by setting :attr:`allow_inheritance` to True in the :attr:`meta`::
|
||||||
|
|
||||||
class Post(Document):
|
class Post(Document):
|
||||||
@@ -128,8 +127,8 @@ link table, we can just store a list of tags in each post. So, for both
|
|||||||
efficiency and simplicity's sake, we'll store the tags as strings directly
|
efficiency and simplicity's sake, we'll store the tags as strings directly
|
||||||
within the post, rather than storing references to tags in a separate
|
within the post, rather than storing references to tags in a separate
|
||||||
collection. Especially as tags are generally very short (often even shorter
|
collection. Especially as tags are generally very short (often even shorter
|
||||||
than a document's id), this denormalisation won't impact very strongly on the
|
than a document's id), this denormalization won't impact the size of the
|
||||||
size of our database. So let's take a look that the code our modified
|
database very strongly. Let's take a look at the code of our modified
|
||||||
:class:`Post` class::
|
:class:`Post` class::
|
||||||
|
|
||||||
class Post(Document):
|
class Post(Document):
|
||||||
@@ -141,7 +140,7 @@ The :class:`~mongoengine.fields.ListField` object that is used to define a Post'
|
|||||||
takes a field object as its first argument --- this means that you can have
|
takes a field object as its first argument --- this means that you can have
|
||||||
lists of any type of field (including lists).
|
lists of any type of field (including lists).
|
||||||
|
|
||||||
.. note:: We don't need to modify the specialised post types as they all
|
.. note:: We don't need to modify the specialized post types as they all
|
||||||
inherit from :class:`Post`.
|
inherit from :class:`Post`.
|
||||||
|
|
||||||
Comments
|
Comments
|
||||||
@@ -149,12 +148,12 @@ Comments
|
|||||||
|
|
||||||
A comment is typically associated with *one* post. In a relational database, to
|
A comment is typically associated with *one* post. In a relational database, to
|
||||||
display a post with its comments, we would have to retrieve the post from the
|
display a post with its comments, we would have to retrieve the post from the
|
||||||
database, then query the database again for the comments associated with the
|
database and then query the database again for the comments associated with the
|
||||||
post. This works, but there is no real reason to be storing the comments
|
post. This works, but there is no real reason to be storing the comments
|
||||||
separately from their associated posts, other than to work around the
|
separately from their associated posts, other than to work around the
|
||||||
relational model. Using MongoDB we can store the comments as a list of
|
relational model. Using MongoDB we can store the comments as a list of
|
||||||
*embedded documents* directly on a post document. An embedded document should
|
*embedded documents* directly on a post document. An embedded document should
|
||||||
be treated no differently that a regular document; it just doesn't have its own
|
be treated no differently than a regular document; it just doesn't have its own
|
||||||
collection in the database. Using MongoEngine, we can define the structure of
|
collection in the database. Using MongoEngine, we can define the structure of
|
||||||
embedded documents, along with utility methods, in exactly the same way we do
|
embedded documents, along with utility methods, in exactly the same way we do
|
||||||
with regular documents::
|
with regular documents::
|
||||||
@@ -207,7 +206,10 @@ object::
|
|||||||
ross.last_name = 'Lawley'
|
ross.last_name = 'Lawley'
|
||||||
ross.save()
|
ross.save()
|
||||||
|
|
||||||
Now that we've got our user in the database, let's add a couple of posts::
|
Assign another user to a variable called ``john``, just like we did above with
|
||||||
|
``ross``.
|
||||||
|
|
||||||
|
Now that we've got our users in the database, let's add a couple of posts::
|
||||||
|
|
||||||
post1 = TextPost(title='Fun with MongoEngine', author=john)
|
post1 = TextPost(title='Fun with MongoEngine', author=john)
|
||||||
post1.content = 'Took a look at MongoEngine today, looks pretty cool.'
|
post1.content = 'Took a look at MongoEngine today, looks pretty cool.'
|
||||||
@@ -219,8 +221,8 @@ Now that we've got our user in the database, let's add a couple of posts::
|
|||||||
post2.tags = ['mongoengine']
|
post2.tags = ['mongoengine']
|
||||||
post2.save()
|
post2.save()
|
||||||
|
|
||||||
.. note:: If you change a field on a object that has already been saved, then
|
.. note:: If you change a field on an object that has already been saved and
|
||||||
call :meth:`save` again, the document will be updated.
|
then call :meth:`save` again, the document will be updated.
|
||||||
|
|
||||||
Accessing our data
|
Accessing our data
|
||||||
==================
|
==================
|
||||||
@@ -232,17 +234,17 @@ used to access the documents in the database collection associated with that
|
|||||||
class. So let's see how we can get our posts' titles::
|
class. So let's see how we can get our posts' titles::
|
||||||
|
|
||||||
for post in Post.objects:
|
for post in Post.objects:
|
||||||
print post.title
|
print(post.title)
|
||||||
|
|
||||||
Retrieving type-specific information
|
Retrieving type-specific information
|
||||||
------------------------------------
|
------------------------------------
|
||||||
|
|
||||||
This will print the titles of our posts, one on each line. But What if we want
|
This will print the titles of our posts, one on each line. But what if we want
|
||||||
to access the type-specific data (link_url, content, etc.)? One way is simply
|
to access the type-specific data (link_url, content, etc.)? One way is simply
|
||||||
to use the :attr:`objects` attribute of a subclass of :class:`Post`::
|
to use the :attr:`objects` attribute of a subclass of :class:`Post`::
|
||||||
|
|
||||||
for post in TextPost.objects:
|
for post in TextPost.objects:
|
||||||
print post.content
|
print(post.content)
|
||||||
|
|
||||||
Using TextPost's :attr:`objects` attribute only returns documents that were
|
Using TextPost's :attr:`objects` attribute only returns documents that were
|
||||||
created using :class:`TextPost`. Actually, there is a more general rule here:
|
created using :class:`TextPost`. Actually, there is a more general rule here:
|
||||||
@@ -259,16 +261,14 @@ instances of :class:`Post` --- they were instances of the subclass of
|
|||||||
practice::
|
practice::
|
||||||
|
|
||||||
for post in Post.objects:
|
for post in Post.objects:
|
||||||
print post.title
|
print(post.title)
|
||||||
print '=' * len(post.title)
|
print('=' * len(post.title))
|
||||||
|
|
||||||
if isinstance(post, TextPost):
|
if isinstance(post, TextPost):
|
||||||
print post.content
|
print(post.content)
|
||||||
|
|
||||||
if isinstance(post, LinkPost):
|
if isinstance(post, LinkPost):
|
||||||
print 'Link:', post.link_url
|
print('Link: {}'.format(post.link_url))
|
||||||
|
|
||||||
print
|
|
||||||
|
|
||||||
This would print the title of each post, followed by the content if it was a
|
This would print the title of each post, followed by the content if it was a
|
||||||
text post, and "Link: <url>" if it was a link post.
|
text post, and "Link: <url>" if it was a link post.
|
||||||
@@ -283,7 +283,7 @@ your query. Let's adjust our query so that only posts with the tag "mongodb"
|
|||||||
are returned::
|
are returned::
|
||||||
|
|
||||||
for post in Post.objects(tags='mongodb'):
|
for post in Post.objects(tags='mongodb'):
|
||||||
print post.title
|
print(post.title)
|
||||||
|
|
||||||
There are also methods available on :class:`~mongoengine.queryset.QuerySet`
|
There are also methods available on :class:`~mongoengine.queryset.QuerySet`
|
||||||
objects that allow different results to be returned, for example, calling
|
objects that allow different results to be returned, for example, calling
|
||||||
@@ -292,11 +292,11 @@ the first matched by the query you provide. Aggregation functions may also be
|
|||||||
used on :class:`~mongoengine.queryset.QuerySet` objects::
|
used on :class:`~mongoengine.queryset.QuerySet` objects::
|
||||||
|
|
||||||
num_posts = Post.objects(tags='mongodb').count()
|
num_posts = Post.objects(tags='mongodb').count()
|
||||||
print 'Found %d posts with tag "mongodb"' % num_posts
|
print('Found {} posts with tag "mongodb"'.format(num_posts))
|
||||||
|
|
||||||
Learning more about mongoengine
|
Learning more about MongoEngine
|
||||||
-------------------------------
|
-------------------------------
|
||||||
|
|
||||||
If you got this far you've made a great start, so well done! The next step on
|
If you got this far you've made a great start, so well done! The next step on
|
||||||
your mongoengine journey is the `full user guide <guide/index.html>`_, where you
|
your MongoEngine journey is the `full user guide <guide/index.html>`_, where
|
||||||
can learn indepth about how to use mongoengine and mongodb.
|
you can learn in-depth about how to use MongoEngine and MongoDB.
|
||||||
|
|||||||
100
docs/upgrade.rst
100
docs/upgrade.rst
@@ -2,6 +2,100 @@
|
|||||||
Upgrading
|
Upgrading
|
||||||
#########
|
#########
|
||||||
|
|
||||||
|
Development
|
||||||
|
***********
|
||||||
|
(Fill this out whenever you introduce breaking changes to MongoEngine)
|
||||||
|
|
||||||
|
URLField's constructor no longer takes `verify_exists`
|
||||||
|
|
||||||
|
0.15.0
|
||||||
|
******
|
||||||
|
|
||||||
|
0.14.0
|
||||||
|
******
|
||||||
|
This release includes a few bug fixes and a significant code cleanup. The most
|
||||||
|
important change is that `QuerySet.as_pymongo` no longer supports a
|
||||||
|
`coerce_types` mode. If you used it in the past, a) please let us know of your
|
||||||
|
use case, b) you'll need to override `as_pymongo` to get the desired outcome.
|
||||||
|
|
||||||
|
This release also makes the EmbeddedDocument not hashable by default. If you
|
||||||
|
use embedded documents in sets or dictionaries, you might have to override
|
||||||
|
`__hash__` and implement a hashing logic specific to your use case. See #1528
|
||||||
|
for the reason behind this change.
|
||||||
|
|
||||||
|
0.13.0
|
||||||
|
******
|
||||||
|
This release adds Unicode support to the `EmailField` and changes its
|
||||||
|
structure significantly. Previously, email addresses containing Unicode
|
||||||
|
characters didn't work at all. Starting with v0.13.0, domains with Unicode
|
||||||
|
characters are supported out of the box, meaning some emails that previously
|
||||||
|
didn't pass validation now do. Make sure the rest of your application can
|
||||||
|
accept such email addresses. Additionally, if you subclassed the `EmailField`
|
||||||
|
in your application and overrode `EmailField.EMAIL_REGEX`, you will have to
|
||||||
|
adjust your code to override `EmailField.USER_REGEX`, `EmailField.DOMAIN_REGEX`,
|
||||||
|
and potentially `EmailField.UTF8_USER_REGEX`.
|
||||||
|
|
||||||
|
0.12.0
|
||||||
|
******
|
||||||
|
This release includes various fixes for the `BaseQuerySet` methods and how they
|
||||||
|
are chained together. Since version 0.10.1 applying limit/skip/hint/batch_size
|
||||||
|
to an already-existing queryset wouldn't modify the underlying PyMongo cursor.
|
||||||
|
This has been fixed now, so you'll need to make sure that your code didn't rely
|
||||||
|
on the broken implementation.
|
||||||
|
|
||||||
|
Additionally, a public `BaseQuerySet.clone_into` has been renamed to a private
|
||||||
|
`_clone_into`. If you directly used that method in your code, you'll need to
|
||||||
|
rename its occurrences.
|
||||||
|
|
||||||
|
0.11.0
|
||||||
|
******
|
||||||
|
This release includes a major rehaul of MongoEngine's code quality and
|
||||||
|
introduces a few breaking changes. It also touches many different parts of
|
||||||
|
the package and although all the changes have been tested and scrutinized,
|
||||||
|
you're encouraged to thoroughly test the upgrade.
|
||||||
|
|
||||||
|
First breaking change involves renaming `ConnectionError` to `MongoEngineConnectionError`.
|
||||||
|
If you import or catch this exception, you'll need to rename it in your code.
|
||||||
|
|
||||||
|
Second breaking change drops Python v2.6 support. If you run MongoEngine on
|
||||||
|
that Python version, you'll need to upgrade it first.
|
||||||
|
|
||||||
|
Third breaking change drops an old backward compatibility measure where
|
||||||
|
`from mongoengine.base import ErrorClass` would work on top of
|
||||||
|
`from mongoengine.errors import ErrorClass` (where `ErrorClass` is e.g.
|
||||||
|
`ValidationError`). If you import any exceptions from `mongoengine.base`,
|
||||||
|
change it to `mongoengine.errors`.
|
||||||
|
|
||||||
|
0.10.8
|
||||||
|
******
|
||||||
|
This version fixed an issue where specifying a MongoDB URI host would override
|
||||||
|
more information than it should. These changes are minor, but they still
|
||||||
|
subtly modify the connection logic and thus you're encouraged to test your
|
||||||
|
MongoDB connection before shipping v0.10.8 in production.
|
||||||
|
|
||||||
|
0.10.7
|
||||||
|
******
|
||||||
|
|
||||||
|
`QuerySet.aggregate_sum` and `QuerySet.aggregate_average` are dropped. Use
|
||||||
|
`QuerySet.sum` and `QuerySet.average` instead which use the aggreation framework
|
||||||
|
by default from now on.
|
||||||
|
|
||||||
|
0.9.0
|
||||||
|
*****
|
||||||
|
|
||||||
|
The 0.8.7 package on pypi was corrupted. If upgrading from 0.8.7 to 0.9.0 please follow: ::
|
||||||
|
|
||||||
|
python -m pip uninstall pymongo
|
||||||
|
python -m pip uninstall mongoengine
|
||||||
|
python -m pip install pymongo==2.8
|
||||||
|
python -m pip install mongoengine
|
||||||
|
|
||||||
|
0.8.7
|
||||||
|
*****
|
||||||
|
|
||||||
|
Calling reload on deleted / nonexistent documents now raises a DoesNotExist
|
||||||
|
exception.
|
||||||
|
|
||||||
|
|
||||||
0.8.2 to 0.8.3
|
0.8.2 to 0.8.3
|
||||||
**************
|
**************
|
||||||
@@ -59,7 +153,7 @@ inherited classes like so: ::
|
|||||||
|
|
||||||
# 4. Remove indexes
|
# 4. Remove indexes
|
||||||
info = collection.index_information()
|
info = collection.index_information()
|
||||||
indexes_to_drop = [key for key, value in info.iteritems()
|
indexes_to_drop = [key for key, value in info.items()
|
||||||
if '_types' in dict(value['key'])]
|
if '_types' in dict(value['key'])]
|
||||||
for index in indexes_to_drop:
|
for index in indexes_to_drop:
|
||||||
collection.drop_index(index)
|
collection.drop_index(index)
|
||||||
@@ -257,7 +351,7 @@ update your code like so: ::
|
|||||||
[m for m in mammals] # This will return all carnivores
|
[m for m in mammals] # This will return all carnivores
|
||||||
|
|
||||||
Len iterates the queryset
|
Len iterates the queryset
|
||||||
--------------------------
|
-------------------------
|
||||||
|
|
||||||
If you ever did `len(queryset)` it previously did a `count()` under the covers,
|
If you ever did `len(queryset)` it previously did a `count()` under the covers,
|
||||||
this caused some unusual issues. As `len(queryset)` is most often used by
|
this caused some unusual issues. As `len(queryset)` is most often used by
|
||||||
@@ -270,7 +364,7 @@ queryset you should upgrade to use count::
|
|||||||
len(Animal.objects(type="mammal"))
|
len(Animal.objects(type="mammal"))
|
||||||
|
|
||||||
# New code
|
# New code
|
||||||
Animal.objects(type="mammal").count())
|
Animal.objects(type="mammal").count()
|
||||||
|
|
||||||
|
|
||||||
.only() now inline with .exclude()
|
.only() now inline with .exclude()
|
||||||
|
|||||||
@@ -1,26 +1,42 @@
|
|||||||
import document
|
# Import submodules so that we can expose their __all__
|
||||||
from document import *
|
from mongoengine import connection
|
||||||
import fields
|
from mongoengine import document
|
||||||
from fields import *
|
from mongoengine import errors
|
||||||
import connection
|
from mongoengine import fields
|
||||||
from connection import *
|
from mongoengine import queryset
|
||||||
import queryset
|
from mongoengine import signals
|
||||||
from queryset import *
|
|
||||||
import signals
|
|
||||||
from signals import *
|
|
||||||
from errors import *
|
|
||||||
import errors
|
|
||||||
import django
|
|
||||||
|
|
||||||
__all__ = (list(document.__all__) + fields.__all__ + connection.__all__ +
|
# Import everything from each submodule so that it can be accessed via
|
||||||
list(queryset.__all__) + signals.__all__ + list(errors.__all__))
|
# mongoengine, e.g. instead of `from mongoengine.connection import connect`,
|
||||||
|
# users can simply use `from mongoengine import connect`, or even
|
||||||
|
# `from mongoengine import *` and then `connect('testdb')`.
|
||||||
|
from mongoengine.connection import *
|
||||||
|
from mongoengine.document import *
|
||||||
|
from mongoengine.errors import *
|
||||||
|
from mongoengine.fields import *
|
||||||
|
from mongoengine.queryset import *
|
||||||
|
from mongoengine.signals import *
|
||||||
|
|
||||||
VERSION = (0, 8, 5)
|
|
||||||
|
__all__ = (
|
||||||
|
list(document.__all__)
|
||||||
|
+ list(fields.__all__)
|
||||||
|
+ list(connection.__all__)
|
||||||
|
+ list(queryset.__all__)
|
||||||
|
+ list(signals.__all__)
|
||||||
|
+ list(errors.__all__)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
VERSION = (0, 21, 0)
|
||||||
|
|
||||||
|
|
||||||
def get_version():
|
def get_version():
|
||||||
if isinstance(VERSION[-1], basestring):
|
"""Return the VERSION as a string.
|
||||||
return '.'.join(map(str, VERSION[:-1])) + VERSION[-1]
|
|
||||||
return '.'.join(map(str, VERSION))
|
For example, if `VERSION == (0, 10, 7)`, return '0.10.7'.
|
||||||
|
"""
|
||||||
|
return ".".join(map(str, VERSION))
|
||||||
|
|
||||||
|
|
||||||
__version__ = get_version()
|
__version__ = get_version()
|
||||||
|
|||||||
@@ -1,8 +1,33 @@
|
|||||||
|
# Base module is split into several files for convenience. Files inside of
|
||||||
|
# this module should import from a specific submodule (e.g.
|
||||||
|
# `from mongoengine.base.document import BaseDocument`), but all of the
|
||||||
|
# other modules should import directly from the top-level module (e.g.
|
||||||
|
# `from mongoengine.base import BaseDocument`). This approach is cleaner and
|
||||||
|
# also helps with cyclical import errors.
|
||||||
from mongoengine.base.common import *
|
from mongoengine.base.common import *
|
||||||
from mongoengine.base.datastructures import *
|
from mongoengine.base.datastructures import *
|
||||||
from mongoengine.base.document import *
|
from mongoengine.base.document import *
|
||||||
from mongoengine.base.fields import *
|
from mongoengine.base.fields import *
|
||||||
from mongoengine.base.metaclasses import *
|
from mongoengine.base.metaclasses import *
|
||||||
|
|
||||||
# Help with backwards compatibility
|
__all__ = (
|
||||||
from mongoengine.errors import *
|
# common
|
||||||
|
"UPDATE_OPERATORS",
|
||||||
|
"_document_registry",
|
||||||
|
"get_document",
|
||||||
|
# datastructures
|
||||||
|
"BaseDict",
|
||||||
|
"BaseList",
|
||||||
|
"EmbeddedDocumentList",
|
||||||
|
"LazyReference",
|
||||||
|
# document
|
||||||
|
"BaseDocument",
|
||||||
|
# fields
|
||||||
|
"BaseField",
|
||||||
|
"ComplexBaseField",
|
||||||
|
"ObjectIdField",
|
||||||
|
"GeoJsonBaseField",
|
||||||
|
# metaclasses
|
||||||
|
"DocumentMetaclass",
|
||||||
|
"TopLevelDocumentMetaclass",
|
||||||
|
)
|
||||||
|
|||||||
@@ -1,26 +1,62 @@
|
|||||||
from mongoengine.errors import NotRegistered
|
from mongoengine.errors import NotRegistered
|
||||||
|
|
||||||
__all__ = ('ALLOW_INHERITANCE', 'get_document', '_document_registry')
|
__all__ = ("UPDATE_OPERATORS", "get_document", "_document_registry")
|
||||||
|
|
||||||
|
|
||||||
|
UPDATE_OPERATORS = {
|
||||||
|
"set",
|
||||||
|
"unset",
|
||||||
|
"inc",
|
||||||
|
"dec",
|
||||||
|
"mul",
|
||||||
|
"pop",
|
||||||
|
"push",
|
||||||
|
"push_all",
|
||||||
|
"pull",
|
||||||
|
"pull_all",
|
||||||
|
"add_to_set",
|
||||||
|
"set_on_insert",
|
||||||
|
"min",
|
||||||
|
"max",
|
||||||
|
"rename",
|
||||||
|
}
|
||||||
|
|
||||||
ALLOW_INHERITANCE = False
|
|
||||||
|
|
||||||
_document_registry = {}
|
_document_registry = {}
|
||||||
|
|
||||||
|
|
||||||
def get_document(name):
|
def get_document(name):
|
||||||
|
"""Get a registered Document class by name."""
|
||||||
doc = _document_registry.get(name, None)
|
doc = _document_registry.get(name, None)
|
||||||
if not doc:
|
if not doc:
|
||||||
# Possible old style name
|
# Possible old style name
|
||||||
single_end = name.split('.')[-1]
|
single_end = name.split(".")[-1]
|
||||||
compound_end = '.%s' % single_end
|
compound_end = ".%s" % single_end
|
||||||
possible_match = [k for k in _document_registry.keys()
|
possible_match = [
|
||||||
if k.endswith(compound_end) or k == single_end]
|
k for k in _document_registry if k.endswith(compound_end) or k == single_end
|
||||||
|
]
|
||||||
if len(possible_match) == 1:
|
if len(possible_match) == 1:
|
||||||
doc = _document_registry.get(possible_match.pop(), None)
|
doc = _document_registry.get(possible_match.pop(), None)
|
||||||
if not doc:
|
if not doc:
|
||||||
raise NotRegistered("""
|
raise NotRegistered(
|
||||||
|
"""
|
||||||
`%s` has not been registered in the document registry.
|
`%s` has not been registered in the document registry.
|
||||||
Importing the document class automatically registers it, has it
|
Importing the document class automatically registers it, has it
|
||||||
been imported?
|
been imported?
|
||||||
""".strip() % name)
|
""".strip()
|
||||||
|
% name
|
||||||
|
)
|
||||||
return doc
|
return doc
|
||||||
|
|
||||||
|
|
||||||
|
def _get_documents_by_db(connection_alias, default_connection_alias):
|
||||||
|
"""Get all registered Documents class attached to a given database"""
|
||||||
|
|
||||||
|
def get_doc_alias(doc_cls):
|
||||||
|
return doc_cls._meta.get("db_alias", default_connection_alias)
|
||||||
|
|
||||||
|
return [
|
||||||
|
doc_cls
|
||||||
|
for doc_cls in _document_registry.values()
|
||||||
|
if get_doc_alias(doc_cls) == connection_alias
|
||||||
|
]
|
||||||
|
|||||||
@@ -1,50 +1,81 @@
|
|||||||
import weakref
|
import weakref
|
||||||
from mongoengine.common import _import_class
|
|
||||||
|
|
||||||
__all__ = ("BaseDict", "BaseList")
|
from bson import DBRef
|
||||||
|
|
||||||
|
from mongoengine.common import _import_class
|
||||||
|
from mongoengine.errors import DoesNotExist, MultipleObjectsReturned
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"BaseDict",
|
||||||
|
"StrictDict",
|
||||||
|
"BaseList",
|
||||||
|
"EmbeddedDocumentList",
|
||||||
|
"LazyReference",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def mark_as_changed_wrapper(parent_method):
|
||||||
|
"""Decorator that ensures _mark_as_changed method gets called."""
|
||||||
|
|
||||||
|
def wrapper(self, *args, **kwargs):
|
||||||
|
# Can't use super() in the decorator.
|
||||||
|
result = parent_method(self, *args, **kwargs)
|
||||||
|
self._mark_as_changed()
|
||||||
|
return result
|
||||||
|
|
||||||
|
return wrapper
|
||||||
|
|
||||||
|
|
||||||
|
def mark_key_as_changed_wrapper(parent_method):
|
||||||
|
"""Decorator that ensures _mark_as_changed method gets called with the key argument"""
|
||||||
|
|
||||||
|
def wrapper(self, key, *args, **kwargs):
|
||||||
|
# Can't use super() in the decorator.
|
||||||
|
result = parent_method(self, key, *args, **kwargs)
|
||||||
|
self._mark_as_changed(key)
|
||||||
|
return result
|
||||||
|
|
||||||
|
return wrapper
|
||||||
|
|
||||||
|
|
||||||
class BaseDict(dict):
|
class BaseDict(dict):
|
||||||
"""A special dict so we can watch any changes
|
"""A special dict so we can watch any changes."""
|
||||||
"""
|
|
||||||
|
|
||||||
_dereferenced = False
|
_dereferenced = False
|
||||||
_instance = None
|
_instance = None
|
||||||
_name = None
|
_name = None
|
||||||
|
|
||||||
def __init__(self, dict_items, instance, name):
|
def __init__(self, dict_items, instance, name):
|
||||||
Document = _import_class('Document')
|
BaseDocument = _import_class("BaseDocument")
|
||||||
EmbeddedDocument = _import_class('EmbeddedDocument')
|
|
||||||
|
|
||||||
if isinstance(instance, (Document, EmbeddedDocument)):
|
if isinstance(instance, BaseDocument):
|
||||||
self._instance = weakref.proxy(instance)
|
self._instance = weakref.proxy(instance)
|
||||||
self._name = name
|
self._name = name
|
||||||
return super(BaseDict, self).__init__(dict_items)
|
super().__init__(dict_items)
|
||||||
|
|
||||||
def __getitem__(self, *args, **kwargs):
|
def get(self, key, default=None):
|
||||||
value = super(BaseDict, self).__getitem__(*args, **kwargs)
|
# get does not use __getitem__ by default so we must override it as well
|
||||||
|
try:
|
||||||
|
return self.__getitem__(key)
|
||||||
|
except KeyError:
|
||||||
|
return default
|
||||||
|
|
||||||
EmbeddedDocument = _import_class('EmbeddedDocument')
|
def __getitem__(self, key):
|
||||||
|
value = super().__getitem__(key)
|
||||||
|
|
||||||
|
EmbeddedDocument = _import_class("EmbeddedDocument")
|
||||||
if isinstance(value, EmbeddedDocument) and value._instance is None:
|
if isinstance(value, EmbeddedDocument) and value._instance is None:
|
||||||
value._instance = self._instance
|
value._instance = self._instance
|
||||||
|
elif isinstance(value, dict) and not isinstance(value, BaseDict):
|
||||||
|
value = BaseDict(value, None, "{}.{}".format(self._name, key))
|
||||||
|
super().__setitem__(key, value)
|
||||||
|
value._instance = self._instance
|
||||||
|
elif isinstance(value, list) and not isinstance(value, BaseList):
|
||||||
|
value = BaseList(value, None, "{}.{}".format(self._name, key))
|
||||||
|
super().__setitem__(key, value)
|
||||||
|
value._instance = self._instance
|
||||||
return value
|
return value
|
||||||
|
|
||||||
def __setitem__(self, *args, **kwargs):
|
|
||||||
self._mark_as_changed()
|
|
||||||
return super(BaseDict, self).__setitem__(*args, **kwargs)
|
|
||||||
|
|
||||||
def __delete__(self, *args, **kwargs):
|
|
||||||
self._mark_as_changed()
|
|
||||||
return super(BaseDict, self).__delete__(*args, **kwargs)
|
|
||||||
|
|
||||||
def __delitem__(self, *args, **kwargs):
|
|
||||||
self._mark_as_changed()
|
|
||||||
return super(BaseDict, self).__delitem__(*args, **kwargs)
|
|
||||||
|
|
||||||
def __delattr__(self, *args, **kwargs):
|
|
||||||
self._mark_as_changed()
|
|
||||||
return super(BaseDict, self).__delattr__(*args, **kwargs)
|
|
||||||
|
|
||||||
def __getstate__(self):
|
def __getstate__(self):
|
||||||
self.instance = None
|
self.instance = None
|
||||||
self._dereferenced = False
|
self._dereferenced = False
|
||||||
@@ -54,67 +85,66 @@ class BaseDict(dict):
|
|||||||
self = state
|
self = state
|
||||||
return self
|
return self
|
||||||
|
|
||||||
def clear(self, *args, **kwargs):
|
__setitem__ = mark_key_as_changed_wrapper(dict.__setitem__)
|
||||||
self._mark_as_changed()
|
__delattr__ = mark_key_as_changed_wrapper(dict.__delattr__)
|
||||||
return super(BaseDict, self).clear(*args, **kwargs)
|
__delitem__ = mark_key_as_changed_wrapper(dict.__delitem__)
|
||||||
|
pop = mark_as_changed_wrapper(dict.pop)
|
||||||
|
clear = mark_as_changed_wrapper(dict.clear)
|
||||||
|
update = mark_as_changed_wrapper(dict.update)
|
||||||
|
popitem = mark_as_changed_wrapper(dict.popitem)
|
||||||
|
setdefault = mark_as_changed_wrapper(dict.setdefault)
|
||||||
|
|
||||||
def pop(self, *args, **kwargs):
|
def _mark_as_changed(self, key=None):
|
||||||
self._mark_as_changed()
|
if hasattr(self._instance, "_mark_as_changed"):
|
||||||
return super(BaseDict, self).pop(*args, **kwargs)
|
if key:
|
||||||
|
self._instance._mark_as_changed("{}.{}".format(self._name, key))
|
||||||
def popitem(self, *args, **kwargs):
|
else:
|
||||||
self._mark_as_changed()
|
self._instance._mark_as_changed(self._name)
|
||||||
return super(BaseDict, self).popitem(*args, **kwargs)
|
|
||||||
|
|
||||||
def update(self, *args, **kwargs):
|
|
||||||
self._mark_as_changed()
|
|
||||||
return super(BaseDict, self).update(*args, **kwargs)
|
|
||||||
|
|
||||||
def _mark_as_changed(self):
|
|
||||||
if hasattr(self._instance, '_mark_as_changed'):
|
|
||||||
self._instance._mark_as_changed(self._name)
|
|
||||||
|
|
||||||
|
|
||||||
class BaseList(list):
|
class BaseList(list):
|
||||||
"""A special list so we can watch any changes
|
"""A special list so we can watch any changes."""
|
||||||
"""
|
|
||||||
|
|
||||||
_dereferenced = False
|
_dereferenced = False
|
||||||
_instance = None
|
_instance = None
|
||||||
_name = None
|
_name = None
|
||||||
|
|
||||||
def __init__(self, list_items, instance, name):
|
def __init__(self, list_items, instance, name):
|
||||||
Document = _import_class('Document')
|
BaseDocument = _import_class("BaseDocument")
|
||||||
EmbeddedDocument = _import_class('EmbeddedDocument')
|
|
||||||
|
|
||||||
if isinstance(instance, (Document, EmbeddedDocument)):
|
if isinstance(instance, BaseDocument):
|
||||||
self._instance = weakref.proxy(instance)
|
self._instance = weakref.proxy(instance)
|
||||||
self._name = name
|
self._name = name
|
||||||
return super(BaseList, self).__init__(list_items)
|
super().__init__(list_items)
|
||||||
|
|
||||||
def __getitem__(self, *args, **kwargs):
|
def __getitem__(self, key):
|
||||||
value = super(BaseList, self).__getitem__(*args, **kwargs)
|
# change index to positive value because MongoDB does not support negative one
|
||||||
|
if isinstance(key, int) and key < 0:
|
||||||
|
key = len(self) + key
|
||||||
|
value = super().__getitem__(key)
|
||||||
|
|
||||||
EmbeddedDocument = _import_class('EmbeddedDocument')
|
if isinstance(key, slice):
|
||||||
|
# When receiving a slice operator, we don't convert the structure and bind
|
||||||
|
# to parent's instance. This is buggy for now but would require more work to be handled properly
|
||||||
|
return value
|
||||||
|
|
||||||
|
EmbeddedDocument = _import_class("EmbeddedDocument")
|
||||||
if isinstance(value, EmbeddedDocument) and value._instance is None:
|
if isinstance(value, EmbeddedDocument) and value._instance is None:
|
||||||
value._instance = self._instance
|
value._instance = self._instance
|
||||||
|
elif isinstance(value, dict) and not isinstance(value, BaseDict):
|
||||||
|
# Replace dict by BaseDict
|
||||||
|
value = BaseDict(value, None, "{}.{}".format(self._name, key))
|
||||||
|
super().__setitem__(key, value)
|
||||||
|
value._instance = self._instance
|
||||||
|
elif isinstance(value, list) and not isinstance(value, BaseList):
|
||||||
|
# Replace list by BaseList
|
||||||
|
value = BaseList(value, None, "{}.{}".format(self._name, key))
|
||||||
|
super().__setitem__(key, value)
|
||||||
|
value._instance = self._instance
|
||||||
return value
|
return value
|
||||||
|
|
||||||
def __setitem__(self, *args, **kwargs):
|
def __iter__(self):
|
||||||
self._mark_as_changed()
|
yield from super().__iter__()
|
||||||
return super(BaseList, self).__setitem__(*args, **kwargs)
|
|
||||||
|
|
||||||
def __delitem__(self, *args, **kwargs):
|
|
||||||
self._mark_as_changed()
|
|
||||||
return super(BaseList, self).__delitem__(*args, **kwargs)
|
|
||||||
|
|
||||||
def __setslice__(self, *args, **kwargs):
|
|
||||||
self._mark_as_changed()
|
|
||||||
return super(BaseList, self).__setslice__(*args, **kwargs)
|
|
||||||
|
|
||||||
def __delslice__(self, *args, **kwargs):
|
|
||||||
self._mark_as_changed()
|
|
||||||
return super(BaseList, self).__delslice__(*args, **kwargs)
|
|
||||||
|
|
||||||
def __getstate__(self):
|
def __getstate__(self):
|
||||||
self.instance = None
|
self.instance = None
|
||||||
@@ -125,34 +155,321 @@ class BaseList(list):
|
|||||||
self = state
|
self = state
|
||||||
return self
|
return self
|
||||||
|
|
||||||
def append(self, *args, **kwargs):
|
def __setitem__(self, key, value):
|
||||||
self._mark_as_changed()
|
changed_key = key
|
||||||
return super(BaseList, self).append(*args, **kwargs)
|
if isinstance(key, slice):
|
||||||
|
# In case of slice, we don't bother to identify the exact elements being updated
|
||||||
|
# instead, we simply marks the whole list as changed
|
||||||
|
changed_key = None
|
||||||
|
|
||||||
def extend(self, *args, **kwargs):
|
result = super().__setitem__(key, value)
|
||||||
self._mark_as_changed()
|
self._mark_as_changed(changed_key)
|
||||||
return super(BaseList, self).extend(*args, **kwargs)
|
return result
|
||||||
|
|
||||||
def insert(self, *args, **kwargs):
|
append = mark_as_changed_wrapper(list.append)
|
||||||
self._mark_as_changed()
|
extend = mark_as_changed_wrapper(list.extend)
|
||||||
return super(BaseList, self).insert(*args, **kwargs)
|
insert = mark_as_changed_wrapper(list.insert)
|
||||||
|
pop = mark_as_changed_wrapper(list.pop)
|
||||||
|
remove = mark_as_changed_wrapper(list.remove)
|
||||||
|
reverse = mark_as_changed_wrapper(list.reverse)
|
||||||
|
sort = mark_as_changed_wrapper(list.sort)
|
||||||
|
__delitem__ = mark_as_changed_wrapper(list.__delitem__)
|
||||||
|
__iadd__ = mark_as_changed_wrapper(list.__iadd__)
|
||||||
|
__imul__ = mark_as_changed_wrapper(list.__imul__)
|
||||||
|
|
||||||
def pop(self, *args, **kwargs):
|
def _mark_as_changed(self, key=None):
|
||||||
self._mark_as_changed()
|
if hasattr(self._instance, "_mark_as_changed"):
|
||||||
return super(BaseList, self).pop(*args, **kwargs)
|
if key is not None:
|
||||||
|
self._instance._mark_as_changed(
|
||||||
|
"{}.{}".format(self._name, key % len(self))
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
self._instance._mark_as_changed(self._name)
|
||||||
|
|
||||||
def remove(self, *args, **kwargs):
|
|
||||||
self._mark_as_changed()
|
|
||||||
return super(BaseList, self).remove(*args, **kwargs)
|
|
||||||
|
|
||||||
def reverse(self, *args, **kwargs):
|
class EmbeddedDocumentList(BaseList):
|
||||||
self._mark_as_changed()
|
def __init__(self, list_items, instance, name):
|
||||||
return super(BaseList, self).reverse(*args, **kwargs)
|
super().__init__(list_items, instance, name)
|
||||||
|
self._instance = instance
|
||||||
|
|
||||||
def sort(self, *args, **kwargs):
|
@classmethod
|
||||||
self._mark_as_changed()
|
def __match_all(cls, embedded_doc, kwargs):
|
||||||
return super(BaseList, self).sort(*args, **kwargs)
|
"""Return True if a given embedded doc matches all the filter
|
||||||
|
kwargs. If it doesn't return False.
|
||||||
|
"""
|
||||||
|
for key, expected_value in kwargs.items():
|
||||||
|
doc_val = getattr(embedded_doc, key)
|
||||||
|
if doc_val != expected_value and str(doc_val) != expected_value:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
def _mark_as_changed(self):
|
@classmethod
|
||||||
if hasattr(self._instance, '_mark_as_changed'):
|
def __only_matches(cls, embedded_docs, kwargs):
|
||||||
self._instance._mark_as_changed(self._name)
|
"""Return embedded docs that match the filter kwargs."""
|
||||||
|
if not kwargs:
|
||||||
|
return embedded_docs
|
||||||
|
return [doc for doc in embedded_docs if cls.__match_all(doc, kwargs)]
|
||||||
|
|
||||||
|
def filter(self, **kwargs):
|
||||||
|
"""
|
||||||
|
Filters the list by only including embedded documents with the
|
||||||
|
given keyword arguments.
|
||||||
|
|
||||||
|
This method only supports simple comparison (e.g. .filter(name='John Doe'))
|
||||||
|
and does not support operators like __gte, __lte, __icontains like queryset.filter does
|
||||||
|
|
||||||
|
:param kwargs: The keyword arguments corresponding to the fields to
|
||||||
|
filter on. *Multiple arguments are treated as if they are ANDed
|
||||||
|
together.*
|
||||||
|
:return: A new ``EmbeddedDocumentList`` containing the matching
|
||||||
|
embedded documents.
|
||||||
|
|
||||||
|
Raises ``AttributeError`` if a given keyword is not a valid field for
|
||||||
|
the embedded document class.
|
||||||
|
"""
|
||||||
|
values = self.__only_matches(self, kwargs)
|
||||||
|
return EmbeddedDocumentList(values, self._instance, self._name)
|
||||||
|
|
||||||
|
def exclude(self, **kwargs):
|
||||||
|
"""
|
||||||
|
Filters the list by excluding embedded documents with the given
|
||||||
|
keyword arguments.
|
||||||
|
|
||||||
|
:param kwargs: The keyword arguments corresponding to the fields to
|
||||||
|
exclude on. *Multiple arguments are treated as if they are ANDed
|
||||||
|
together.*
|
||||||
|
:return: A new ``EmbeddedDocumentList`` containing the non-matching
|
||||||
|
embedded documents.
|
||||||
|
|
||||||
|
Raises ``AttributeError`` if a given keyword is not a valid field for
|
||||||
|
the embedded document class.
|
||||||
|
"""
|
||||||
|
exclude = self.__only_matches(self, kwargs)
|
||||||
|
values = [item for item in self if item not in exclude]
|
||||||
|
return EmbeddedDocumentList(values, self._instance, self._name)
|
||||||
|
|
||||||
|
def count(self):
|
||||||
|
"""
|
||||||
|
The number of embedded documents in the list.
|
||||||
|
|
||||||
|
:return: The length of the list, equivalent to the result of ``len()``.
|
||||||
|
"""
|
||||||
|
return len(self)
|
||||||
|
|
||||||
|
def get(self, **kwargs):
|
||||||
|
"""
|
||||||
|
Retrieves an embedded document determined by the given keyword
|
||||||
|
arguments.
|
||||||
|
|
||||||
|
:param kwargs: The keyword arguments corresponding to the fields to
|
||||||
|
search on. *Multiple arguments are treated as if they are ANDed
|
||||||
|
together.*
|
||||||
|
:return: The embedded document matched by the given keyword arguments.
|
||||||
|
|
||||||
|
Raises ``DoesNotExist`` if the arguments used to query an embedded
|
||||||
|
document returns no results. ``MultipleObjectsReturned`` if more
|
||||||
|
than one result is returned.
|
||||||
|
"""
|
||||||
|
values = self.__only_matches(self, kwargs)
|
||||||
|
if len(values) == 0:
|
||||||
|
raise DoesNotExist("%s matching query does not exist." % self._name)
|
||||||
|
elif len(values) > 1:
|
||||||
|
raise MultipleObjectsReturned(
|
||||||
|
"%d items returned, instead of 1" % len(values)
|
||||||
|
)
|
||||||
|
|
||||||
|
return values[0]
|
||||||
|
|
||||||
|
def first(self):
|
||||||
|
"""Return the first embedded document in the list, or ``None``
|
||||||
|
if empty.
|
||||||
|
"""
|
||||||
|
if len(self) > 0:
|
||||||
|
return self[0]
|
||||||
|
|
||||||
|
def create(self, **values):
|
||||||
|
"""
|
||||||
|
Creates a new instance of the EmbeddedDocument and appends it to this EmbeddedDocumentList.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
the instance of the EmbeddedDocument is not automatically saved to the database.
|
||||||
|
You still need to call .save() on the parent Document.
|
||||||
|
|
||||||
|
:param values: A dictionary of values for the embedded document.
|
||||||
|
:return: The new embedded document instance.
|
||||||
|
"""
|
||||||
|
name = self._name
|
||||||
|
EmbeddedClass = self._instance._fields[name].field.document_type_obj
|
||||||
|
self._instance[self._name].append(EmbeddedClass(**values))
|
||||||
|
|
||||||
|
return self._instance[self._name][-1]
|
||||||
|
|
||||||
|
def save(self, *args, **kwargs):
|
||||||
|
"""
|
||||||
|
Saves the ancestor document.
|
||||||
|
|
||||||
|
:param args: Arguments passed up to the ancestor Document's save
|
||||||
|
method.
|
||||||
|
:param kwargs: Keyword arguments passed up to the ancestor Document's
|
||||||
|
save method.
|
||||||
|
"""
|
||||||
|
self._instance.save(*args, **kwargs)
|
||||||
|
|
||||||
|
def delete(self):
|
||||||
|
"""
|
||||||
|
Deletes the embedded documents from the database.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
The embedded document changes are not automatically saved
|
||||||
|
to the database after calling this method.
|
||||||
|
|
||||||
|
:return: The number of entries deleted.
|
||||||
|
"""
|
||||||
|
values = list(self)
|
||||||
|
for item in values:
|
||||||
|
self._instance[self._name].remove(item)
|
||||||
|
|
||||||
|
return len(values)
|
||||||
|
|
||||||
|
def update(self, **update):
|
||||||
|
"""
|
||||||
|
Updates the embedded documents with the given replacement values. This
|
||||||
|
function does not support mongoDB update operators such as ``inc__``.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
The embedded document changes are not automatically saved
|
||||||
|
to the database after calling this method.
|
||||||
|
|
||||||
|
:param update: A dictionary of update values to apply to each
|
||||||
|
embedded document.
|
||||||
|
:return: The number of entries updated.
|
||||||
|
"""
|
||||||
|
if len(update) == 0:
|
||||||
|
return 0
|
||||||
|
values = list(self)
|
||||||
|
for item in values:
|
||||||
|
for k, v in update.items():
|
||||||
|
setattr(item, k, v)
|
||||||
|
|
||||||
|
return len(values)
|
||||||
|
|
||||||
|
|
||||||
|
class StrictDict:
|
||||||
|
__slots__ = ()
|
||||||
|
_special_fields = {"get", "pop", "iteritems", "items", "keys", "create"}
|
||||||
|
_classes = {}
|
||||||
|
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
for k, v in kwargs.items():
|
||||||
|
setattr(self, k, v)
|
||||||
|
|
||||||
|
def __getitem__(self, key):
|
||||||
|
key = "_reserved_" + key if key in self._special_fields else key
|
||||||
|
try:
|
||||||
|
return getattr(self, key)
|
||||||
|
except AttributeError:
|
||||||
|
raise KeyError(key)
|
||||||
|
|
||||||
|
def __setitem__(self, key, value):
|
||||||
|
key = "_reserved_" + key if key in self._special_fields else key
|
||||||
|
return setattr(self, key, value)
|
||||||
|
|
||||||
|
def __contains__(self, key):
|
||||||
|
return hasattr(self, key)
|
||||||
|
|
||||||
|
def get(self, key, default=None):
|
||||||
|
try:
|
||||||
|
return self[key]
|
||||||
|
except KeyError:
|
||||||
|
return default
|
||||||
|
|
||||||
|
def pop(self, key, default=None):
|
||||||
|
v = self.get(key, default)
|
||||||
|
try:
|
||||||
|
delattr(self, key)
|
||||||
|
except AttributeError:
|
||||||
|
pass
|
||||||
|
return v
|
||||||
|
|
||||||
|
def iteritems(self):
|
||||||
|
for key in self:
|
||||||
|
yield key, self[key]
|
||||||
|
|
||||||
|
def items(self):
|
||||||
|
return [(k, self[k]) for k in iter(self)]
|
||||||
|
|
||||||
|
def iterkeys(self):
|
||||||
|
return iter(self)
|
||||||
|
|
||||||
|
def keys(self):
|
||||||
|
return list(iter(self))
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
return (key for key in self.__slots__ if hasattr(self, key))
|
||||||
|
|
||||||
|
def __len__(self):
|
||||||
|
return len(list(self.items()))
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
return list(self.items()) == list(other.items())
|
||||||
|
|
||||||
|
def __ne__(self, other):
|
||||||
|
return not (self == other)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def create(cls, allowed_keys):
|
||||||
|
allowed_keys_tuple = tuple(
|
||||||
|
("_reserved_" + k if k in cls._special_fields else k) for k in allowed_keys
|
||||||
|
)
|
||||||
|
allowed_keys = frozenset(allowed_keys_tuple)
|
||||||
|
if allowed_keys not in cls._classes:
|
||||||
|
|
||||||
|
class SpecificStrictDict(cls):
|
||||||
|
__slots__ = allowed_keys_tuple
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return "{%s}" % ", ".join(
|
||||||
|
'"{!s}": {!r}'.format(k, v) for k, v in self.items()
|
||||||
|
)
|
||||||
|
|
||||||
|
cls._classes[allowed_keys] = SpecificStrictDict
|
||||||
|
return cls._classes[allowed_keys]
|
||||||
|
|
||||||
|
|
||||||
|
class LazyReference(DBRef):
|
||||||
|
__slots__ = ("_cached_doc", "passthrough", "document_type")
|
||||||
|
|
||||||
|
def fetch(self, force=False):
|
||||||
|
if not self._cached_doc or force:
|
||||||
|
self._cached_doc = self.document_type.objects.get(pk=self.pk)
|
||||||
|
if not self._cached_doc:
|
||||||
|
raise DoesNotExist("Trying to dereference unknown document %s" % (self))
|
||||||
|
return self._cached_doc
|
||||||
|
|
||||||
|
@property
|
||||||
|
def pk(self):
|
||||||
|
return self.id
|
||||||
|
|
||||||
|
def __init__(self, document_type, pk, cached_doc=None, passthrough=False):
|
||||||
|
self.document_type = document_type
|
||||||
|
self._cached_doc = cached_doc
|
||||||
|
self.passthrough = passthrough
|
||||||
|
super().__init__(self.document_type._get_collection_name(), pk)
|
||||||
|
|
||||||
|
def __getitem__(self, name):
|
||||||
|
if not self.passthrough:
|
||||||
|
raise KeyError()
|
||||||
|
document = self.fetch()
|
||||||
|
return document[name]
|
||||||
|
|
||||||
|
def __getattr__(self, name):
|
||||||
|
if not object.__getattribute__(self, "passthrough"):
|
||||||
|
raise AttributeError()
|
||||||
|
document = self.fetch()
|
||||||
|
try:
|
||||||
|
return document[name]
|
||||||
|
except KeyError:
|
||||||
|
raise AttributeError()
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return "<LazyReference({}, {!r})>".format(self.document_type, self.pk)
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -5,16 +5,15 @@ import weakref
|
|||||||
from bson import DBRef, ObjectId, SON
|
from bson import DBRef, ObjectId, SON
|
||||||
import pymongo
|
import pymongo
|
||||||
|
|
||||||
|
from mongoengine.base.common import UPDATE_OPERATORS
|
||||||
|
from mongoengine.base.datastructures import BaseDict, BaseList, EmbeddedDocumentList
|
||||||
from mongoengine.common import _import_class
|
from mongoengine.common import _import_class
|
||||||
from mongoengine.errors import ValidationError
|
from mongoengine.errors import DeprecatedError, ValidationError
|
||||||
|
|
||||||
from mongoengine.base.common import ALLOW_INHERITANCE
|
|
||||||
from mongoengine.base.datastructures import BaseDict, BaseList
|
|
||||||
|
|
||||||
__all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField")
|
__all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField")
|
||||||
|
|
||||||
|
|
||||||
class BaseField(object):
|
class BaseField:
|
||||||
"""A base class for fields in a MongoDB document. Instances of this class
|
"""A base class for fields in a MongoDB document. Instances of this class
|
||||||
may be added to subclasses of `Document` to define a document's schema.
|
may be added to subclasses of `Document` to define a document's schema.
|
||||||
|
|
||||||
@@ -32,37 +31,48 @@ class BaseField(object):
|
|||||||
creation_counter = 0
|
creation_counter = 0
|
||||||
auto_creation_counter = -1
|
auto_creation_counter = -1
|
||||||
|
|
||||||
def __init__(self, db_field=None, name=None, required=False, default=None,
|
def __init__(
|
||||||
unique=False, unique_with=None, primary_key=False,
|
self,
|
||||||
validation=None, choices=None, verbose_name=None,
|
db_field=None,
|
||||||
help_text=None):
|
required=False,
|
||||||
|
default=None,
|
||||||
|
unique=False,
|
||||||
|
unique_with=None,
|
||||||
|
primary_key=False,
|
||||||
|
validation=None,
|
||||||
|
choices=None,
|
||||||
|
null=False,
|
||||||
|
sparse=False,
|
||||||
|
**kwargs
|
||||||
|
):
|
||||||
"""
|
"""
|
||||||
:param db_field: The database field to store this field in
|
:param db_field: The database field to store this field in
|
||||||
(defaults to the name of the field)
|
(defaults to the name of the field)
|
||||||
:param name: Depreciated - use db_field
|
|
||||||
:param required: If the field is required. Whether it has to have a
|
:param required: If the field is required. Whether it has to have a
|
||||||
value or not. Defaults to False.
|
value or not. Defaults to False.
|
||||||
:param default: (optional) The default value for this field if no value
|
:param default: (optional) The default value for this field if no value
|
||||||
has been set (or if the value has been unset). It Can be a
|
has been set (or if the value has been unset). It can be a
|
||||||
callable.
|
callable.
|
||||||
:param unique: Is the field value unique or not. Defaults to False.
|
:param unique: Is the field value unique or not. Defaults to False.
|
||||||
:param unique_with: (optional) The other field this field should be
|
:param unique_with: (optional) The other field this field should be
|
||||||
unique with.
|
unique with.
|
||||||
:param primary_key: Mark this field as the primary key. Defaults to False.
|
:param primary_key: Mark this field as the primary key. Defaults to False.
|
||||||
:param validation: (optional) A callable to validate the value of the
|
:param validation: (optional) A callable to validate the value of the
|
||||||
field. Generally this is deprecated in favour of the
|
field. The callable takes the value as parameter and should raise
|
||||||
`FIELD.validate` method
|
a ValidationError if validation fails
|
||||||
:param choices: (optional) The valid choices
|
:param choices: (optional) The valid choices
|
||||||
:param verbose_name: (optional) The verbose name for the field.
|
:param null: (optional) If the field value can be null. If no and there is a default value
|
||||||
Designed to be human readable and is often used when generating
|
then the default value is set
|
||||||
model forms from the document model.
|
:param sparse: (optional) `sparse=True` combined with `unique=True` and `required=False`
|
||||||
:param help_text: (optional) The help text for this field and is often
|
means that uniqueness won't be enforced for `None` values
|
||||||
used when generating model forms from the document model.
|
:param **kwargs: (optional) Arbitrary indirection-free metadata for
|
||||||
|
this field can be supplied as additional keyword arguments and
|
||||||
|
accessed as attributes of the field. Must not conflict with any
|
||||||
|
existing attributes. Common metadata includes `verbose_name` and
|
||||||
|
`help_text`.
|
||||||
"""
|
"""
|
||||||
self.db_field = (db_field or name) if not primary_key else '_id'
|
self.db_field = db_field if not primary_key else "_id"
|
||||||
if name:
|
|
||||||
msg = "Fields' 'name' attribute deprecated in favour of 'db_field'"
|
|
||||||
warnings.warn(msg, DeprecationWarning)
|
|
||||||
self.required = required or primary_key
|
self.required = required or primary_key
|
||||||
self.default = default
|
self.default = default
|
||||||
self.unique = bool(unique or unique_with)
|
self.unique = bool(unique or unique_with)
|
||||||
@@ -70,11 +80,39 @@ class BaseField(object):
|
|||||||
self.primary_key = primary_key
|
self.primary_key = primary_key
|
||||||
self.validation = validation
|
self.validation = validation
|
||||||
self.choices = choices
|
self.choices = choices
|
||||||
self.verbose_name = verbose_name
|
self.null = null
|
||||||
self.help_text = help_text
|
self.sparse = sparse
|
||||||
|
self._owner_document = None
|
||||||
|
|
||||||
|
# Make sure db_field is a string (if it's explicitly defined).
|
||||||
|
if self.db_field is not None and not isinstance(self.db_field, str):
|
||||||
|
raise TypeError("db_field should be a string.")
|
||||||
|
|
||||||
|
# Make sure db_field doesn't contain any forbidden characters.
|
||||||
|
if isinstance(self.db_field, str) and (
|
||||||
|
"." in self.db_field
|
||||||
|
or "\0" in self.db_field
|
||||||
|
or self.db_field.startswith("$")
|
||||||
|
):
|
||||||
|
raise ValueError(
|
||||||
|
'field names cannot contain dots (".") or null characters '
|
||||||
|
'("\\0"), and they must not start with a dollar sign ("$").'
|
||||||
|
)
|
||||||
|
|
||||||
|
# Detect and report conflicts between metadata and base properties.
|
||||||
|
conflicts = set(dir(self)) & set(kwargs)
|
||||||
|
if conflicts:
|
||||||
|
raise TypeError(
|
||||||
|
"%s already has attribute(s): %s"
|
||||||
|
% (self.__class__.__name__, ", ".join(conflicts))
|
||||||
|
)
|
||||||
|
|
||||||
|
# Assign metadata to the instance
|
||||||
|
# This efficient method is available because no __slots__ are defined.
|
||||||
|
self.__dict__.update(kwargs)
|
||||||
|
|
||||||
# Adjust the appropriate creation counter, and save our local copy.
|
# Adjust the appropriate creation counter, and save our local copy.
|
||||||
if self.db_field == '_id':
|
if self.db_field == "_id":
|
||||||
self.creation_counter = BaseField.auto_creation_counter
|
self.creation_counter = BaseField.auto_creation_counter
|
||||||
BaseField.auto_creation_counter -= 1
|
BaseField.auto_creation_counter -= 1
|
||||||
else:
|
else:
|
||||||
@@ -92,87 +130,134 @@ class BaseField(object):
|
|||||||
return instance._data.get(self.name)
|
return instance._data.get(self.name)
|
||||||
|
|
||||||
def __set__(self, instance, value):
|
def __set__(self, instance, value):
|
||||||
"""Descriptor for assigning a value to a field in a document.
|
"""Descriptor for assigning a value to a field in a document."""
|
||||||
"""
|
# If setting to None and there is a default value provided for this
|
||||||
|
# field, then set the value to the default value.
|
||||||
# If setting to None and theres a default
|
if value is None:
|
||||||
# Then set the value to the default value
|
if self.null:
|
||||||
if value is None and self.default is not None:
|
value = None
|
||||||
value = self.default
|
elif self.default is not None:
|
||||||
if callable(value):
|
value = self.default
|
||||||
value = value()
|
if callable(value):
|
||||||
|
value = value()
|
||||||
|
|
||||||
if instance._initialised:
|
if instance._initialised:
|
||||||
try:
|
try:
|
||||||
if (self.name not in instance._data or
|
value_has_changed = (
|
||||||
instance._data[self.name] != value):
|
self.name not in instance._data
|
||||||
|
or instance._data[self.name] != value
|
||||||
|
)
|
||||||
|
if value_has_changed:
|
||||||
instance._mark_as_changed(self.name)
|
instance._mark_as_changed(self.name)
|
||||||
except:
|
except Exception:
|
||||||
# Values cant be compared eg: naive and tz datetimes
|
# Some values can't be compared and throw an error when we
|
||||||
# So mark it as changed
|
# attempt to do so (e.g. tz-naive and tz-aware datetimes).
|
||||||
|
# Mark the field as changed in such cases.
|
||||||
instance._mark_as_changed(self.name)
|
instance._mark_as_changed(self.name)
|
||||||
|
|
||||||
EmbeddedDocument = _import_class('EmbeddedDocument')
|
EmbeddedDocument = _import_class("EmbeddedDocument")
|
||||||
if isinstance(value, EmbeddedDocument) and value._instance is None:
|
if isinstance(value, EmbeddedDocument):
|
||||||
value._instance = weakref.proxy(instance)
|
value._instance = weakref.proxy(instance)
|
||||||
|
elif isinstance(value, (list, tuple)):
|
||||||
|
for v in value:
|
||||||
|
if isinstance(v, EmbeddedDocument):
|
||||||
|
v._instance = weakref.proxy(instance)
|
||||||
|
|
||||||
instance._data[self.name] = value
|
instance._data[self.name] = value
|
||||||
|
|
||||||
def error(self, message="", errors=None, field_name=None):
|
def error(self, message="", errors=None, field_name=None):
|
||||||
"""Raises a ValidationError.
|
"""Raise a ValidationError."""
|
||||||
"""
|
|
||||||
field_name = field_name if field_name else self.name
|
field_name = field_name if field_name else self.name
|
||||||
raise ValidationError(message, errors=errors, field_name=field_name)
|
raise ValidationError(message, errors=errors, field_name=field_name)
|
||||||
|
|
||||||
def to_python(self, value):
|
def to_python(self, value):
|
||||||
"""Convert a MongoDB-compatible type to a Python type.
|
"""Convert a MongoDB-compatible type to a Python type."""
|
||||||
"""
|
|
||||||
return value
|
return value
|
||||||
|
|
||||||
def to_mongo(self, value):
|
def to_mongo(self, value):
|
||||||
"""Convert a Python type to a MongoDB-compatible type.
|
"""Convert a Python type to a MongoDB-compatible type."""
|
||||||
"""
|
|
||||||
return self.to_python(value)
|
return self.to_python(value)
|
||||||
|
|
||||||
|
def _to_mongo_safe_call(self, value, use_db_field=True, fields=None):
|
||||||
|
"""Helper method to call to_mongo with proper inputs."""
|
||||||
|
f_inputs = self.to_mongo.__code__.co_varnames
|
||||||
|
ex_vars = {}
|
||||||
|
if "fields" in f_inputs:
|
||||||
|
ex_vars["fields"] = fields
|
||||||
|
|
||||||
|
if "use_db_field" in f_inputs:
|
||||||
|
ex_vars["use_db_field"] = use_db_field
|
||||||
|
|
||||||
|
return self.to_mongo(value, **ex_vars)
|
||||||
|
|
||||||
def prepare_query_value(self, op, value):
|
def prepare_query_value(self, op, value):
|
||||||
"""Prepare a value that is being used in a query for PyMongo.
|
"""Prepare a value that is being used in a query for PyMongo."""
|
||||||
"""
|
if op in UPDATE_OPERATORS:
|
||||||
|
self.validate(value)
|
||||||
return value
|
return value
|
||||||
|
|
||||||
def validate(self, value, clean=True):
|
def validate(self, value, clean=True):
|
||||||
"""Perform validation on a value.
|
"""Perform validation on a value."""
|
||||||
"""
|
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
def _validate_choices(self, value):
|
||||||
|
Document = _import_class("Document")
|
||||||
|
EmbeddedDocument = _import_class("EmbeddedDocument")
|
||||||
|
|
||||||
|
choice_list = self.choices
|
||||||
|
if isinstance(next(iter(choice_list)), (list, tuple)):
|
||||||
|
# next(iter) is useful for sets
|
||||||
|
choice_list = [k for k, _ in choice_list]
|
||||||
|
|
||||||
|
# Choices which are other types of Documents
|
||||||
|
if isinstance(value, (Document, EmbeddedDocument)):
|
||||||
|
if not any(isinstance(value, c) for c in choice_list):
|
||||||
|
self.error("Value must be an instance of %s" % (choice_list))
|
||||||
|
# Choices which are types other than Documents
|
||||||
|
else:
|
||||||
|
values = value if isinstance(value, (list, tuple)) else [value]
|
||||||
|
if len(set(values) - set(choice_list)):
|
||||||
|
self.error("Value must be one of %s" % str(choice_list))
|
||||||
|
|
||||||
def _validate(self, value, **kwargs):
|
def _validate(self, value, **kwargs):
|
||||||
Document = _import_class('Document')
|
# Check the Choices Constraint
|
||||||
EmbeddedDocument = _import_class('EmbeddedDocument')
|
|
||||||
# check choices
|
|
||||||
if self.choices:
|
if self.choices:
|
||||||
is_cls = isinstance(value, (Document, EmbeddedDocument))
|
self._validate_choices(value)
|
||||||
value_to_check = value.__class__ if is_cls else value
|
|
||||||
err_msg = 'an instance' if is_cls else 'one'
|
|
||||||
if isinstance(self.choices[0], (list, tuple)):
|
|
||||||
option_keys = [k for k, v in self.choices]
|
|
||||||
if value_to_check not in option_keys:
|
|
||||||
msg = ('Value must be %s of %s' %
|
|
||||||
(err_msg, unicode(option_keys)))
|
|
||||||
self.error(msg)
|
|
||||||
elif value_to_check not in self.choices:
|
|
||||||
msg = ('Value must be %s of %s' %
|
|
||||||
(err_msg, unicode(self.choices)))
|
|
||||||
self.error(msg)
|
|
||||||
|
|
||||||
# check validation argument
|
# check validation argument
|
||||||
if self.validation is not None:
|
if self.validation is not None:
|
||||||
if callable(self.validation):
|
if callable(self.validation):
|
||||||
if not self.validation(value):
|
try:
|
||||||
self.error('Value does not match custom validation method')
|
# breaking change of 0.18
|
||||||
|
# Get rid of True/False-type return for the validation method
|
||||||
|
# in favor of having validation raising a ValidationError
|
||||||
|
ret = self.validation(value)
|
||||||
|
if ret is not None:
|
||||||
|
raise DeprecatedError(
|
||||||
|
"validation argument for `%s` must not return anything, "
|
||||||
|
"it should raise a ValidationError if validation fails"
|
||||||
|
% self.name
|
||||||
|
)
|
||||||
|
except ValidationError as ex:
|
||||||
|
self.error(str(ex))
|
||||||
else:
|
else:
|
||||||
raise ValueError('validation argument for "%s" must be a '
|
raise ValueError(
|
||||||
'callable.' % self.name)
|
'validation argument for `"%s"` must be a ' "callable." % self.name
|
||||||
|
)
|
||||||
|
|
||||||
self.validate(value, **kwargs)
|
self.validate(value, **kwargs)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def owner_document(self):
|
||||||
|
return self._owner_document
|
||||||
|
|
||||||
|
def _set_owner_document(self, owner_document):
|
||||||
|
self._owner_document = owner_document
|
||||||
|
|
||||||
|
@owner_document.setter
|
||||||
|
def owner_document(self, owner_document):
|
||||||
|
self._set_owner_document(owner_document)
|
||||||
|
|
||||||
|
|
||||||
class ComplexBaseField(BaseField):
|
class ComplexBaseField(BaseField):
|
||||||
"""Handles complex fields, such as lists / dictionaries.
|
"""Handles complex fields, such as lists / dictionaries.
|
||||||
@@ -187,182 +272,209 @@ class ComplexBaseField(BaseField):
|
|||||||
field = None
|
field = None
|
||||||
|
|
||||||
def __get__(self, instance, owner):
|
def __get__(self, instance, owner):
|
||||||
"""Descriptor to automatically dereference references.
|
"""Descriptor to automatically dereference references."""
|
||||||
"""
|
|
||||||
if instance is None:
|
if instance is None:
|
||||||
# Document class being used rather than a document object
|
# Document class being used rather than a document object
|
||||||
return self
|
return self
|
||||||
|
|
||||||
ReferenceField = _import_class('ReferenceField')
|
ReferenceField = _import_class("ReferenceField")
|
||||||
GenericReferenceField = _import_class('GenericReferenceField')
|
GenericReferenceField = _import_class("GenericReferenceField")
|
||||||
dereference = (self._auto_dereference and
|
EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField")
|
||||||
(self.field is None or isinstance(self.field,
|
|
||||||
(GenericReferenceField, ReferenceField))))
|
auto_dereference = instance._fields[self.name]._auto_dereference
|
||||||
|
|
||||||
|
dereference = auto_dereference and (
|
||||||
|
self.field is None
|
||||||
|
or isinstance(self.field, (GenericReferenceField, ReferenceField))
|
||||||
|
)
|
||||||
|
|
||||||
_dereference = _import_class("DeReference")()
|
_dereference = _import_class("DeReference")()
|
||||||
|
|
||||||
self._auto_dereference = instance._fields[self.name]._auto_dereference
|
if (
|
||||||
if instance._initialised and dereference and instance._data.get(self.name):
|
instance._initialised
|
||||||
|
and dereference
|
||||||
|
and instance._data.get(self.name)
|
||||||
|
and not getattr(instance._data[self.name], "_dereferenced", False)
|
||||||
|
):
|
||||||
instance._data[self.name] = _dereference(
|
instance._data[self.name] = _dereference(
|
||||||
instance._data.get(self.name), max_depth=1, instance=instance,
|
instance._data.get(self.name),
|
||||||
name=self.name
|
max_depth=1,
|
||||||
|
instance=instance,
|
||||||
|
name=self.name,
|
||||||
)
|
)
|
||||||
|
if hasattr(instance._data[self.name], "_dereferenced"):
|
||||||
|
instance._data[self.name]._dereferenced = True
|
||||||
|
|
||||||
value = super(ComplexBaseField, self).__get__(instance, owner)
|
value = super().__get__(instance, owner)
|
||||||
|
|
||||||
# Convert lists / values so we can watch for any changes on them
|
# Convert lists / values so we can watch for any changes on them
|
||||||
if (isinstance(value, (list, tuple)) and
|
if isinstance(value, (list, tuple)):
|
||||||
not isinstance(value, BaseList)):
|
if issubclass(type(self), EmbeddedDocumentListField) and not isinstance(
|
||||||
value = BaseList(value, instance, self.name)
|
value, EmbeddedDocumentList
|
||||||
|
):
|
||||||
|
value = EmbeddedDocumentList(value, instance, self.name)
|
||||||
|
elif not isinstance(value, BaseList):
|
||||||
|
value = BaseList(value, instance, self.name)
|
||||||
instance._data[self.name] = value
|
instance._data[self.name] = value
|
||||||
elif isinstance(value, dict) and not isinstance(value, BaseDict):
|
elif isinstance(value, dict) and not isinstance(value, BaseDict):
|
||||||
value = BaseDict(value, instance, self.name)
|
value = BaseDict(value, instance, self.name)
|
||||||
instance._data[self.name] = value
|
instance._data[self.name] = value
|
||||||
|
|
||||||
if (self._auto_dereference and instance._initialised and
|
if (
|
||||||
isinstance(value, (BaseList, BaseDict))
|
auto_dereference
|
||||||
and not value._dereferenced):
|
and instance._initialised
|
||||||
value = _dereference(
|
and isinstance(value, (BaseList, BaseDict))
|
||||||
value, max_depth=1, instance=instance, name=self.name
|
and not value._dereferenced
|
||||||
)
|
):
|
||||||
|
value = _dereference(value, max_depth=1, instance=instance, name=self.name)
|
||||||
value._dereferenced = True
|
value._dereferenced = True
|
||||||
instance._data[self.name] = value
|
instance._data[self.name] = value
|
||||||
|
|
||||||
return value
|
return value
|
||||||
|
|
||||||
def to_python(self, value):
|
def to_python(self, value):
|
||||||
"""Convert a MongoDB-compatible type to a Python type.
|
"""Convert a MongoDB-compatible type to a Python type."""
|
||||||
"""
|
if isinstance(value, str):
|
||||||
Document = _import_class('Document')
|
|
||||||
|
|
||||||
if isinstance(value, basestring):
|
|
||||||
return value
|
return value
|
||||||
|
|
||||||
if hasattr(value, 'to_python'):
|
if hasattr(value, "to_python"):
|
||||||
return value.to_python()
|
return value.to_python()
|
||||||
|
|
||||||
|
BaseDocument = _import_class("BaseDocument")
|
||||||
|
if isinstance(value, BaseDocument):
|
||||||
|
# Something is wrong, return the value as it is
|
||||||
|
return value
|
||||||
|
|
||||||
is_list = False
|
is_list = False
|
||||||
if not hasattr(value, 'items'):
|
if not hasattr(value, "items"):
|
||||||
try:
|
try:
|
||||||
is_list = True
|
is_list = True
|
||||||
value = dict([(k, v) for k, v in enumerate(value)])
|
value = {idx: v for idx, v in enumerate(value)}
|
||||||
except TypeError: # Not iterable return the value
|
except TypeError: # Not iterable return the value
|
||||||
return value
|
return value
|
||||||
|
|
||||||
if self.field:
|
if self.field:
|
||||||
value_dict = dict([(key, self.field.to_python(item))
|
self.field._auto_dereference = self._auto_dereference
|
||||||
for key, item in value.items()])
|
value_dict = {
|
||||||
|
key: self.field.to_python(item) for key, item in value.items()
|
||||||
|
}
|
||||||
|
else:
|
||||||
|
Document = _import_class("Document")
|
||||||
|
value_dict = {}
|
||||||
|
for k, v in value.items():
|
||||||
|
if isinstance(v, Document):
|
||||||
|
# We need the id from the saved object to create the DBRef
|
||||||
|
if v.pk is None:
|
||||||
|
self.error(
|
||||||
|
"You can only reference documents once they"
|
||||||
|
" have been saved to the database"
|
||||||
|
)
|
||||||
|
collection = v._get_collection_name()
|
||||||
|
value_dict[k] = DBRef(collection, v.pk)
|
||||||
|
elif hasattr(v, "to_python"):
|
||||||
|
value_dict[k] = v.to_python()
|
||||||
|
else:
|
||||||
|
value_dict[k] = self.to_python(v)
|
||||||
|
|
||||||
|
if is_list: # Convert back to a list
|
||||||
|
return [
|
||||||
|
v for _, v in sorted(value_dict.items(), key=operator.itemgetter(0))
|
||||||
|
]
|
||||||
|
return value_dict
|
||||||
|
|
||||||
|
def to_mongo(self, value, use_db_field=True, fields=None):
|
||||||
|
"""Convert a Python type to a MongoDB-compatible type."""
|
||||||
|
Document = _import_class("Document")
|
||||||
|
EmbeddedDocument = _import_class("EmbeddedDocument")
|
||||||
|
GenericReferenceField = _import_class("GenericReferenceField")
|
||||||
|
|
||||||
|
if isinstance(value, str):
|
||||||
|
return value
|
||||||
|
|
||||||
|
if hasattr(value, "to_mongo"):
|
||||||
|
if isinstance(value, Document):
|
||||||
|
return GenericReferenceField().to_mongo(value)
|
||||||
|
cls = value.__class__
|
||||||
|
val = value.to_mongo(use_db_field, fields)
|
||||||
|
# If it's a document that is not inherited add _cls
|
||||||
|
if isinstance(value, EmbeddedDocument):
|
||||||
|
val["_cls"] = cls.__name__
|
||||||
|
return val
|
||||||
|
|
||||||
|
is_list = False
|
||||||
|
if not hasattr(value, "items"):
|
||||||
|
try:
|
||||||
|
is_list = True
|
||||||
|
value = {k: v for k, v in enumerate(value)}
|
||||||
|
except TypeError: # Not iterable return the value
|
||||||
|
return value
|
||||||
|
|
||||||
|
if self.field:
|
||||||
|
value_dict = {
|
||||||
|
key: self.field._to_mongo_safe_call(item, use_db_field, fields)
|
||||||
|
for key, item in value.items()
|
||||||
|
}
|
||||||
else:
|
else:
|
||||||
value_dict = {}
|
value_dict = {}
|
||||||
for k, v in value.items():
|
for k, v in value.items():
|
||||||
if isinstance(v, Document):
|
if isinstance(v, Document):
|
||||||
# We need the id from the saved object to create the DBRef
|
# We need the id from the saved object to create the DBRef
|
||||||
if v.pk is None:
|
if v.pk is None:
|
||||||
self.error('You can only reference documents once they'
|
self.error(
|
||||||
' have been saved to the database')
|
"You can only reference documents once they"
|
||||||
collection = v._get_collection_name()
|
" have been saved to the database"
|
||||||
value_dict[k] = DBRef(collection, v.pk)
|
)
|
||||||
elif hasattr(v, 'to_python'):
|
|
||||||
value_dict[k] = v.to_python()
|
|
||||||
else:
|
|
||||||
value_dict[k] = self.to_python(v)
|
|
||||||
|
|
||||||
if is_list: # Convert back to a list
|
|
||||||
return [v for k, v in sorted(value_dict.items(),
|
|
||||||
key=operator.itemgetter(0))]
|
|
||||||
return value_dict
|
|
||||||
|
|
||||||
def to_mongo(self, value):
|
|
||||||
"""Convert a Python type to a MongoDB-compatible type.
|
|
||||||
"""
|
|
||||||
Document = _import_class("Document")
|
|
||||||
EmbeddedDocument = _import_class("EmbeddedDocument")
|
|
||||||
GenericReferenceField = _import_class("GenericReferenceField")
|
|
||||||
|
|
||||||
if isinstance(value, basestring):
|
|
||||||
return value
|
|
||||||
|
|
||||||
if hasattr(value, 'to_mongo'):
|
|
||||||
if isinstance(value, Document):
|
|
||||||
return GenericReferenceField().to_mongo(value)
|
|
||||||
cls = value.__class__
|
|
||||||
val = value.to_mongo()
|
|
||||||
# If we its a document thats not inherited add _cls
|
|
||||||
if (isinstance(value, EmbeddedDocument)):
|
|
||||||
val['_cls'] = cls.__name__
|
|
||||||
return val
|
|
||||||
|
|
||||||
is_list = False
|
|
||||||
if not hasattr(value, 'items'):
|
|
||||||
try:
|
|
||||||
is_list = True
|
|
||||||
value = dict([(k, v) for k, v in enumerate(value)])
|
|
||||||
except TypeError: # Not iterable return the value
|
|
||||||
return value
|
|
||||||
|
|
||||||
if self.field:
|
|
||||||
value_dict = dict([(key, self.field.to_mongo(item))
|
|
||||||
for key, item in value.iteritems()])
|
|
||||||
else:
|
|
||||||
value_dict = {}
|
|
||||||
for k, v in value.iteritems():
|
|
||||||
if isinstance(v, Document):
|
|
||||||
# We need the id from the saved object to create the DBRef
|
|
||||||
if v.pk is None:
|
|
||||||
self.error('You can only reference documents once they'
|
|
||||||
' have been saved to the database')
|
|
||||||
|
|
||||||
# If its a document that is not inheritable it won't have
|
# If its a document that is not inheritable it won't have
|
||||||
# any _cls data so make it a generic reference allows
|
# any _cls data so make it a generic reference allows
|
||||||
# us to dereference
|
# us to dereference
|
||||||
meta = getattr(v, '_meta', {})
|
meta = getattr(v, "_meta", {})
|
||||||
allow_inheritance = (
|
allow_inheritance = meta.get("allow_inheritance")
|
||||||
meta.get('allow_inheritance', ALLOW_INHERITANCE)
|
|
||||||
is True)
|
|
||||||
if not allow_inheritance and not self.field:
|
if not allow_inheritance and not self.field:
|
||||||
value_dict[k] = GenericReferenceField().to_mongo(v)
|
value_dict[k] = GenericReferenceField().to_mongo(v)
|
||||||
else:
|
else:
|
||||||
collection = v._get_collection_name()
|
collection = v._get_collection_name()
|
||||||
value_dict[k] = DBRef(collection, v.pk)
|
value_dict[k] = DBRef(collection, v.pk)
|
||||||
elif hasattr(v, 'to_mongo'):
|
elif hasattr(v, "to_mongo"):
|
||||||
cls = v.__class__
|
cls = v.__class__
|
||||||
val = v.to_mongo()
|
val = v.to_mongo(use_db_field, fields)
|
||||||
# If we its a document thats not inherited add _cls
|
# If it's a document that is not inherited add _cls
|
||||||
if (isinstance(v, (Document, EmbeddedDocument))):
|
if isinstance(v, (Document, EmbeddedDocument)):
|
||||||
val['_cls'] = cls.__name__
|
val["_cls"] = cls.__name__
|
||||||
value_dict[k] = val
|
value_dict[k] = val
|
||||||
else:
|
else:
|
||||||
value_dict[k] = self.to_mongo(v)
|
value_dict[k] = self.to_mongo(v, use_db_field, fields)
|
||||||
|
|
||||||
if is_list: # Convert back to a list
|
if is_list: # Convert back to a list
|
||||||
return [v for k, v in sorted(value_dict.items(),
|
return [
|
||||||
key=operator.itemgetter(0))]
|
v for _, v in sorted(value_dict.items(), key=operator.itemgetter(0))
|
||||||
|
]
|
||||||
return value_dict
|
return value_dict
|
||||||
|
|
||||||
def validate(self, value):
|
def validate(self, value):
|
||||||
"""If field is provided ensure the value is valid.
|
"""If field is provided ensure the value is valid."""
|
||||||
"""
|
|
||||||
errors = {}
|
errors = {}
|
||||||
if self.field:
|
if self.field:
|
||||||
if hasattr(value, 'iteritems') or hasattr(value, 'items'):
|
if hasattr(value, "items"):
|
||||||
sequence = value.iteritems()
|
sequence = value.items()
|
||||||
else:
|
else:
|
||||||
sequence = enumerate(value)
|
sequence = enumerate(value)
|
||||||
for k, v in sequence:
|
for k, v in sequence:
|
||||||
try:
|
try:
|
||||||
self.field._validate(v)
|
self.field._validate(v)
|
||||||
except ValidationError, error:
|
except ValidationError as error:
|
||||||
errors[k] = error.errors or error
|
errors[k] = error.errors or error
|
||||||
except (ValueError, AssertionError), error:
|
except (ValueError, AssertionError) as error:
|
||||||
errors[k] = error
|
errors[k] = error
|
||||||
|
|
||||||
if errors:
|
if errors:
|
||||||
field_class = self.field.__class__.__name__
|
field_class = self.field.__class__.__name__
|
||||||
self.error('Invalid %s item (%s)' % (field_class, value),
|
self.error(
|
||||||
errors=errors)
|
"Invalid {} item ({})".format(field_class, value), errors=errors
|
||||||
|
)
|
||||||
# Don't allow empty values if required
|
# Don't allow empty values if required
|
||||||
if self.required and not value:
|
if self.required and not value:
|
||||||
self.error('Field is required and cannot be empty')
|
self.error("Field is required and cannot be empty")
|
||||||
|
|
||||||
def prepare_query_value(self, op, value):
|
def prepare_query_value(self, op, value):
|
||||||
return self.to_mongo(value)
|
return self.to_mongo(value)
|
||||||
@@ -377,28 +489,24 @@ class ComplexBaseField(BaseField):
|
|||||||
self.field.owner_document = owner_document
|
self.field.owner_document = owner_document
|
||||||
self._owner_document = owner_document
|
self._owner_document = owner_document
|
||||||
|
|
||||||
def _get_owner_document(self, owner_document):
|
|
||||||
self._owner_document = owner_document
|
|
||||||
|
|
||||||
owner_document = property(_get_owner_document, _set_owner_document)
|
|
||||||
|
|
||||||
|
|
||||||
class ObjectIdField(BaseField):
|
class ObjectIdField(BaseField):
|
||||||
"""A field wrapper around MongoDB's ObjectIds.
|
"""A field wrapper around MongoDB's ObjectIds."""
|
||||||
"""
|
|
||||||
|
|
||||||
def to_python(self, value):
|
def to_python(self, value):
|
||||||
if not isinstance(value, ObjectId):
|
try:
|
||||||
value = ObjectId(value)
|
if not isinstance(value, ObjectId):
|
||||||
|
value = ObjectId(value)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
return value
|
return value
|
||||||
|
|
||||||
def to_mongo(self, value):
|
def to_mongo(self, value):
|
||||||
if not isinstance(value, ObjectId):
|
if not isinstance(value, ObjectId):
|
||||||
try:
|
try:
|
||||||
return ObjectId(unicode(value))
|
return ObjectId(str(value))
|
||||||
except Exception, e:
|
except Exception as e:
|
||||||
# e.message attribute has been deprecated since Python 2.6
|
self.error(str(e))
|
||||||
self.error(unicode(e))
|
|
||||||
return value
|
return value
|
||||||
|
|
||||||
def prepare_query_value(self, op, value):
|
def prepare_query_value(self, op, value):
|
||||||
@@ -406,13 +514,14 @@ class ObjectIdField(BaseField):
|
|||||||
|
|
||||||
def validate(self, value):
|
def validate(self, value):
|
||||||
try:
|
try:
|
||||||
ObjectId(unicode(value))
|
ObjectId(str(value))
|
||||||
except:
|
except Exception:
|
||||||
self.error('Invalid Object ID')
|
self.error("Invalid ObjectID")
|
||||||
|
|
||||||
|
|
||||||
class GeoJsonBaseField(BaseField):
|
class GeoJsonBaseField(BaseField):
|
||||||
"""A geo json field storing a geojson style object.
|
"""A geo json field storing a geojson style object.
|
||||||
|
|
||||||
.. versionadded:: 0.8
|
.. versionadded:: 0.8
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@@ -421,28 +530,29 @@ class GeoJsonBaseField(BaseField):
|
|||||||
|
|
||||||
def __init__(self, auto_index=True, *args, **kwargs):
|
def __init__(self, auto_index=True, *args, **kwargs):
|
||||||
"""
|
"""
|
||||||
:param auto_index: Automatically create a "2dsphere" index. Defaults
|
:param bool auto_index: Automatically create a '2dsphere' index.\
|
||||||
to `True`.
|
Defaults to `True`.
|
||||||
"""
|
"""
|
||||||
self._name = "%sField" % self._type
|
self._name = "%sField" % self._type
|
||||||
if not auto_index:
|
if not auto_index:
|
||||||
self._geo_index = False
|
self._geo_index = False
|
||||||
super(GeoJsonBaseField, self).__init__(*args, **kwargs)
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
def validate(self, value):
|
def validate(self, value):
|
||||||
"""Validate the GeoJson object based on its type
|
"""Validate the GeoJson object based on its type."""
|
||||||
"""
|
|
||||||
if isinstance(value, dict):
|
if isinstance(value, dict):
|
||||||
if set(value.keys()) == set(['type', 'coordinates']):
|
if set(value.keys()) == {"type", "coordinates"}:
|
||||||
if value['type'] != self._type:
|
if value["type"] != self._type:
|
||||||
self.error('%s type must be "%s"' % (self._name, self._type))
|
self.error('{} type must be "{}"'.format(self._name, self._type))
|
||||||
return self.validate(value['coordinates'])
|
return self.validate(value["coordinates"])
|
||||||
else:
|
else:
|
||||||
self.error('%s can only accept a valid GeoJson dictionary'
|
self.error(
|
||||||
' or lists of (x, y)' % self._name)
|
"%s can only accept a valid GeoJson dictionary"
|
||||||
|
" or lists of (x, y)" % self._name
|
||||||
|
)
|
||||||
return
|
return
|
||||||
elif not isinstance(value, (list, tuple)):
|
elif not isinstance(value, (list, tuple)):
|
||||||
self.error('%s can only accept lists of [x, y]' % self._name)
|
self.error("%s can only accept lists of [x, y]" % self._name)
|
||||||
return
|
return
|
||||||
|
|
||||||
validate = getattr(self, "_validate_%s" % self._type.lower())
|
validate = getattr(self, "_validate_%s" % self._type.lower())
|
||||||
@@ -450,35 +560,38 @@ class GeoJsonBaseField(BaseField):
|
|||||||
if error:
|
if error:
|
||||||
self.error(error)
|
self.error(error)
|
||||||
|
|
||||||
def _validate_polygon(self, value):
|
def _validate_polygon(self, value, top_level=True):
|
||||||
if not isinstance(value, (list, tuple)):
|
if not isinstance(value, (list, tuple)):
|
||||||
return 'Polygons must contain list of linestrings'
|
return "Polygons must contain list of linestrings"
|
||||||
|
|
||||||
# Quick and dirty validator
|
# Quick and dirty validator
|
||||||
try:
|
try:
|
||||||
value[0][0][0]
|
value[0][0][0]
|
||||||
except:
|
except (TypeError, IndexError):
|
||||||
return "Invalid Polygon must contain at least one valid linestring"
|
return "Invalid Polygon must contain at least one valid linestring"
|
||||||
|
|
||||||
errors = []
|
errors = []
|
||||||
for val in value:
|
for val in value:
|
||||||
error = self._validate_linestring(val, False)
|
error = self._validate_linestring(val, False)
|
||||||
if not error and val[0] != val[-1]:
|
if not error and val[0] != val[-1]:
|
||||||
error = 'LineStrings must start and end at the same point'
|
error = "LineStrings must start and end at the same point"
|
||||||
if error and error not in errors:
|
if error and error not in errors:
|
||||||
errors.append(error)
|
errors.append(error)
|
||||||
if errors:
|
if errors:
|
||||||
return "Invalid Polygon:\n%s" % ", ".join(errors)
|
if top_level:
|
||||||
|
return "Invalid Polygon:\n%s" % ", ".join(errors)
|
||||||
|
else:
|
||||||
|
return "%s" % ", ".join(errors)
|
||||||
|
|
||||||
def _validate_linestring(self, value, top_level=True):
|
def _validate_linestring(self, value, top_level=True):
|
||||||
"""Validates a linestring"""
|
"""Validate a linestring."""
|
||||||
if not isinstance(value, (list, tuple)):
|
if not isinstance(value, (list, tuple)):
|
||||||
return 'LineStrings must contain list of coordinate pairs'
|
return "LineStrings must contain list of coordinate pairs"
|
||||||
|
|
||||||
# Quick and dirty validator
|
# Quick and dirty validator
|
||||||
try:
|
try:
|
||||||
value[0][0]
|
value[0][0]
|
||||||
except:
|
except (TypeError, IndexError):
|
||||||
return "Invalid LineString must contain at least one valid point"
|
return "Invalid LineString must contain at least one valid point"
|
||||||
|
|
||||||
errors = []
|
errors = []
|
||||||
@@ -495,13 +608,74 @@ class GeoJsonBaseField(BaseField):
|
|||||||
def _validate_point(self, value):
|
def _validate_point(self, value):
|
||||||
"""Validate each set of coords"""
|
"""Validate each set of coords"""
|
||||||
if not isinstance(value, (list, tuple)):
|
if not isinstance(value, (list, tuple)):
|
||||||
return 'Points must be a list of coordinate pairs'
|
return "Points must be a list of coordinate pairs"
|
||||||
elif not len(value) == 2:
|
elif not len(value) == 2:
|
||||||
return "Value (%s) must be a two-dimensional point" % repr(value)
|
return "Value (%s) must be a two-dimensional point" % repr(value)
|
||||||
elif (not isinstance(value[0], (float, int)) or
|
elif not isinstance(value[0], (float, int)) or not isinstance(
|
||||||
not isinstance(value[1], (float, int))):
|
value[1], (float, int)
|
||||||
|
):
|
||||||
return "Both values (%s) in point must be float or int" % repr(value)
|
return "Both values (%s) in point must be float or int" % repr(value)
|
||||||
|
|
||||||
|
def _validate_multipoint(self, value):
|
||||||
|
if not isinstance(value, (list, tuple)):
|
||||||
|
return "MultiPoint must be a list of Point"
|
||||||
|
|
||||||
|
# Quick and dirty validator
|
||||||
|
try:
|
||||||
|
value[0][0]
|
||||||
|
except (TypeError, IndexError):
|
||||||
|
return "Invalid MultiPoint must contain at least one valid point"
|
||||||
|
|
||||||
|
errors = []
|
||||||
|
for point in value:
|
||||||
|
error = self._validate_point(point)
|
||||||
|
if error and error not in errors:
|
||||||
|
errors.append(error)
|
||||||
|
|
||||||
|
if errors:
|
||||||
|
return "%s" % ", ".join(errors)
|
||||||
|
|
||||||
|
def _validate_multilinestring(self, value, top_level=True):
|
||||||
|
if not isinstance(value, (list, tuple)):
|
||||||
|
return "MultiLineString must be a list of LineString"
|
||||||
|
|
||||||
|
# Quick and dirty validator
|
||||||
|
try:
|
||||||
|
value[0][0][0]
|
||||||
|
except (TypeError, IndexError):
|
||||||
|
return "Invalid MultiLineString must contain at least one valid linestring"
|
||||||
|
|
||||||
|
errors = []
|
||||||
|
for linestring in value:
|
||||||
|
error = self._validate_linestring(linestring, False)
|
||||||
|
if error and error not in errors:
|
||||||
|
errors.append(error)
|
||||||
|
|
||||||
|
if errors:
|
||||||
|
if top_level:
|
||||||
|
return "Invalid MultiLineString:\n%s" % ", ".join(errors)
|
||||||
|
else:
|
||||||
|
return "%s" % ", ".join(errors)
|
||||||
|
|
||||||
|
def _validate_multipolygon(self, value):
|
||||||
|
if not isinstance(value, (list, tuple)):
|
||||||
|
return "MultiPolygon must be a list of Polygon"
|
||||||
|
|
||||||
|
# Quick and dirty validator
|
||||||
|
try:
|
||||||
|
value[0][0][0][0]
|
||||||
|
except (TypeError, IndexError):
|
||||||
|
return "Invalid MultiPolygon must contain at least one valid Polygon"
|
||||||
|
|
||||||
|
errors = []
|
||||||
|
for polygon in value:
|
||||||
|
error = self._validate_polygon(polygon, False)
|
||||||
|
if error and error not in errors:
|
||||||
|
errors.append(error)
|
||||||
|
|
||||||
|
if errors:
|
||||||
|
return "Invalid MultiPolygon:\n%s" % ", ".join(errors)
|
||||||
|
|
||||||
def to_mongo(self, value):
|
def to_mongo(self, value):
|
||||||
if isinstance(value, dict):
|
if isinstance(value, dict):
|
||||||
return value
|
return value
|
||||||
|
|||||||
@@ -1,62 +1,72 @@
|
|||||||
|
import itertools
|
||||||
import warnings
|
import warnings
|
||||||
|
|
||||||
import pymongo
|
from mongoengine.base.common import _document_registry
|
||||||
|
from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField
|
||||||
from mongoengine.common import _import_class
|
from mongoengine.common import _import_class
|
||||||
from mongoengine.errors import InvalidDocumentError
|
from mongoengine.errors import InvalidDocumentError
|
||||||
from mongoengine.python_support import PY3
|
from mongoengine.queryset import (
|
||||||
from mongoengine.queryset import (DO_NOTHING, DoesNotExist,
|
DO_NOTHING,
|
||||||
MultipleObjectsReturned,
|
DoesNotExist,
|
||||||
QuerySet, QuerySetManager)
|
MultipleObjectsReturned,
|
||||||
|
QuerySetManager,
|
||||||
|
)
|
||||||
|
|
||||||
from mongoengine.base.common import _document_registry, ALLOW_INHERITANCE
|
|
||||||
from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField
|
|
||||||
|
|
||||||
__all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass')
|
__all__ = ("DocumentMetaclass", "TopLevelDocumentMetaclass")
|
||||||
|
|
||||||
|
|
||||||
class DocumentMetaclass(type):
|
class DocumentMetaclass(type):
|
||||||
"""Metaclass for all documents.
|
"""Metaclass for all documents."""
|
||||||
"""
|
|
||||||
|
|
||||||
def __new__(cls, name, bases, attrs):
|
# TODO lower complexity of this method
|
||||||
flattened_bases = cls._get_bases(bases)
|
def __new__(mcs, name, bases, attrs):
|
||||||
super_new = super(DocumentMetaclass, cls).__new__
|
flattened_bases = mcs._get_bases(bases)
|
||||||
|
super_new = super().__new__
|
||||||
|
|
||||||
# If a base class just call super
|
# If a base class just call super
|
||||||
metaclass = attrs.get('my_metaclass')
|
metaclass = attrs.get("my_metaclass")
|
||||||
if metaclass and issubclass(metaclass, DocumentMetaclass):
|
if metaclass and issubclass(metaclass, DocumentMetaclass):
|
||||||
return super_new(cls, name, bases, attrs)
|
return super_new(mcs, name, bases, attrs)
|
||||||
|
|
||||||
attrs['_is_document'] = attrs.get('_is_document', False)
|
attrs["_is_document"] = attrs.get("_is_document", False)
|
||||||
|
attrs["_cached_reference_fields"] = []
|
||||||
|
|
||||||
# EmbeddedDocuments could have meta data for inheritance
|
# EmbeddedDocuments could have meta data for inheritance
|
||||||
if 'meta' in attrs:
|
if "meta" in attrs:
|
||||||
attrs['_meta'] = attrs.pop('meta')
|
attrs["_meta"] = attrs.pop("meta")
|
||||||
|
|
||||||
# EmbeddedDocuments should inherit meta data
|
# EmbeddedDocuments should inherit meta data
|
||||||
if '_meta' not in attrs:
|
if "_meta" not in attrs:
|
||||||
meta = MetaDict()
|
meta = MetaDict()
|
||||||
for base in flattened_bases[::-1]:
|
for base in flattened_bases[::-1]:
|
||||||
# Add any mixin metadata from plain objects
|
# Add any mixin metadata from plain objects
|
||||||
if hasattr(base, 'meta'):
|
if hasattr(base, "meta"):
|
||||||
meta.merge(base.meta)
|
meta.merge(base.meta)
|
||||||
elif hasattr(base, '_meta'):
|
elif hasattr(base, "_meta"):
|
||||||
meta.merge(base._meta)
|
meta.merge(base._meta)
|
||||||
attrs['_meta'] = meta
|
attrs["_meta"] = meta
|
||||||
|
attrs["_meta"][
|
||||||
|
"abstract"
|
||||||
|
] = False # 789: EmbeddedDocument shouldn't inherit abstract
|
||||||
|
|
||||||
|
# If allow_inheritance is True, add a "_cls" string field to the attrs
|
||||||
|
if attrs["_meta"].get("allow_inheritance"):
|
||||||
|
StringField = _import_class("StringField")
|
||||||
|
attrs["_cls"] = StringField()
|
||||||
|
|
||||||
# Handle document Fields
|
# Handle document Fields
|
||||||
|
|
||||||
# Merge all fields from subclasses
|
# Merge all fields from subclasses
|
||||||
doc_fields = {}
|
doc_fields = {}
|
||||||
for base in flattened_bases[::-1]:
|
for base in flattened_bases[::-1]:
|
||||||
if hasattr(base, '_fields'):
|
if hasattr(base, "_fields"):
|
||||||
doc_fields.update(base._fields)
|
doc_fields.update(base._fields)
|
||||||
|
|
||||||
# Standard object mixin - merge in any Fields
|
# Standard object mixin - merge in any Fields
|
||||||
if not hasattr(base, '_meta'):
|
if not hasattr(base, "_meta"):
|
||||||
base_fields = {}
|
base_fields = {}
|
||||||
for attr_name, attr_value in base.__dict__.iteritems():
|
for attr_name, attr_value in base.__dict__.items():
|
||||||
if not isinstance(attr_value, BaseField):
|
if not isinstance(attr_value, BaseField):
|
||||||
continue
|
continue
|
||||||
attr_value.name = attr_name
|
attr_value.name = attr_name
|
||||||
@@ -68,7 +78,7 @@ class DocumentMetaclass(type):
|
|||||||
|
|
||||||
# Discover any document fields
|
# Discover any document fields
|
||||||
field_names = {}
|
field_names = {}
|
||||||
for attr_name, attr_value in attrs.iteritems():
|
for attr_name, attr_value in attrs.items():
|
||||||
if not isinstance(attr_value, BaseField):
|
if not isinstance(attr_value, BaseField):
|
||||||
continue
|
continue
|
||||||
attr_value.name = attr_name
|
attr_value.name = attr_name
|
||||||
@@ -77,26 +87,29 @@ class DocumentMetaclass(type):
|
|||||||
doc_fields[attr_name] = attr_value
|
doc_fields[attr_name] = attr_value
|
||||||
|
|
||||||
# Count names to ensure no db_field redefinitions
|
# Count names to ensure no db_field redefinitions
|
||||||
field_names[attr_value.db_field] = field_names.get(
|
field_names[attr_value.db_field] = (
|
||||||
attr_value.db_field, 0) + 1
|
field_names.get(attr_value.db_field, 0) + 1
|
||||||
|
)
|
||||||
|
|
||||||
# Ensure no duplicate db_fields
|
# Ensure no duplicate db_fields
|
||||||
duplicate_db_fields = [k for k, v in field_names.items() if v > 1]
|
duplicate_db_fields = [k for k, v in field_names.items() if v > 1]
|
||||||
if duplicate_db_fields:
|
if duplicate_db_fields:
|
||||||
msg = ("Multiple db_fields defined for: %s " %
|
msg = "Multiple db_fields defined for: %s " % ", ".join(duplicate_db_fields)
|
||||||
", ".join(duplicate_db_fields))
|
|
||||||
raise InvalidDocumentError(msg)
|
raise InvalidDocumentError(msg)
|
||||||
|
|
||||||
# Set _fields and db_field maps
|
# Set _fields and db_field maps
|
||||||
attrs['_fields'] = doc_fields
|
attrs["_fields"] = doc_fields
|
||||||
attrs['_db_field_map'] = dict([(k, getattr(v, 'db_field', k))
|
attrs["_db_field_map"] = {
|
||||||
for k, v in doc_fields.iteritems()])
|
k: getattr(v, "db_field", k) for k, v in doc_fields.items()
|
||||||
attrs['_reverse_db_field_map'] = dict(
|
}
|
||||||
(v, k) for k, v in attrs['_db_field_map'].iteritems())
|
attrs["_reverse_db_field_map"] = {
|
||||||
|
v: k for k, v in attrs["_db_field_map"].items()
|
||||||
|
}
|
||||||
|
|
||||||
attrs['_fields_ordered'] = tuple(i[1] for i in sorted(
|
attrs["_fields_ordered"] = tuple(
|
||||||
(v.creation_counter, v.name)
|
i[1]
|
||||||
for v in doc_fields.itervalues()))
|
for i in sorted((v.creation_counter, v.name) for v in doc_fields.values())
|
||||||
|
)
|
||||||
|
|
||||||
#
|
#
|
||||||
# Set document hierarchy
|
# Set document hierarchy
|
||||||
@@ -104,44 +117,50 @@ class DocumentMetaclass(type):
|
|||||||
superclasses = ()
|
superclasses = ()
|
||||||
class_name = [name]
|
class_name = [name]
|
||||||
for base in flattened_bases:
|
for base in flattened_bases:
|
||||||
if (not getattr(base, '_is_base_cls', True) and
|
if not getattr(base, "_is_base_cls", True) and not getattr(
|
||||||
not getattr(base, '_meta', {}).get('abstract', True)):
|
base, "_meta", {}
|
||||||
# Collate heirarchy for _cls and _subclasses
|
).get("abstract", True):
|
||||||
|
# Collate hierarchy for _cls and _subclasses
|
||||||
class_name.append(base.__name__)
|
class_name.append(base.__name__)
|
||||||
|
|
||||||
if hasattr(base, '_meta'):
|
if hasattr(base, "_meta"):
|
||||||
# Warn if allow_inheritance isn't set and prevent
|
# Warn if allow_inheritance isn't set and prevent
|
||||||
# inheritance of classes where inheritance is set to False
|
# inheritance of classes where inheritance is set to False
|
||||||
allow_inheritance = base._meta.get('allow_inheritance',
|
allow_inheritance = base._meta.get("allow_inheritance")
|
||||||
ALLOW_INHERITANCE)
|
if not allow_inheritance and not base._meta.get("abstract"):
|
||||||
if (allow_inheritance is not True and
|
raise ValueError(
|
||||||
not base._meta.get('abstract')):
|
"Document %s may not be subclassed. "
|
||||||
raise ValueError('Document %s may not be subclassed' %
|
'To enable inheritance, use the "allow_inheritance" meta attribute.'
|
||||||
base.__name__)
|
% base.__name__
|
||||||
|
)
|
||||||
|
|
||||||
# Get superclasses from last base superclass
|
# Get superclasses from last base superclass
|
||||||
document_bases = [b for b in flattened_bases
|
document_bases = [b for b in flattened_bases if hasattr(b, "_class_name")]
|
||||||
if hasattr(b, '_class_name')]
|
|
||||||
if document_bases:
|
if document_bases:
|
||||||
superclasses = document_bases[0]._superclasses
|
superclasses = document_bases[0]._superclasses
|
||||||
superclasses += (document_bases[0]._class_name, )
|
superclasses += (document_bases[0]._class_name,)
|
||||||
|
|
||||||
_cls = '.'.join(reversed(class_name))
|
_cls = ".".join(reversed(class_name))
|
||||||
attrs['_class_name'] = _cls
|
attrs["_class_name"] = _cls
|
||||||
attrs['_superclasses'] = superclasses
|
attrs["_superclasses"] = superclasses
|
||||||
attrs['_subclasses'] = (_cls, )
|
attrs["_subclasses"] = (_cls,)
|
||||||
attrs['_types'] = attrs['_subclasses'] # TODO depreciate _types
|
attrs["_types"] = attrs["_subclasses"] # TODO depreciate _types
|
||||||
|
|
||||||
# Create the new_class
|
# Create the new_class
|
||||||
new_class = super_new(cls, name, bases, attrs)
|
new_class = super_new(mcs, name, bases, attrs)
|
||||||
|
|
||||||
# Set _subclasses
|
# Set _subclasses
|
||||||
for base in document_bases:
|
for base in document_bases:
|
||||||
if _cls not in base._subclasses:
|
if _cls not in base._subclasses:
|
||||||
base._subclasses += (_cls,)
|
base._subclasses += (_cls,)
|
||||||
base._types = base._subclasses # TODO depreciate _types
|
base._types = base._subclasses # TODO depreciate _types
|
||||||
|
|
||||||
Document, EmbeddedDocument, DictField = cls._import_classes()
|
(
|
||||||
|
Document,
|
||||||
|
EmbeddedDocument,
|
||||||
|
DictField,
|
||||||
|
CachedReferenceField,
|
||||||
|
) = mcs._import_classes()
|
||||||
|
|
||||||
if issubclass(new_class, Document):
|
if issubclass(new_class, Document):
|
||||||
new_class._collection = None
|
new_class._collection = None
|
||||||
@@ -149,82 +168,78 @@ class DocumentMetaclass(type):
|
|||||||
# Add class to the _document_registry
|
# Add class to the _document_registry
|
||||||
_document_registry[new_class._class_name] = new_class
|
_document_registry[new_class._class_name] = new_class
|
||||||
|
|
||||||
# In Python 2, User-defined methods objects have special read-only
|
|
||||||
# attributes 'im_func' and 'im_self' which contain the function obj
|
|
||||||
# and class instance object respectively. With Python 3 these special
|
|
||||||
# attributes have been replaced by __func__ and __self__. The Blinker
|
|
||||||
# module continues to use im_func and im_self, so the code below
|
|
||||||
# copies __func__ into im_func and __self__ into im_self for
|
|
||||||
# classmethod objects in Document derived classes.
|
|
||||||
if PY3:
|
|
||||||
for key, val in new_class.__dict__.items():
|
|
||||||
if isinstance(val, classmethod):
|
|
||||||
f = val.__get__(new_class)
|
|
||||||
if hasattr(f, '__func__') and not hasattr(f, 'im_func'):
|
|
||||||
f.__dict__.update({'im_func': getattr(f, '__func__')})
|
|
||||||
if hasattr(f, '__self__') and not hasattr(f, 'im_self'):
|
|
||||||
f.__dict__.update({'im_self': getattr(f, '__self__')})
|
|
||||||
|
|
||||||
# Handle delete rules
|
# Handle delete rules
|
||||||
for field in new_class._fields.itervalues():
|
for field in new_class._fields.values():
|
||||||
f = field
|
f = field
|
||||||
f.owner_document = new_class
|
if f.owner_document is None:
|
||||||
delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING)
|
f.owner_document = new_class
|
||||||
if isinstance(f, ComplexBaseField) and hasattr(f, 'field'):
|
delete_rule = getattr(f, "reverse_delete_rule", DO_NOTHING)
|
||||||
delete_rule = getattr(f.field,
|
if isinstance(f, CachedReferenceField):
|
||||||
'reverse_delete_rule',
|
|
||||||
DO_NOTHING)
|
if issubclass(new_class, EmbeddedDocument):
|
||||||
|
raise InvalidDocumentError(
|
||||||
|
"CachedReferenceFields is not allowed in EmbeddedDocuments"
|
||||||
|
)
|
||||||
|
|
||||||
|
if f.auto_sync:
|
||||||
|
f.start_listener()
|
||||||
|
|
||||||
|
f.document_type._cached_reference_fields.append(f)
|
||||||
|
|
||||||
|
if isinstance(f, ComplexBaseField) and hasattr(f, "field"):
|
||||||
|
delete_rule = getattr(f.field, "reverse_delete_rule", DO_NOTHING)
|
||||||
if isinstance(f, DictField) and delete_rule != DO_NOTHING:
|
if isinstance(f, DictField) and delete_rule != DO_NOTHING:
|
||||||
msg = ("Reverse delete rules are not supported "
|
msg = (
|
||||||
"for %s (field: %s)" %
|
"Reverse delete rules are not supported "
|
||||||
(field.__class__.__name__, field.name))
|
"for %s (field: %s)" % (field.__class__.__name__, field.name)
|
||||||
|
)
|
||||||
raise InvalidDocumentError(msg)
|
raise InvalidDocumentError(msg)
|
||||||
|
|
||||||
f = field.field
|
f = field.field
|
||||||
|
|
||||||
if delete_rule != DO_NOTHING:
|
if delete_rule != DO_NOTHING:
|
||||||
if issubclass(new_class, EmbeddedDocument):
|
if issubclass(new_class, EmbeddedDocument):
|
||||||
msg = ("Reverse delete rules are not supported for "
|
msg = (
|
||||||
"EmbeddedDocuments (field: %s)" % field.name)
|
"Reverse delete rules are not supported for "
|
||||||
|
"EmbeddedDocuments (field: %s)" % field.name
|
||||||
|
)
|
||||||
raise InvalidDocumentError(msg)
|
raise InvalidDocumentError(msg)
|
||||||
f.document_type.register_delete_rule(new_class,
|
f.document_type.register_delete_rule(new_class, field.name, delete_rule)
|
||||||
field.name, delete_rule)
|
|
||||||
|
|
||||||
if (field.name and hasattr(Document, field.name) and
|
if (
|
||||||
EmbeddedDocument not in new_class.mro()):
|
field.name
|
||||||
msg = ("%s is a document method and not a valid "
|
and hasattr(Document, field.name)
|
||||||
"field name" % field.name)
|
and EmbeddedDocument not in new_class.mro()
|
||||||
|
):
|
||||||
|
msg = "%s is a document method and not a valid field name" % field.name
|
||||||
raise InvalidDocumentError(msg)
|
raise InvalidDocumentError(msg)
|
||||||
|
|
||||||
return new_class
|
return new_class
|
||||||
|
|
||||||
def add_to_class(self, name, value):
|
|
||||||
setattr(self, name, value)
|
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _get_bases(cls, bases):
|
def _get_bases(mcs, bases):
|
||||||
if isinstance(bases, BasesTuple):
|
if isinstance(bases, BasesTuple):
|
||||||
return bases
|
return bases
|
||||||
seen = []
|
seen = []
|
||||||
bases = cls.__get_bases(bases)
|
bases = mcs.__get_bases(bases)
|
||||||
unique_bases = (b for b in bases if not (b in seen or seen.append(b)))
|
unique_bases = (b for b in bases if not (b in seen or seen.append(b)))
|
||||||
return BasesTuple(unique_bases)
|
return BasesTuple(unique_bases)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def __get_bases(cls, bases):
|
def __get_bases(mcs, bases):
|
||||||
for base in bases:
|
for base in bases:
|
||||||
if base is object:
|
if base is object:
|
||||||
continue
|
continue
|
||||||
yield base
|
yield base
|
||||||
for child_base in cls.__get_bases(base.__bases__):
|
yield from mcs.__get_bases(base.__bases__)
|
||||||
yield child_base
|
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _import_classes(cls):
|
def _import_classes(mcs):
|
||||||
Document = _import_class('Document')
|
Document = _import_class("Document")
|
||||||
EmbeddedDocument = _import_class('EmbeddedDocument')
|
EmbeddedDocument = _import_class("EmbeddedDocument")
|
||||||
DictField = _import_class('DictField')
|
DictField = _import_class("DictField")
|
||||||
return (Document, EmbeddedDocument, DictField)
|
CachedReferenceField = _import_class("CachedReferenceField")
|
||||||
|
return Document, EmbeddedDocument, DictField, CachedReferenceField
|
||||||
|
|
||||||
|
|
||||||
class TopLevelDocumentMetaclass(DocumentMetaclass):
|
class TopLevelDocumentMetaclass(DocumentMetaclass):
|
||||||
@@ -232,167 +247,213 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
|||||||
collection in the database.
|
collection in the database.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __new__(cls, name, bases, attrs):
|
def __new__(mcs, name, bases, attrs):
|
||||||
flattened_bases = cls._get_bases(bases)
|
flattened_bases = mcs._get_bases(bases)
|
||||||
super_new = super(TopLevelDocumentMetaclass, cls).__new__
|
super_new = super().__new__
|
||||||
|
|
||||||
# Set default _meta data if base class, otherwise get user defined meta
|
# Set default _meta data if base class, otherwise get user defined meta
|
||||||
if (attrs.get('my_metaclass') == TopLevelDocumentMetaclass):
|
if attrs.get("my_metaclass") == TopLevelDocumentMetaclass:
|
||||||
# defaults
|
# defaults
|
||||||
attrs['_meta'] = {
|
attrs["_meta"] = {
|
||||||
'abstract': True,
|
"abstract": True,
|
||||||
'max_documents': None,
|
"max_documents": None,
|
||||||
'max_size': None,
|
"max_size": None,
|
||||||
'ordering': [], # default ordering applied at runtime
|
"ordering": [], # default ordering applied at runtime
|
||||||
'indexes': [], # indexes to be ensured at runtime
|
"indexes": [], # indexes to be ensured at runtime
|
||||||
'id_field': None,
|
"id_field": None,
|
||||||
'index_background': False,
|
"index_background": False,
|
||||||
'index_drop_dups': False,
|
"index_opts": None,
|
||||||
'index_opts': None,
|
"delete_rules": None,
|
||||||
'delete_rules': None,
|
# allow_inheritance can be True, False, and None. True means
|
||||||
'allow_inheritance': None,
|
# "allow inheritance", False means "don't allow inheritance",
|
||||||
|
# None means "do whatever your parent does, or don't allow
|
||||||
|
# inheritance if you're a top-level class".
|
||||||
|
"allow_inheritance": None,
|
||||||
}
|
}
|
||||||
attrs['_is_base_cls'] = True
|
attrs["_is_base_cls"] = True
|
||||||
attrs['_meta'].update(attrs.get('meta', {}))
|
attrs["_meta"].update(attrs.get("meta", {}))
|
||||||
else:
|
else:
|
||||||
attrs['_meta'] = attrs.get('meta', {})
|
attrs["_meta"] = attrs.get("meta", {})
|
||||||
# Explictly set abstract to false unless set
|
# Explicitly set abstract to false unless set
|
||||||
attrs['_meta']['abstract'] = attrs['_meta'].get('abstract', False)
|
attrs["_meta"]["abstract"] = attrs["_meta"].get("abstract", False)
|
||||||
attrs['_is_base_cls'] = False
|
attrs["_is_base_cls"] = False
|
||||||
|
|
||||||
# Set flag marking as document class - as opposed to an object mixin
|
# Set flag marking as document class - as opposed to an object mixin
|
||||||
attrs['_is_document'] = True
|
attrs["_is_document"] = True
|
||||||
|
|
||||||
# Ensure queryset_class is inherited
|
# Ensure queryset_class is inherited
|
||||||
if 'objects' in attrs:
|
if "objects" in attrs:
|
||||||
manager = attrs['objects']
|
manager = attrs["objects"]
|
||||||
if hasattr(manager, 'queryset_class'):
|
if hasattr(manager, "queryset_class"):
|
||||||
attrs['_meta']['queryset_class'] = manager.queryset_class
|
attrs["_meta"]["queryset_class"] = manager.queryset_class
|
||||||
|
|
||||||
# Clean up top level meta
|
# Clean up top level meta
|
||||||
if 'meta' in attrs:
|
if "meta" in attrs:
|
||||||
del(attrs['meta'])
|
del attrs["meta"]
|
||||||
|
|
||||||
# Find the parent document class
|
# Find the parent document class
|
||||||
parent_doc_cls = [b for b in flattened_bases
|
parent_doc_cls = [
|
||||||
if b.__class__ == TopLevelDocumentMetaclass]
|
b for b in flattened_bases if b.__class__ == TopLevelDocumentMetaclass
|
||||||
|
]
|
||||||
parent_doc_cls = None if not parent_doc_cls else parent_doc_cls[0]
|
parent_doc_cls = None if not parent_doc_cls else parent_doc_cls[0]
|
||||||
|
|
||||||
# Prevent classes setting collection different to their parents
|
# Prevent classes setting collection different to their parents
|
||||||
# If parent wasn't an abstract class
|
# If parent wasn't an abstract class
|
||||||
if (parent_doc_cls and 'collection' in attrs.get('_meta', {})
|
if (
|
||||||
and not parent_doc_cls._meta.get('abstract', True)):
|
parent_doc_cls
|
||||||
msg = "Trying to set a collection on a subclass (%s)" % name
|
and "collection" in attrs.get("_meta", {})
|
||||||
warnings.warn(msg, SyntaxWarning)
|
and not parent_doc_cls._meta.get("abstract", True)
|
||||||
del(attrs['_meta']['collection'])
|
):
|
||||||
|
msg = "Trying to set a collection on a subclass (%s)" % name
|
||||||
|
warnings.warn(msg, SyntaxWarning)
|
||||||
|
del attrs["_meta"]["collection"]
|
||||||
|
|
||||||
# Ensure abstract documents have abstract bases
|
# Ensure abstract documents have abstract bases
|
||||||
if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'):
|
if attrs.get("_is_base_cls") or attrs["_meta"].get("abstract"):
|
||||||
if (parent_doc_cls and
|
if parent_doc_cls and not parent_doc_cls._meta.get("abstract", False):
|
||||||
not parent_doc_cls._meta.get('abstract', False)):
|
|
||||||
msg = "Abstract document cannot have non-abstract base"
|
msg = "Abstract document cannot have non-abstract base"
|
||||||
raise ValueError(msg)
|
raise ValueError(msg)
|
||||||
return super_new(cls, name, bases, attrs)
|
return super_new(mcs, name, bases, attrs)
|
||||||
|
|
||||||
# Merge base class metas.
|
# Merge base class metas.
|
||||||
# Uses a special MetaDict that handles various merging rules
|
# Uses a special MetaDict that handles various merging rules
|
||||||
meta = MetaDict()
|
meta = MetaDict()
|
||||||
for base in flattened_bases[::-1]:
|
for base in flattened_bases[::-1]:
|
||||||
# Add any mixin metadata from plain objects
|
# Add any mixin metadata from plain objects
|
||||||
if hasattr(base, 'meta'):
|
if hasattr(base, "meta"):
|
||||||
meta.merge(base.meta)
|
meta.merge(base.meta)
|
||||||
elif hasattr(base, '_meta'):
|
elif hasattr(base, "_meta"):
|
||||||
meta.merge(base._meta)
|
meta.merge(base._meta)
|
||||||
|
|
||||||
# Set collection in the meta if its callable
|
# Set collection in the meta if its callable
|
||||||
if (getattr(base, '_is_document', False) and
|
if getattr(base, "_is_document", False) and not base._meta.get("abstract"):
|
||||||
not base._meta.get('abstract')):
|
collection = meta.get("collection", None)
|
||||||
collection = meta.get('collection', None)
|
|
||||||
if callable(collection):
|
if callable(collection):
|
||||||
meta['collection'] = collection(base)
|
meta["collection"] = collection(base)
|
||||||
|
|
||||||
meta.merge(attrs.get('_meta', {})) # Top level meta
|
meta.merge(attrs.get("_meta", {})) # Top level meta
|
||||||
|
|
||||||
# Only simple classes (direct subclasses of Document)
|
# Only simple classes (i.e. direct subclasses of Document) may set
|
||||||
# may set allow_inheritance to False
|
# allow_inheritance to False. If the base Document allows inheritance,
|
||||||
simple_class = all([b._meta.get('abstract')
|
# none of its subclasses can override allow_inheritance to False.
|
||||||
for b in flattened_bases if hasattr(b, '_meta')])
|
simple_class = all(
|
||||||
if (not simple_class and meta['allow_inheritance'] is False and
|
[b._meta.get("abstract") for b in flattened_bases if hasattr(b, "_meta")]
|
||||||
not meta['abstract']):
|
)
|
||||||
raise ValueError('Only direct subclasses of Document may set '
|
if (
|
||||||
'"allow_inheritance" to False')
|
not simple_class
|
||||||
|
and meta["allow_inheritance"] is False
|
||||||
|
and not meta["abstract"]
|
||||||
|
):
|
||||||
|
raise ValueError(
|
||||||
|
"Only direct subclasses of Document may set "
|
||||||
|
'"allow_inheritance" to False'
|
||||||
|
)
|
||||||
|
|
||||||
# Set default collection name
|
# Set default collection name
|
||||||
if 'collection' not in meta:
|
if "collection" not in meta:
|
||||||
meta['collection'] = ''.join('_%s' % c if c.isupper() else c
|
meta["collection"] = (
|
||||||
for c in name).strip('_').lower()
|
"".join("_%s" % c if c.isupper() else c for c in name)
|
||||||
attrs['_meta'] = meta
|
.strip("_")
|
||||||
|
.lower()
|
||||||
|
)
|
||||||
|
attrs["_meta"] = meta
|
||||||
|
|
||||||
# Call super and get the new class
|
# Call super and get the new class
|
||||||
new_class = super_new(cls, name, bases, attrs)
|
new_class = super_new(mcs, name, bases, attrs)
|
||||||
|
|
||||||
meta = new_class._meta
|
meta = new_class._meta
|
||||||
|
|
||||||
# Set index specifications
|
# Set index specifications
|
||||||
meta['index_specs'] = new_class._build_index_specs(meta['indexes'])
|
meta["index_specs"] = new_class._build_index_specs(meta["indexes"])
|
||||||
|
|
||||||
# If collection is a callable - call it and set the value
|
# If collection is a callable - call it and set the value
|
||||||
collection = meta.get('collection')
|
collection = meta.get("collection")
|
||||||
if callable(collection):
|
if callable(collection):
|
||||||
new_class._meta['collection'] = collection(new_class)
|
new_class._meta["collection"] = collection(new_class)
|
||||||
|
|
||||||
# Provide a default queryset unless exists or one has been set
|
# Provide a default queryset unless exists or one has been set
|
||||||
if 'objects' not in dir(new_class):
|
if "objects" not in dir(new_class):
|
||||||
new_class.objects = QuerySetManager()
|
new_class.objects = QuerySetManager()
|
||||||
|
|
||||||
# Validate the fields and set primary key if needed
|
# Validate the fields and set primary key if needed
|
||||||
for field_name, field in new_class._fields.iteritems():
|
for field_name, field in new_class._fields.items():
|
||||||
if field.primary_key:
|
if field.primary_key:
|
||||||
# Ensure only one primary key is set
|
# Ensure only one primary key is set
|
||||||
current_pk = new_class._meta.get('id_field')
|
current_pk = new_class._meta.get("id_field")
|
||||||
if current_pk and current_pk != field_name:
|
if current_pk and current_pk != field_name:
|
||||||
raise ValueError('Cannot override primary key field')
|
raise ValueError("Cannot override primary key field")
|
||||||
|
|
||||||
# Set primary key
|
# Set primary key
|
||||||
if not current_pk:
|
if not current_pk:
|
||||||
new_class._meta['id_field'] = field_name
|
new_class._meta["id_field"] = field_name
|
||||||
new_class.id = field
|
new_class.id = field
|
||||||
|
|
||||||
# Set primary key if not defined by the document
|
# If the document doesn't explicitly define a primary key field, create
|
||||||
new_class._auto_id_field = False
|
# one. Make it an ObjectIdField and give it a non-clashing name ("id"
|
||||||
if not new_class._meta.get('id_field'):
|
# by default, but can be different if that one's taken).
|
||||||
new_class._auto_id_field = True
|
if not new_class._meta.get("id_field"):
|
||||||
new_class._meta['id_field'] = 'id'
|
id_name, id_db_name = mcs.get_auto_id_names(new_class)
|
||||||
new_class._fields['id'] = ObjectIdField(db_field='_id')
|
new_class._meta["id_field"] = id_name
|
||||||
new_class._fields['id'].name = 'id'
|
new_class._fields[id_name] = ObjectIdField(db_field=id_db_name)
|
||||||
new_class.id = new_class._fields['id']
|
new_class._fields[id_name].name = id_name
|
||||||
|
new_class.id = new_class._fields[id_name]
|
||||||
|
new_class._db_field_map[id_name] = id_db_name
|
||||||
|
new_class._reverse_db_field_map[id_db_name] = id_name
|
||||||
|
|
||||||
# Prepend id field to _fields_ordered
|
# Prepend the ID field to _fields_ordered (so that it's *always*
|
||||||
if 'id' in new_class._fields and 'id' not in new_class._fields_ordered:
|
# the first field).
|
||||||
new_class._fields_ordered = ('id', ) + new_class._fields_ordered
|
new_class._fields_ordered = (id_name,) + new_class._fields_ordered
|
||||||
|
|
||||||
# Merge in exceptions with parent hierarchy
|
# Merge in exceptions with parent hierarchy.
|
||||||
exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned)
|
exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned)
|
||||||
module = attrs.get('__module__')
|
module = attrs.get("__module__")
|
||||||
for exc in exceptions_to_merge:
|
for exc in exceptions_to_merge:
|
||||||
name = exc.__name__
|
name = exc.__name__
|
||||||
parents = tuple(getattr(base, name) for base in flattened_bases
|
parents = tuple(
|
||||||
if hasattr(base, name)) or (exc,)
|
getattr(base, name) for base in flattened_bases if hasattr(base, name)
|
||||||
# Create new exception and set to new_class
|
) or (exc,)
|
||||||
exception = type(name, parents, {'__module__': module})
|
|
||||||
|
# Create a new exception and set it as an attribute on the new
|
||||||
|
# class.
|
||||||
|
exception = type(name, parents, {"__module__": module})
|
||||||
setattr(new_class, name, exception)
|
setattr(new_class, name, exception)
|
||||||
|
|
||||||
return new_class
|
return new_class
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def get_auto_id_names(mcs, new_class):
|
||||||
|
"""Find a name for the automatic ID field for the given new class.
|
||||||
|
|
||||||
|
Return a two-element tuple where the first item is the field name (i.e.
|
||||||
|
the attribute name on the object) and the second element is the DB
|
||||||
|
field name (i.e. the name of the key stored in MongoDB).
|
||||||
|
|
||||||
|
Defaults to ('id', '_id'), or generates a non-clashing name in the form
|
||||||
|
of ('auto_id_X', '_auto_id_X') if the default name is already taken.
|
||||||
|
"""
|
||||||
|
id_name, id_db_name = ("id", "_id")
|
||||||
|
existing_fields = {field_name for field_name in new_class._fields}
|
||||||
|
existing_db_fields = {v.db_field for v in new_class._fields.values()}
|
||||||
|
if id_name not in existing_fields and id_db_name not in existing_db_fields:
|
||||||
|
return id_name, id_db_name
|
||||||
|
|
||||||
|
id_basename, id_db_basename, i = ("auto_id", "_auto_id", 0)
|
||||||
|
for i in itertools.count():
|
||||||
|
id_name = "{}_{}".format(id_basename, i)
|
||||||
|
id_db_name = "{}_{}".format(id_db_basename, i)
|
||||||
|
if id_name not in existing_fields and id_db_name not in existing_db_fields:
|
||||||
|
return id_name, id_db_name
|
||||||
|
|
||||||
|
|
||||||
class MetaDict(dict):
|
class MetaDict(dict):
|
||||||
"""Custom dictionary for meta classes.
|
"""Custom dictionary for meta classes.
|
||||||
Handles the merging of set indexes
|
Handles the merging of set indexes
|
||||||
"""
|
"""
|
||||||
_merge_options = ('indexes',)
|
|
||||||
|
_merge_options = ("indexes",)
|
||||||
|
|
||||||
def merge(self, new_options):
|
def merge(self, new_options):
|
||||||
for k, v in new_options.iteritems():
|
for k, v in new_options.items():
|
||||||
if k in self._merge_options:
|
if k in self._merge_options:
|
||||||
self[k] = self.get(k, []) + v
|
self[k] = self.get(k, []) + v
|
||||||
else:
|
else:
|
||||||
@@ -401,4 +462,5 @@ class MetaDict(dict):
|
|||||||
|
|
||||||
class BasesTuple(tuple):
|
class BasesTuple(tuple):
|
||||||
"""Special class to handle introspection of bases tuple in __new__"""
|
"""Special class to handle introspection of bases tuple in __new__"""
|
||||||
|
|
||||||
pass
|
pass
|
||||||
|
|||||||
22
mongoengine/base/utils.py
Normal file
22
mongoengine/base/utils.py
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
import re
|
||||||
|
|
||||||
|
|
||||||
|
class LazyRegexCompiler:
|
||||||
|
"""Descriptor to allow lazy compilation of regex"""
|
||||||
|
|
||||||
|
def __init__(self, pattern, flags=0):
|
||||||
|
self._pattern = pattern
|
||||||
|
self._flags = flags
|
||||||
|
self._compiled_regex = None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def compiled_regex(self):
|
||||||
|
if self._compiled_regex is None:
|
||||||
|
self._compiled_regex = re.compile(self._pattern, self._flags)
|
||||||
|
return self._compiled_regex
|
||||||
|
|
||||||
|
def __get__(self, instance, owner):
|
||||||
|
return self.compiled_regex
|
||||||
|
|
||||||
|
def __set__(self, instance, value):
|
||||||
|
raise AttributeError("Can not set attribute LazyRegexCompiler")
|
||||||
@@ -1,4 +1,5 @@
|
|||||||
_class_registry_cache = {}
|
_class_registry_cache = {}
|
||||||
|
_field_list_cache = []
|
||||||
|
|
||||||
|
|
||||||
def _import_class(cls_name):
|
def _import_class(cls_name):
|
||||||
@@ -18,31 +19,44 @@ def _import_class(cls_name):
|
|||||||
if cls_name in _class_registry_cache:
|
if cls_name in _class_registry_cache:
|
||||||
return _class_registry_cache.get(cls_name)
|
return _class_registry_cache.get(cls_name)
|
||||||
|
|
||||||
doc_classes = ('Document', 'DynamicEmbeddedDocument', 'EmbeddedDocument',
|
doc_classes = (
|
||||||
'MapReduceDocument')
|
"Document",
|
||||||
field_classes = ('DictField', 'DynamicField', 'EmbeddedDocumentField',
|
"DynamicEmbeddedDocument",
|
||||||
'FileField', 'GenericReferenceField',
|
"EmbeddedDocument",
|
||||||
'GenericEmbeddedDocumentField', 'GeoPointField',
|
"MapReduceDocument",
|
||||||
'PointField', 'LineStringField', 'ListField',
|
)
|
||||||
'PolygonField', 'ReferenceField', 'StringField',
|
|
||||||
'ComplexBaseField', 'GeoJsonBaseField')
|
|
||||||
queryset_classes = ('OperationError',)
|
|
||||||
deref_classes = ('DeReference',)
|
|
||||||
|
|
||||||
if cls_name in doc_classes:
|
# Field Classes
|
||||||
|
if not _field_list_cache:
|
||||||
|
from mongoengine.fields import __all__ as fields
|
||||||
|
|
||||||
|
_field_list_cache.extend(fields)
|
||||||
|
from mongoengine.base.fields import __all__ as fields
|
||||||
|
|
||||||
|
_field_list_cache.extend(fields)
|
||||||
|
|
||||||
|
field_classes = _field_list_cache
|
||||||
|
|
||||||
|
deref_classes = ("DeReference",)
|
||||||
|
|
||||||
|
if cls_name == "BaseDocument":
|
||||||
|
from mongoengine.base import document as module
|
||||||
|
|
||||||
|
import_classes = ["BaseDocument"]
|
||||||
|
elif cls_name in doc_classes:
|
||||||
from mongoengine import document as module
|
from mongoengine import document as module
|
||||||
|
|
||||||
import_classes = doc_classes
|
import_classes = doc_classes
|
||||||
elif cls_name in field_classes:
|
elif cls_name in field_classes:
|
||||||
from mongoengine import fields as module
|
from mongoengine import fields as module
|
||||||
|
|
||||||
import_classes = field_classes
|
import_classes = field_classes
|
||||||
elif cls_name in queryset_classes:
|
|
||||||
from mongoengine import queryset as module
|
|
||||||
import_classes = queryset_classes
|
|
||||||
elif cls_name in deref_classes:
|
elif cls_name in deref_classes:
|
||||||
from mongoengine import dereference as module
|
from mongoengine import dereference as module
|
||||||
|
|
||||||
import_classes = deref_classes
|
import_classes = deref_classes
|
||||||
else:
|
else:
|
||||||
raise ValueError('No import set for: ' % cls_name)
|
raise ValueError("No import set for: %s" % cls_name)
|
||||||
|
|
||||||
for cls in import_classes:
|
for cls in import_classes:
|
||||||
_class_registry_cache[cls] = getattr(module, cls)
|
_class_registry_cache[cls] = getattr(module, cls)
|
||||||
|
|||||||
@@ -1,161 +1,405 @@
|
|||||||
import pymongo
|
from pymongo import MongoClient, ReadPreference, uri_parser
|
||||||
from pymongo import MongoClient, MongoReplicaSetClient, uri_parser
|
from pymongo.database import _check_name
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"DEFAULT_CONNECTION_NAME",
|
||||||
|
"DEFAULT_DATABASE_NAME",
|
||||||
|
"ConnectionFailure",
|
||||||
|
"connect",
|
||||||
|
"disconnect",
|
||||||
|
"disconnect_all",
|
||||||
|
"get_connection",
|
||||||
|
"get_db",
|
||||||
|
"register_connection",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
__all__ = ['ConnectionError', 'connect', 'register_connection',
|
DEFAULT_CONNECTION_NAME = "default"
|
||||||
'DEFAULT_CONNECTION_NAME']
|
DEFAULT_DATABASE_NAME = "test"
|
||||||
|
DEFAULT_HOST = "localhost"
|
||||||
|
DEFAULT_PORT = 27017
|
||||||
DEFAULT_CONNECTION_NAME = 'default'
|
|
||||||
|
|
||||||
|
|
||||||
class ConnectionError(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
_connection_settings = {}
|
_connection_settings = {}
|
||||||
_connections = {}
|
_connections = {}
|
||||||
_dbs = {}
|
_dbs = {}
|
||||||
|
|
||||||
|
READ_PREFERENCE = ReadPreference.PRIMARY
|
||||||
|
|
||||||
def register_connection(alias, name, host=None, port=None,
|
|
||||||
is_slave=False, read_preference=False, slaves=None,
|
|
||||||
username=None, password=None, **kwargs):
|
|
||||||
"""Add a connection.
|
|
||||||
|
|
||||||
:param alias: the name that will be used to refer to this connection
|
|
||||||
throughout MongoEngine
|
|
||||||
:param name: the name of the specific database to use
|
|
||||||
:param host: the host name of the :program:`mongod` instance to connect to
|
|
||||||
:param port: the port that the :program:`mongod` instance is running on
|
|
||||||
:param is_slave: whether the connection can act as a slave
|
|
||||||
** Depreciated pymongo 2.0.1+
|
|
||||||
:param read_preference: The read preference for the collection
|
|
||||||
** Added pymongo 2.1
|
|
||||||
:param slaves: a list of aliases of slave connections; each of these must
|
|
||||||
be a registered connection that has :attr:`is_slave` set to ``True``
|
|
||||||
:param username: username to authenticate with
|
|
||||||
:param password: password to authenticate with
|
|
||||||
:param kwargs: allow ad-hoc parameters to be passed into the pymongo driver
|
|
||||||
|
|
||||||
|
class ConnectionFailure(Exception):
|
||||||
|
"""Error raised when the database connection can't be established or
|
||||||
|
when a connection with a requested alias can't be retrieved.
|
||||||
"""
|
"""
|
||||||
global _connection_settings
|
|
||||||
|
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def _check_db_name(name):
|
||||||
|
"""Check if a database name is valid.
|
||||||
|
This functionality is copied from pymongo Database class constructor.
|
||||||
|
"""
|
||||||
|
if not isinstance(name, str):
|
||||||
|
raise TypeError("name must be an instance of %s" % str)
|
||||||
|
elif name != "$external":
|
||||||
|
_check_name(name)
|
||||||
|
|
||||||
|
|
||||||
|
def _get_connection_settings(
|
||||||
|
db=None,
|
||||||
|
name=None,
|
||||||
|
host=None,
|
||||||
|
port=None,
|
||||||
|
read_preference=READ_PREFERENCE,
|
||||||
|
username=None,
|
||||||
|
password=None,
|
||||||
|
authentication_source=None,
|
||||||
|
authentication_mechanism=None,
|
||||||
|
**kwargs
|
||||||
|
):
|
||||||
|
"""Get the connection settings as a dict
|
||||||
|
|
||||||
|
: param db: the name of the database to use, for compatibility with connect
|
||||||
|
: param name: the name of the specific database to use
|
||||||
|
: param host: the host name of the: program: `mongod` instance to connect to
|
||||||
|
: param port: the port that the: program: `mongod` instance is running on
|
||||||
|
: param read_preference: The read preference for the collection
|
||||||
|
: param username: username to authenticate with
|
||||||
|
: param password: password to authenticate with
|
||||||
|
: param authentication_source: database to authenticate against
|
||||||
|
: param authentication_mechanism: database authentication mechanisms.
|
||||||
|
By default, use SCRAM-SHA-1 with MongoDB 3.0 and later,
|
||||||
|
MONGODB-CR (MongoDB Challenge Response protocol) for older servers.
|
||||||
|
: param is_mock: explicitly use mongomock for this connection
|
||||||
|
(can also be done by using `mongomock: // ` as db host prefix)
|
||||||
|
: param kwargs: ad-hoc parameters to be passed into the pymongo driver,
|
||||||
|
for example maxpoolsize, tz_aware, etc. See the documentation
|
||||||
|
for pymongo's `MongoClient` for a full list.
|
||||||
|
|
||||||
|
.. versionchanged:: 0.10.6 - added mongomock support
|
||||||
|
"""
|
||||||
conn_settings = {
|
conn_settings = {
|
||||||
'name': name,
|
"name": name or db or DEFAULT_DATABASE_NAME,
|
||||||
'host': host or 'localhost',
|
"host": host or DEFAULT_HOST,
|
||||||
'port': port or 27017,
|
"port": port or DEFAULT_PORT,
|
||||||
'is_slave': is_slave,
|
"read_preference": read_preference,
|
||||||
'slaves': slaves or [],
|
"username": username,
|
||||||
'username': username,
|
"password": password,
|
||||||
'password': password,
|
"authentication_source": authentication_source,
|
||||||
'read_preference': read_preference
|
"authentication_mechanism": authentication_mechanism,
|
||||||
}
|
}
|
||||||
|
|
||||||
# Handle uri style connections
|
_check_db_name(conn_settings["name"])
|
||||||
if "://" in conn_settings['host']:
|
conn_host = conn_settings["host"]
|
||||||
uri_dict = uri_parser.parse_uri(conn_settings['host'])
|
|
||||||
conn_settings.update({
|
# Host can be a list or a string, so if string, force to a list.
|
||||||
'name': uri_dict.get('database') or name,
|
if isinstance(conn_host, str):
|
||||||
'username': uri_dict.get('username'),
|
conn_host = [conn_host]
|
||||||
'password': uri_dict.get('password'),
|
|
||||||
'read_preference': read_preference,
|
resolved_hosts = []
|
||||||
})
|
for entity in conn_host:
|
||||||
if "replicaSet" in conn_settings['host']:
|
|
||||||
conn_settings['replicaSet'] = True
|
# Handle Mongomock
|
||||||
|
if entity.startswith("mongomock://"):
|
||||||
|
conn_settings["is_mock"] = True
|
||||||
|
# `mongomock://` is not a valid url prefix and must be replaced by `mongodb://`
|
||||||
|
new_entity = entity.replace("mongomock://", "mongodb://", 1)
|
||||||
|
resolved_hosts.append(new_entity)
|
||||||
|
|
||||||
|
uri_dict = uri_parser.parse_uri(new_entity)
|
||||||
|
|
||||||
|
database = uri_dict.get("database")
|
||||||
|
if database:
|
||||||
|
conn_settings["name"] = database
|
||||||
|
|
||||||
|
# Handle URI style connections, only updating connection params which
|
||||||
|
# were explicitly specified in the URI.
|
||||||
|
elif "://" in entity:
|
||||||
|
uri_dict = uri_parser.parse_uri(entity)
|
||||||
|
resolved_hosts.append(entity)
|
||||||
|
|
||||||
|
database = uri_dict.get("database")
|
||||||
|
if database:
|
||||||
|
conn_settings["name"] = database
|
||||||
|
|
||||||
|
for param in ("read_preference", "username", "password"):
|
||||||
|
if uri_dict.get(param):
|
||||||
|
conn_settings[param] = uri_dict[param]
|
||||||
|
|
||||||
|
uri_options = uri_dict["options"]
|
||||||
|
if "replicaset" in uri_options:
|
||||||
|
conn_settings["replicaSet"] = uri_options["replicaset"]
|
||||||
|
if "authsource" in uri_options:
|
||||||
|
conn_settings["authentication_source"] = uri_options["authsource"]
|
||||||
|
if "authmechanism" in uri_options:
|
||||||
|
conn_settings["authentication_mechanism"] = uri_options["authmechanism"]
|
||||||
|
if "readpreference" in uri_options:
|
||||||
|
read_preferences = (
|
||||||
|
ReadPreference.NEAREST,
|
||||||
|
ReadPreference.PRIMARY,
|
||||||
|
ReadPreference.PRIMARY_PREFERRED,
|
||||||
|
ReadPreference.SECONDARY,
|
||||||
|
ReadPreference.SECONDARY_PREFERRED,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Starting with PyMongo v3.5, the "readpreference" option is
|
||||||
|
# returned as a string (e.g. "secondaryPreferred") and not an
|
||||||
|
# int (e.g. 3).
|
||||||
|
# TODO simplify the code below once we drop support for
|
||||||
|
# PyMongo v3.4.
|
||||||
|
read_pf_mode = uri_options["readpreference"]
|
||||||
|
if isinstance(read_pf_mode, str):
|
||||||
|
read_pf_mode = read_pf_mode.lower()
|
||||||
|
for preference in read_preferences:
|
||||||
|
if (
|
||||||
|
preference.name.lower() == read_pf_mode
|
||||||
|
or preference.mode == read_pf_mode
|
||||||
|
):
|
||||||
|
conn_settings["read_preference"] = preference
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
resolved_hosts.append(entity)
|
||||||
|
conn_settings["host"] = resolved_hosts
|
||||||
|
|
||||||
|
# Deprecated parameters that should not be passed on
|
||||||
|
kwargs.pop("slaves", None)
|
||||||
|
kwargs.pop("is_slave", None)
|
||||||
|
|
||||||
conn_settings.update(kwargs)
|
conn_settings.update(kwargs)
|
||||||
|
return conn_settings
|
||||||
|
|
||||||
|
|
||||||
|
def register_connection(
|
||||||
|
alias,
|
||||||
|
db=None,
|
||||||
|
name=None,
|
||||||
|
host=None,
|
||||||
|
port=None,
|
||||||
|
read_preference=READ_PREFERENCE,
|
||||||
|
username=None,
|
||||||
|
password=None,
|
||||||
|
authentication_source=None,
|
||||||
|
authentication_mechanism=None,
|
||||||
|
**kwargs
|
||||||
|
):
|
||||||
|
"""Register the connection settings.
|
||||||
|
|
||||||
|
: param alias: the name that will be used to refer to this connection
|
||||||
|
throughout MongoEngine
|
||||||
|
: param db: the name of the database to use, for compatibility with connect
|
||||||
|
: param name: the name of the specific database to use
|
||||||
|
: param host: the host name of the: program: `mongod` instance to connect to
|
||||||
|
: param port: the port that the: program: `mongod` instance is running on
|
||||||
|
: param read_preference: The read preference for the collection
|
||||||
|
: param username: username to authenticate with
|
||||||
|
: param password: password to authenticate with
|
||||||
|
: param authentication_source: database to authenticate against
|
||||||
|
: param authentication_mechanism: database authentication mechanisms.
|
||||||
|
By default, use SCRAM-SHA-1 with MongoDB 3.0 and later,
|
||||||
|
MONGODB-CR (MongoDB Challenge Response protocol) for older servers.
|
||||||
|
: param is_mock: explicitly use mongomock for this connection
|
||||||
|
(can also be done by using `mongomock: // ` as db host prefix)
|
||||||
|
: param kwargs: ad-hoc parameters to be passed into the pymongo driver,
|
||||||
|
for example maxpoolsize, tz_aware, etc. See the documentation
|
||||||
|
for pymongo's `MongoClient` for a full list.
|
||||||
|
|
||||||
|
.. versionchanged:: 0.10.6 - added mongomock support
|
||||||
|
"""
|
||||||
|
conn_settings = _get_connection_settings(
|
||||||
|
db=db,
|
||||||
|
name=name,
|
||||||
|
host=host,
|
||||||
|
port=port,
|
||||||
|
read_preference=read_preference,
|
||||||
|
username=username,
|
||||||
|
password=password,
|
||||||
|
authentication_source=authentication_source,
|
||||||
|
authentication_mechanism=authentication_mechanism,
|
||||||
|
**kwargs
|
||||||
|
)
|
||||||
_connection_settings[alias] = conn_settings
|
_connection_settings[alias] = conn_settings
|
||||||
|
|
||||||
|
|
||||||
def disconnect(alias=DEFAULT_CONNECTION_NAME):
|
def disconnect(alias=DEFAULT_CONNECTION_NAME):
|
||||||
global _connections
|
"""Close the connection with a given alias."""
|
||||||
global _dbs
|
from mongoengine.base.common import _get_documents_by_db
|
||||||
|
from mongoengine import Document
|
||||||
|
|
||||||
if alias in _connections:
|
if alias in _connections:
|
||||||
get_connection(alias=alias).disconnect()
|
get_connection(alias=alias).close()
|
||||||
del _connections[alias]
|
del _connections[alias]
|
||||||
|
|
||||||
if alias in _dbs:
|
if alias in _dbs:
|
||||||
|
# Detach all cached collections in Documents
|
||||||
|
for doc_cls in _get_documents_by_db(alias, DEFAULT_CONNECTION_NAME):
|
||||||
|
if issubclass(doc_cls, Document): # Skip EmbeddedDocument
|
||||||
|
doc_cls._disconnect()
|
||||||
|
|
||||||
del _dbs[alias]
|
del _dbs[alias]
|
||||||
|
|
||||||
|
if alias in _connection_settings:
|
||||||
|
del _connection_settings[alias]
|
||||||
|
|
||||||
|
|
||||||
|
def disconnect_all():
|
||||||
|
"""Close all registered database."""
|
||||||
|
for alias in list(_connections.keys()):
|
||||||
|
disconnect(alias)
|
||||||
|
|
||||||
|
|
||||||
def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
|
def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
|
||||||
global _connections
|
"""Return a connection with a given alias."""
|
||||||
|
|
||||||
# Connect to the database if not already connected
|
# Connect to the database if not already connected
|
||||||
if reconnect:
|
if reconnect:
|
||||||
disconnect(alias)
|
disconnect(alias)
|
||||||
|
|
||||||
if alias not in _connections:
|
# If the requested alias already exists in the _connections list, return
|
||||||
if alias not in _connection_settings:
|
# it immediately.
|
||||||
msg = 'Connection with alias "%s" has not been defined' % alias
|
if alias in _connections:
|
||||||
if alias == DEFAULT_CONNECTION_NAME:
|
return _connections[alias]
|
||||||
msg = 'You have not defined a default connection'
|
|
||||||
raise ConnectionError(msg)
|
|
||||||
conn_settings = _connection_settings[alias].copy()
|
|
||||||
|
|
||||||
if hasattr(pymongo, 'version_tuple'): # Support for 2.1+
|
# Validate that the requested alias exists in the _connection_settings.
|
||||||
conn_settings.pop('name', None)
|
# Raise ConnectionFailure if it doesn't.
|
||||||
conn_settings.pop('slaves', None)
|
if alias not in _connection_settings:
|
||||||
conn_settings.pop('is_slave', None)
|
if alias == DEFAULT_CONNECTION_NAME:
|
||||||
conn_settings.pop('username', None)
|
msg = "You have not defined a default connection"
|
||||||
conn_settings.pop('password', None)
|
|
||||||
else:
|
else:
|
||||||
# Get all the slave connections
|
msg = 'Connection with alias "%s" has not been defined' % alias
|
||||||
if 'slaves' in conn_settings:
|
raise ConnectionFailure(msg)
|
||||||
slaves = []
|
|
||||||
for slave_alias in conn_settings['slaves']:
|
|
||||||
slaves.append(get_connection(slave_alias))
|
|
||||||
conn_settings['slaves'] = slaves
|
|
||||||
conn_settings.pop('read_preference', None)
|
|
||||||
|
|
||||||
connection_class = MongoClient
|
def _clean_settings(settings_dict):
|
||||||
if 'replicaSet' in conn_settings:
|
irrelevant_fields_set = {
|
||||||
conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
|
"name",
|
||||||
# Discard port since it can't be used on MongoReplicaSetClient
|
"username",
|
||||||
conn_settings.pop('port', None)
|
"password",
|
||||||
# Discard replicaSet if not base string
|
"authentication_source",
|
||||||
if not isinstance(conn_settings['replicaSet'], basestring):
|
"authentication_mechanism",
|
||||||
conn_settings.pop('replicaSet', None)
|
}
|
||||||
connection_class = MongoReplicaSetClient
|
return {
|
||||||
|
k: v for k, v in settings_dict.items() if k not in irrelevant_fields_set
|
||||||
|
}
|
||||||
|
|
||||||
|
raw_conn_settings = _connection_settings[alias].copy()
|
||||||
|
|
||||||
|
# Retrieve a copy of the connection settings associated with the requested
|
||||||
|
# alias and remove the database name and authentication info (we don't
|
||||||
|
# care about them at this point).
|
||||||
|
conn_settings = _clean_settings(raw_conn_settings)
|
||||||
|
|
||||||
|
# Determine if we should use PyMongo's or mongomock's MongoClient.
|
||||||
|
is_mock = conn_settings.pop("is_mock", False)
|
||||||
|
if is_mock:
|
||||||
try:
|
try:
|
||||||
_connections[alias] = connection_class(**conn_settings)
|
import mongomock
|
||||||
except Exception, e:
|
except ImportError:
|
||||||
raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e))
|
raise RuntimeError("You need mongomock installed to mock MongoEngine.")
|
||||||
|
connection_class = mongomock.MongoClient
|
||||||
|
else:
|
||||||
|
connection_class = MongoClient
|
||||||
|
|
||||||
|
# Re-use existing connection if one is suitable.
|
||||||
|
existing_connection = _find_existing_connection(raw_conn_settings)
|
||||||
|
if existing_connection:
|
||||||
|
connection = existing_connection
|
||||||
|
else:
|
||||||
|
connection = _create_connection(
|
||||||
|
alias=alias, connection_class=connection_class, **conn_settings
|
||||||
|
)
|
||||||
|
_connections[alias] = connection
|
||||||
return _connections[alias]
|
return _connections[alias]
|
||||||
|
|
||||||
|
|
||||||
|
def _create_connection(alias, connection_class, **connection_settings):
|
||||||
|
"""
|
||||||
|
Create the new connection for this alias. Raise
|
||||||
|
ConnectionFailure if it can't be established.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
return connection_class(**connection_settings)
|
||||||
|
except Exception as e:
|
||||||
|
raise ConnectionFailure("Cannot connect to database {} :\n{}".format(alias, e))
|
||||||
|
|
||||||
|
|
||||||
|
def _find_existing_connection(connection_settings):
|
||||||
|
"""
|
||||||
|
Check if an existing connection could be reused
|
||||||
|
|
||||||
|
Iterate over all of the connection settings and if an existing connection
|
||||||
|
with the same parameters is suitable, return it
|
||||||
|
|
||||||
|
:param connection_settings: the settings of the new connection
|
||||||
|
:return: An existing connection or None
|
||||||
|
"""
|
||||||
|
connection_settings_bis = (
|
||||||
|
(db_alias, settings.copy())
|
||||||
|
for db_alias, settings in _connection_settings.items()
|
||||||
|
)
|
||||||
|
|
||||||
|
def _clean_settings(settings_dict):
|
||||||
|
# Only remove the name but it's important to
|
||||||
|
# keep the username/password/authentication_source/authentication_mechanism
|
||||||
|
# to identify if the connection could be shared (cfr https://github.com/MongoEngine/mongoengine/issues/2047)
|
||||||
|
return {k: v for k, v in settings_dict.items() if k != "name"}
|
||||||
|
|
||||||
|
cleaned_conn_settings = _clean_settings(connection_settings)
|
||||||
|
for db_alias, connection_settings in connection_settings_bis:
|
||||||
|
db_conn_settings = _clean_settings(connection_settings)
|
||||||
|
if cleaned_conn_settings == db_conn_settings and _connections.get(db_alias):
|
||||||
|
return _connections[db_alias]
|
||||||
|
|
||||||
|
|
||||||
def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
|
def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
|
||||||
global _dbs
|
|
||||||
if reconnect:
|
if reconnect:
|
||||||
disconnect(alias)
|
disconnect(alias)
|
||||||
|
|
||||||
if alias not in _dbs:
|
if alias not in _dbs:
|
||||||
conn = get_connection(alias)
|
conn = get_connection(alias)
|
||||||
conn_settings = _connection_settings[alias]
|
conn_settings = _connection_settings[alias]
|
||||||
db = conn[conn_settings['name']]
|
db = conn[conn_settings["name"]]
|
||||||
|
auth_kwargs = {"source": conn_settings["authentication_source"]}
|
||||||
|
if conn_settings["authentication_mechanism"] is not None:
|
||||||
|
auth_kwargs["mechanism"] = conn_settings["authentication_mechanism"]
|
||||||
# Authenticate if necessary
|
# Authenticate if necessary
|
||||||
if conn_settings['username'] and conn_settings['password']:
|
if conn_settings["username"] and (
|
||||||
db.authenticate(conn_settings['username'],
|
conn_settings["password"]
|
||||||
conn_settings['password'])
|
or conn_settings["authentication_mechanism"] == "MONGODB-X509"
|
||||||
|
):
|
||||||
|
db.authenticate(
|
||||||
|
conn_settings["username"], conn_settings["password"], **auth_kwargs
|
||||||
|
)
|
||||||
_dbs[alias] = db
|
_dbs[alias] = db
|
||||||
return _dbs[alias]
|
return _dbs[alias]
|
||||||
|
|
||||||
|
|
||||||
def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs):
|
def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs):
|
||||||
"""Connect to the database specified by the 'db' argument.
|
"""Connect to the database specified by the 'db' argument.
|
||||||
|
|
||||||
Connection settings may be provided here as well if the database is not
|
Connection settings may be provided here as well if the database is not
|
||||||
running on the default port on localhost. If authentication is needed,
|
running on the default port on localhost. If authentication is needed,
|
||||||
provide username and password arguments as well.
|
provide username and password arguments as well.
|
||||||
|
|
||||||
Multiple databases are supported by using aliases. Provide a separate
|
Multiple databases are supported by using aliases. Provide a separate
|
||||||
`alias` to connect to a different instance of :program:`mongod`.
|
`alias` to connect to a different instance of: program: `mongod`.
|
||||||
|
|
||||||
|
In order to replace a connection identified by a given alias, you'll
|
||||||
|
need to call ``disconnect`` first
|
||||||
|
|
||||||
|
See the docstring for `register_connection` for more details about all
|
||||||
|
supported kwargs.
|
||||||
|
|
||||||
.. versionchanged:: 0.6 - added multiple database support.
|
.. versionchanged:: 0.6 - added multiple database support.
|
||||||
"""
|
"""
|
||||||
global _connections
|
if alias in _connections:
|
||||||
if alias not in _connections:
|
prev_conn_setting = _connection_settings[alias]
|
||||||
|
new_conn_settings = _get_connection_settings(db, **kwargs)
|
||||||
|
|
||||||
|
if new_conn_settings != prev_conn_setting:
|
||||||
|
err_msg = (
|
||||||
|
"A different connection with alias `{}` was already "
|
||||||
|
"registered. Use disconnect() first"
|
||||||
|
).format(alias)
|
||||||
|
raise ConnectionFailure(err_msg)
|
||||||
|
else:
|
||||||
register_connection(alias, db, **kwargs)
|
register_connection(alias, db, **kwargs)
|
||||||
|
|
||||||
return get_connection(alias)
|
return get_connection(alias)
|
||||||
|
|||||||
@@ -1,14 +1,25 @@
|
|||||||
|
from contextlib import contextmanager
|
||||||
|
|
||||||
|
from pymongo.read_concern import ReadConcern
|
||||||
|
from pymongo.write_concern import WriteConcern
|
||||||
|
|
||||||
from mongoengine.common import _import_class
|
from mongoengine.common import _import_class
|
||||||
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
|
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
|
||||||
from mongoengine.queryset import QuerySet
|
from mongoengine.pymongo_support import count_documents
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"switch_db",
|
||||||
|
"switch_collection",
|
||||||
|
"no_dereference",
|
||||||
|
"no_sub_classes",
|
||||||
|
"query_counter",
|
||||||
|
"set_write_concern",
|
||||||
|
"set_read_write_concern",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
__all__ = ("switch_db", "switch_collection", "no_dereference",
|
class switch_db:
|
||||||
"no_sub_classes", "query_counter")
|
"""switch_db alias context manager.
|
||||||
|
|
||||||
|
|
||||||
class switch_db(object):
|
|
||||||
""" switch_db alias context manager.
|
|
||||||
|
|
||||||
Example ::
|
Example ::
|
||||||
|
|
||||||
@@ -19,15 +30,14 @@ class switch_db(object):
|
|||||||
class Group(Document):
|
class Group(Document):
|
||||||
name = StringField()
|
name = StringField()
|
||||||
|
|
||||||
Group(name="test").save() # Saves in the default db
|
Group(name='test').save() # Saves in the default db
|
||||||
|
|
||||||
with switch_db(Group, 'testdb-1') as Group:
|
with switch_db(Group, 'testdb-1') as Group:
|
||||||
Group(name="hello testdb!").save() # Saves in testdb-1
|
Group(name='hello testdb!').save() # Saves in testdb-1
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, cls, db_alias):
|
def __init__(self, cls, db_alias):
|
||||||
""" Construct the switch_db context manager
|
"""Construct the switch_db context manager
|
||||||
|
|
||||||
:param cls: the class to change the registered db
|
:param cls: the class to change the registered db
|
||||||
:param db_alias: the name of the specific database to use
|
:param db_alias: the name of the specific database to use
|
||||||
@@ -38,34 +48,33 @@ class switch_db(object):
|
|||||||
self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME)
|
self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME)
|
||||||
|
|
||||||
def __enter__(self):
|
def __enter__(self):
|
||||||
""" change the db_alias and clear the cached collection """
|
"""Change the db_alias and clear the cached collection."""
|
||||||
self.cls._meta["db_alias"] = self.db_alias
|
self.cls._meta["db_alias"] = self.db_alias
|
||||||
self.cls._collection = None
|
self.cls._collection = None
|
||||||
return self.cls
|
return self.cls
|
||||||
|
|
||||||
def __exit__(self, t, value, traceback):
|
def __exit__(self, t, value, traceback):
|
||||||
""" Reset the db_alias and collection """
|
"""Reset the db_alias and collection."""
|
||||||
self.cls._meta["db_alias"] = self.ori_db_alias
|
self.cls._meta["db_alias"] = self.ori_db_alias
|
||||||
self.cls._collection = self.collection
|
self.cls._collection = self.collection
|
||||||
|
|
||||||
|
|
||||||
class switch_collection(object):
|
class switch_collection:
|
||||||
""" switch_collection alias context manager.
|
"""switch_collection alias context manager.
|
||||||
|
|
||||||
Example ::
|
Example ::
|
||||||
|
|
||||||
class Group(Document):
|
class Group(Document):
|
||||||
name = StringField()
|
name = StringField()
|
||||||
|
|
||||||
Group(name="test").save() # Saves in the default db
|
Group(name='test').save() # Saves in the default db
|
||||||
|
|
||||||
with switch_collection(Group, 'group1') as Group:
|
with switch_collection(Group, 'group1') as Group:
|
||||||
Group(name="hello testdb!").save() # Saves in group1 collection
|
Group(name='hello testdb!').save() # Saves in group1 collection
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, cls, collection_name):
|
def __init__(self, cls, collection_name):
|
||||||
""" Construct the switch_collection context manager
|
"""Construct the switch_collection context manager.
|
||||||
|
|
||||||
:param cls: the class to change the registered db
|
:param cls: the class to change the registered db
|
||||||
:param collection_name: the name of the collection to use
|
:param collection_name: the name of the collection to use
|
||||||
@@ -76,7 +85,7 @@ class switch_collection(object):
|
|||||||
self.collection_name = collection_name
|
self.collection_name = collection_name
|
||||||
|
|
||||||
def __enter__(self):
|
def __enter__(self):
|
||||||
""" change the _get_collection_name and clear the cached collection """
|
"""Change the _get_collection_name and clear the cached collection."""
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _get_collection_name(cls):
|
def _get_collection_name(cls):
|
||||||
@@ -87,142 +96,183 @@ class switch_collection(object):
|
|||||||
return self.cls
|
return self.cls
|
||||||
|
|
||||||
def __exit__(self, t, value, traceback):
|
def __exit__(self, t, value, traceback):
|
||||||
""" Reset the collection """
|
"""Reset the collection."""
|
||||||
self.cls._collection = self.ori_collection
|
self.cls._collection = self.ori_collection
|
||||||
self.cls._get_collection_name = self.ori_get_collection_name
|
self.cls._get_collection_name = self.ori_get_collection_name
|
||||||
|
|
||||||
|
|
||||||
class no_dereference(object):
|
class no_dereference:
|
||||||
""" no_dereference context manager.
|
"""no_dereference context manager.
|
||||||
|
|
||||||
Turns off all dereferencing in Documents for the duration of the context
|
Turns off all dereferencing in Documents for the duration of the context
|
||||||
manager::
|
manager::
|
||||||
|
|
||||||
with no_dereference(Group) as Group:
|
with no_dereference(Group) as Group:
|
||||||
Group.objects.find()
|
Group.objects.find()
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, cls):
|
def __init__(self, cls):
|
||||||
""" Construct the no_dereference context manager.
|
"""Construct the no_dereference context manager.
|
||||||
|
|
||||||
:param cls: the class to turn dereferencing off on
|
:param cls: the class to turn dereferencing off on
|
||||||
"""
|
"""
|
||||||
self.cls = cls
|
self.cls = cls
|
||||||
|
|
||||||
ReferenceField = _import_class('ReferenceField')
|
ReferenceField = _import_class("ReferenceField")
|
||||||
GenericReferenceField = _import_class('GenericReferenceField')
|
GenericReferenceField = _import_class("GenericReferenceField")
|
||||||
ComplexBaseField = _import_class('ComplexBaseField')
|
ComplexBaseField = _import_class("ComplexBaseField")
|
||||||
|
|
||||||
self.deref_fields = [k for k, v in self.cls._fields.iteritems()
|
self.deref_fields = [
|
||||||
if isinstance(v, (ReferenceField,
|
k
|
||||||
GenericReferenceField,
|
for k, v in self.cls._fields.items()
|
||||||
ComplexBaseField))]
|
if isinstance(v, (ReferenceField, GenericReferenceField, ComplexBaseField))
|
||||||
|
]
|
||||||
|
|
||||||
def __enter__(self):
|
def __enter__(self):
|
||||||
""" change the objects default and _auto_dereference values"""
|
"""Change the objects default and _auto_dereference values."""
|
||||||
for field in self.deref_fields:
|
for field in self.deref_fields:
|
||||||
self.cls._fields[field]._auto_dereference = False
|
self.cls._fields[field]._auto_dereference = False
|
||||||
return self.cls
|
return self.cls
|
||||||
|
|
||||||
def __exit__(self, t, value, traceback):
|
def __exit__(self, t, value, traceback):
|
||||||
""" Reset the default and _auto_dereference values"""
|
"""Reset the default and _auto_dereference values."""
|
||||||
for field in self.deref_fields:
|
for field in self.deref_fields:
|
||||||
self.cls._fields[field]._auto_dereference = True
|
self.cls._fields[field]._auto_dereference = True
|
||||||
return self.cls
|
return self.cls
|
||||||
|
|
||||||
|
|
||||||
class no_sub_classes(object):
|
class no_sub_classes:
|
||||||
""" no_sub_classes context manager.
|
"""no_sub_classes context manager.
|
||||||
|
|
||||||
Only returns instances of this class and no sub (inherited) classes::
|
Only returns instances of this class and no sub (inherited) classes::
|
||||||
|
|
||||||
with no_sub_classes(Group) as Group:
|
with no_sub_classes(Group) as Group:
|
||||||
Group.objects.find()
|
Group.objects.find()
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, cls):
|
def __init__(self, cls):
|
||||||
""" Construct the no_sub_classes context manager.
|
"""Construct the no_sub_classes context manager.
|
||||||
|
|
||||||
:param cls: the class to turn querying sub classes on
|
:param cls: the class to turn querying sub classes on
|
||||||
"""
|
"""
|
||||||
self.cls = cls
|
self.cls = cls
|
||||||
|
self.cls_initial_subclasses = None
|
||||||
|
|
||||||
def __enter__(self):
|
def __enter__(self):
|
||||||
""" change the objects default and _auto_dereference values"""
|
"""Change the objects default and _auto_dereference values."""
|
||||||
self.cls._all_subclasses = self.cls._subclasses
|
self.cls_initial_subclasses = self.cls._subclasses
|
||||||
self.cls._subclasses = (self.cls,)
|
self.cls._subclasses = (self.cls._class_name,)
|
||||||
return self.cls
|
return self.cls
|
||||||
|
|
||||||
def __exit__(self, t, value, traceback):
|
def __exit__(self, t, value, traceback):
|
||||||
""" Reset the default and _auto_dereference values"""
|
"""Reset the default and _auto_dereference values."""
|
||||||
self.cls._subclasses = self.cls._all_subclasses
|
self.cls._subclasses = self.cls_initial_subclasses
|
||||||
delattr(self.cls, '_all_subclasses')
|
|
||||||
return self.cls
|
|
||||||
|
|
||||||
|
|
||||||
class QuerySetNoDeRef(QuerySet):
|
class query_counter:
|
||||||
"""Special no_dereference QuerySet"""
|
"""Query_counter context manager to get the number of queries.
|
||||||
def __dereference(items, max_depth=1, instance=None, name=None):
|
This works by updating the `profiling_level` of the database so that all queries get logged,
|
||||||
return items
|
resetting the db.system.profile collection at the beginning of the context and counting the new entries.
|
||||||
|
|
||||||
|
This was designed for debugging purpose. In fact it is a global counter so queries issued by other threads/processes
|
||||||
|
can interfere with it
|
||||||
|
|
||||||
class query_counter(object):
|
Be aware that:
|
||||||
""" Query_counter context manager to get the number of queries. """
|
- Iterating over large amount of documents (>101) makes pymongo issue `getmore` queries to fetch the next batch of
|
||||||
|
documents (https://docs.mongodb.com/manual/tutorial/iterate-a-cursor/#cursor-batches)
|
||||||
|
- Some queries are ignored by default by the counter (killcursors, db.system.indexes)
|
||||||
|
"""
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self, alias=DEFAULT_CONNECTION_NAME):
|
||||||
""" Construct the query_counter. """
|
"""Construct the query_counter
|
||||||
self.counter = 0
|
"""
|
||||||
self.db = get_db()
|
self.db = get_db(alias=alias)
|
||||||
|
self.initial_profiling_level = None
|
||||||
|
self._ctx_query_counter = 0 # number of queries issued by the context
|
||||||
|
|
||||||
def __enter__(self):
|
self._ignored_query = {
|
||||||
""" On every with block we need to drop the profile collection. """
|
"ns": {"$ne": "%s.system.indexes" % self.db.name},
|
||||||
|
"op": {"$ne": "killcursors"}, # MONGODB < 3.2
|
||||||
|
"command.killCursors": {"$exists": False}, # MONGODB >= 3.2
|
||||||
|
}
|
||||||
|
|
||||||
|
def _turn_on_profiling(self):
|
||||||
|
self.initial_profiling_level = self.db.profiling_level()
|
||||||
self.db.set_profiling_level(0)
|
self.db.set_profiling_level(0)
|
||||||
self.db.system.profile.drop()
|
self.db.system.profile.drop()
|
||||||
self.db.set_profiling_level(2)
|
self.db.set_profiling_level(2)
|
||||||
|
|
||||||
|
def _resets_profiling(self):
|
||||||
|
self.db.set_profiling_level(self.initial_profiling_level)
|
||||||
|
|
||||||
|
def __enter__(self):
|
||||||
|
self._turn_on_profiling()
|
||||||
return self
|
return self
|
||||||
|
|
||||||
def __exit__(self, t, value, traceback):
|
def __exit__(self, t, value, traceback):
|
||||||
""" Reset the profiling level. """
|
self._resets_profiling()
|
||||||
self.db.set_profiling_level(0)
|
|
||||||
|
|
||||||
def __eq__(self, value):
|
def __eq__(self, value):
|
||||||
""" == Compare querycounter. """
|
|
||||||
counter = self._get_count()
|
counter = self._get_count()
|
||||||
return value == counter
|
return value == counter
|
||||||
|
|
||||||
def __ne__(self, value):
|
def __ne__(self, value):
|
||||||
""" != Compare querycounter. """
|
|
||||||
return not self.__eq__(value)
|
return not self.__eq__(value)
|
||||||
|
|
||||||
def __lt__(self, value):
|
def __lt__(self, value):
|
||||||
""" < Compare querycounter. """
|
|
||||||
return self._get_count() < value
|
return self._get_count() < value
|
||||||
|
|
||||||
def __le__(self, value):
|
def __le__(self, value):
|
||||||
""" <= Compare querycounter. """
|
|
||||||
return self._get_count() <= value
|
return self._get_count() <= value
|
||||||
|
|
||||||
def __gt__(self, value):
|
def __gt__(self, value):
|
||||||
""" > Compare querycounter. """
|
|
||||||
return self._get_count() > value
|
return self._get_count() > value
|
||||||
|
|
||||||
def __ge__(self, value):
|
def __ge__(self, value):
|
||||||
""" >= Compare querycounter. """
|
|
||||||
return self._get_count() >= value
|
return self._get_count() >= value
|
||||||
|
|
||||||
def __int__(self):
|
def __int__(self):
|
||||||
""" int representation. """
|
|
||||||
return self._get_count()
|
return self._get_count()
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
""" repr query_counter as the number of queries. """
|
"""repr query_counter as the number of queries."""
|
||||||
return u"%s" % self._get_count()
|
return "%s" % self._get_count()
|
||||||
|
|
||||||
def _get_count(self):
|
def _get_count(self):
|
||||||
""" Get the number of queries. """
|
"""Get the number of queries by counting the current number of entries in db.system.profile
|
||||||
ignore_query = {"ns": {"$ne": "%s.system.indexes" % self.db.name}}
|
and substracting the queries issued by this context. In fact everytime this is called, 1 query is
|
||||||
count = self.db.system.profile.find(ignore_query).count() - self.counter
|
issued so we need to balance that
|
||||||
self.counter += 1
|
"""
|
||||||
|
count = (
|
||||||
|
count_documents(self.db.system.profile, self._ignored_query)
|
||||||
|
- self._ctx_query_counter
|
||||||
|
)
|
||||||
|
self._ctx_query_counter += (
|
||||||
|
1 # Account for the query we just issued to gather the information
|
||||||
|
)
|
||||||
return count
|
return count
|
||||||
|
|
||||||
|
|
||||||
|
@contextmanager
|
||||||
|
def set_write_concern(collection, write_concerns):
|
||||||
|
combined_concerns = dict(collection.write_concern.document.items())
|
||||||
|
combined_concerns.update(write_concerns)
|
||||||
|
yield collection.with_options(write_concern=WriteConcern(**combined_concerns))
|
||||||
|
|
||||||
|
|
||||||
|
@contextmanager
|
||||||
|
def set_read_write_concern(collection, write_concerns, read_concerns):
|
||||||
|
combined_write_concerns = dict(collection.write_concern.document.items())
|
||||||
|
|
||||||
|
if write_concerns is not None:
|
||||||
|
combined_write_concerns.update(write_concerns)
|
||||||
|
|
||||||
|
combined_read_concerns = dict(collection.read_concern.document.items())
|
||||||
|
|
||||||
|
if read_concerns is not None:
|
||||||
|
combined_read_concerns.update(read_concerns)
|
||||||
|
|
||||||
|
yield collection.with_options(
|
||||||
|
write_concern=WriteConcern(**combined_write_concerns),
|
||||||
|
read_concern=ReadConcern(**combined_read_concerns),
|
||||||
|
)
|
||||||
|
|||||||
@@ -1,18 +1,24 @@
|
|||||||
from bson import DBRef, SON
|
from bson import DBRef, SON
|
||||||
|
|
||||||
from base import (BaseDict, BaseList, TopLevelDocumentMetaclass, get_document)
|
from mongoengine.base import (
|
||||||
from fields import (ReferenceField, ListField, DictField, MapField)
|
BaseDict,
|
||||||
from connection import get_db
|
BaseList,
|
||||||
from queryset import QuerySet
|
EmbeddedDocumentList,
|
||||||
from document import Document, EmbeddedDocument
|
TopLevelDocumentMetaclass,
|
||||||
|
get_document,
|
||||||
|
)
|
||||||
|
from mongoengine.base.datastructures import LazyReference
|
||||||
|
from mongoengine.connection import get_db
|
||||||
|
from mongoengine.document import Document, EmbeddedDocument
|
||||||
|
from mongoengine.fields import DictField, ListField, MapField, ReferenceField
|
||||||
|
from mongoengine.queryset import QuerySet
|
||||||
|
|
||||||
|
|
||||||
class DeReference(object):
|
class DeReference:
|
||||||
|
|
||||||
def __call__(self, items, max_depth=1, instance=None, name=None):
|
def __call__(self, items, max_depth=1, instance=None, name=None):
|
||||||
"""
|
"""
|
||||||
Cheaply dereferences the items to a set depth.
|
Cheaply dereferences the items to a set depth.
|
||||||
Also handles the convertion of complex data types.
|
Also handles the conversion of complex data types.
|
||||||
|
|
||||||
:param items: The iterable (dict, list, queryset) to be dereferenced.
|
:param items: The iterable (dict, list, queryset) to be dereferenced.
|
||||||
:param max_depth: The maximum depth to recurse to
|
:param max_depth: The maximum depth to recurse to
|
||||||
@@ -22,7 +28,7 @@ class DeReference(object):
|
|||||||
:class:`~mongoengine.base.ComplexBaseField`
|
:class:`~mongoengine.base.ComplexBaseField`
|
||||||
:param get: A boolean determining if being called by __get__
|
:param get: A boolean determining if being called by __get__
|
||||||
"""
|
"""
|
||||||
if items is None or isinstance(items, basestring):
|
if items is None or isinstance(items, str):
|
||||||
return items
|
return items
|
||||||
|
|
||||||
# cheapest way to convert a queryset to a list
|
# cheapest way to convert a queryset to a list
|
||||||
@@ -33,33 +39,59 @@ class DeReference(object):
|
|||||||
self.max_depth = max_depth
|
self.max_depth = max_depth
|
||||||
doc_type = None
|
doc_type = None
|
||||||
|
|
||||||
if instance and isinstance(instance, (Document, EmbeddedDocument,
|
if instance and isinstance(
|
||||||
TopLevelDocumentMetaclass)):
|
instance, (Document, EmbeddedDocument, TopLevelDocumentMetaclass)
|
||||||
|
):
|
||||||
doc_type = instance._fields.get(name)
|
doc_type = instance._fields.get(name)
|
||||||
if hasattr(doc_type, 'field'):
|
while hasattr(doc_type, "field"):
|
||||||
doc_type = doc_type.field
|
doc_type = doc_type.field
|
||||||
|
|
||||||
if isinstance(doc_type, ReferenceField):
|
if isinstance(doc_type, ReferenceField):
|
||||||
field = doc_type
|
field = doc_type
|
||||||
doc_type = doc_type.document_type
|
doc_type = doc_type.document_type
|
||||||
is_list = not hasattr(items, 'items')
|
is_list = not hasattr(items, "items")
|
||||||
|
|
||||||
if is_list and all([i.__class__ == doc_type for i in items]):
|
if is_list and all([i.__class__ == doc_type for i in items]):
|
||||||
return items
|
return items
|
||||||
elif not is_list and all([i.__class__ == doc_type
|
elif not is_list and all(
|
||||||
for i in items.values()]):
|
[i.__class__ == doc_type for i in items.values()]
|
||||||
|
):
|
||||||
return items
|
return items
|
||||||
elif not field.dbref:
|
elif not field.dbref:
|
||||||
if not hasattr(items, 'items'):
|
# We must turn the ObjectIds into DBRefs
|
||||||
items = [field.to_python(v)
|
|
||||||
if not isinstance(v, (DBRef, Document)) else v
|
# Recursively dig into the sub items of a list/dict
|
||||||
for v in items]
|
# to turn the ObjectIds into DBRefs
|
||||||
|
def _get_items_from_list(items):
|
||||||
|
new_items = []
|
||||||
|
for v in items:
|
||||||
|
value = v
|
||||||
|
if isinstance(v, dict):
|
||||||
|
value = _get_items_from_dict(v)
|
||||||
|
elif isinstance(v, list):
|
||||||
|
value = _get_items_from_list(v)
|
||||||
|
elif not isinstance(v, (DBRef, Document)):
|
||||||
|
value = field.to_python(v)
|
||||||
|
new_items.append(value)
|
||||||
|
return new_items
|
||||||
|
|
||||||
|
def _get_items_from_dict(items):
|
||||||
|
new_items = {}
|
||||||
|
for k, v in items.items():
|
||||||
|
value = v
|
||||||
|
if isinstance(v, list):
|
||||||
|
value = _get_items_from_list(v)
|
||||||
|
elif isinstance(v, dict):
|
||||||
|
value = _get_items_from_dict(v)
|
||||||
|
elif not isinstance(v, (DBRef, Document)):
|
||||||
|
value = field.to_python(v)
|
||||||
|
new_items[k] = value
|
||||||
|
return new_items
|
||||||
|
|
||||||
|
if not hasattr(items, "items"):
|
||||||
|
items = _get_items_from_list(items)
|
||||||
else:
|
else:
|
||||||
items = dict([
|
items = _get_items_from_dict(items)
|
||||||
(k, field.to_python(v))
|
|
||||||
if not isinstance(v, (DBRef, Document)) else (k, v)
|
|
||||||
for k, v in items.iteritems()]
|
|
||||||
)
|
|
||||||
|
|
||||||
self.reference_map = self._find_references(items)
|
self.reference_map = self._find_references(items)
|
||||||
self.object_map = self._fetch_objects(doc_type=doc_type)
|
self.object_map = self._fetch_objects(doc_type=doc_type)
|
||||||
@@ -77,36 +109,50 @@ class DeReference(object):
|
|||||||
return reference_map
|
return reference_map
|
||||||
|
|
||||||
# Determine the iterator to use
|
# Determine the iterator to use
|
||||||
if not hasattr(items, 'items'):
|
if isinstance(items, dict):
|
||||||
iterator = enumerate(items)
|
iterator = items.values()
|
||||||
else:
|
else:
|
||||||
iterator = items.iteritems()
|
iterator = items
|
||||||
|
|
||||||
# Recursively find dbreferences
|
# Recursively find dbreferences
|
||||||
depth += 1
|
depth += 1
|
||||||
for k, item in iterator:
|
for item in iterator:
|
||||||
if isinstance(item, Document):
|
if isinstance(item, (Document, EmbeddedDocument)):
|
||||||
for field_name, field in item._fields.iteritems():
|
for field_name, field in item._fields.items():
|
||||||
v = item._data.get(field_name, None)
|
v = item._data.get(field_name, None)
|
||||||
if isinstance(v, (DBRef)):
|
if isinstance(v, LazyReference):
|
||||||
reference_map.setdefault(field.document_type, []).append(v.id)
|
# LazyReference inherits DBRef but should not be dereferenced here !
|
||||||
elif isinstance(v, (dict, SON)) and '_ref' in v:
|
continue
|
||||||
reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id)
|
elif isinstance(v, DBRef):
|
||||||
|
reference_map.setdefault(field.document_type, set()).add(v.id)
|
||||||
|
elif isinstance(v, (dict, SON)) and "_ref" in v:
|
||||||
|
reference_map.setdefault(get_document(v["_cls"]), set()).add(
|
||||||
|
v["_ref"].id
|
||||||
|
)
|
||||||
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
|
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
|
||||||
field_cls = getattr(getattr(field, 'field', None), 'document_type', None)
|
field_cls = getattr(
|
||||||
|
getattr(field, "field", None), "document_type", None
|
||||||
|
)
|
||||||
references = self._find_references(v, depth)
|
references = self._find_references(v, depth)
|
||||||
for key, refs in references.iteritems():
|
for key, refs in references.items():
|
||||||
if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
|
if isinstance(
|
||||||
|
field_cls, (Document, TopLevelDocumentMetaclass)
|
||||||
|
):
|
||||||
key = field_cls
|
key = field_cls
|
||||||
reference_map.setdefault(key, []).extend(refs)
|
reference_map.setdefault(key, set()).update(refs)
|
||||||
elif isinstance(item, (DBRef)):
|
elif isinstance(item, LazyReference):
|
||||||
reference_map.setdefault(item.collection, []).append(item.id)
|
# LazyReference inherits DBRef but should not be dereferenced here !
|
||||||
elif isinstance(item, (dict, SON)) and '_ref' in item:
|
continue
|
||||||
reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id)
|
elif isinstance(item, DBRef):
|
||||||
|
reference_map.setdefault(item.collection, set()).add(item.id)
|
||||||
|
elif isinstance(item, (dict, SON)) and "_ref" in item:
|
||||||
|
reference_map.setdefault(get_document(item["_cls"]), set()).add(
|
||||||
|
item["_ref"].id
|
||||||
|
)
|
||||||
elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
|
elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
|
||||||
references = self._find_references(item, depth - 1)
|
references = self._find_references(item, depth - 1)
|
||||||
for key, refs in references.iteritems():
|
for key, refs in references.items():
|
||||||
reference_map.setdefault(key, []).extend(refs)
|
reference_map.setdefault(key, set()).update(refs)
|
||||||
|
|
||||||
return reference_map
|
return reference_map
|
||||||
|
|
||||||
@@ -114,34 +160,47 @@ class DeReference(object):
|
|||||||
"""Fetch all references and convert to their document objects
|
"""Fetch all references and convert to their document objects
|
||||||
"""
|
"""
|
||||||
object_map = {}
|
object_map = {}
|
||||||
for col, dbrefs in self.reference_map.iteritems():
|
for collection, dbrefs in self.reference_map.items():
|
||||||
keys = object_map.keys()
|
|
||||||
refs = list(set([dbref for dbref in dbrefs if unicode(dbref).encode('utf-8') not in keys]))
|
# we use getattr instead of hasattr because hasattr swallows any exception under python2
|
||||||
if hasattr(col, 'objects'): # We have a document class for the refs
|
# so it could hide nasty things without raising exceptions (cfr bug #1688))
|
||||||
references = col.objects.in_bulk(refs)
|
ref_document_cls_exists = getattr(collection, "objects", None) is not None
|
||||||
for key, doc in references.iteritems():
|
|
||||||
object_map[key] = doc
|
if ref_document_cls_exists:
|
||||||
|
col_name = collection._get_collection_name()
|
||||||
|
refs = [
|
||||||
|
dbref for dbref in dbrefs if (col_name, dbref) not in object_map
|
||||||
|
]
|
||||||
|
references = collection.objects.in_bulk(refs)
|
||||||
|
for key, doc in references.items():
|
||||||
|
object_map[(col_name, key)] = doc
|
||||||
else: # Generic reference: use the refs data to convert to document
|
else: # Generic reference: use the refs data to convert to document
|
||||||
if isinstance(doc_type, (ListField, DictField, MapField,)):
|
if isinstance(doc_type, (ListField, DictField, MapField)):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
refs = [
|
||||||
|
dbref for dbref in dbrefs if (collection, dbref) not in object_map
|
||||||
|
]
|
||||||
|
|
||||||
if doc_type:
|
if doc_type:
|
||||||
references = doc_type._get_db()[col].find({'_id': {'$in': refs}})
|
references = doc_type._get_db()[collection].find(
|
||||||
|
{"_id": {"$in": refs}}
|
||||||
|
)
|
||||||
for ref in references:
|
for ref in references:
|
||||||
doc = doc_type._from_son(ref)
|
doc = doc_type._from_son(ref)
|
||||||
object_map[doc.id] = doc
|
object_map[(collection, doc.id)] = doc
|
||||||
else:
|
else:
|
||||||
references = get_db()[col].find({'_id': {'$in': refs}})
|
references = get_db()[collection].find({"_id": {"$in": refs}})
|
||||||
for ref in references:
|
for ref in references:
|
||||||
if '_cls' in ref:
|
if "_cls" in ref:
|
||||||
doc = get_document(ref["_cls"])._from_son(ref)
|
doc = get_document(ref["_cls"])._from_son(ref)
|
||||||
elif doc_type is None:
|
elif doc_type is None:
|
||||||
doc = get_document(
|
doc = get_document(
|
||||||
''.join(x.capitalize()
|
"".join(x.capitalize() for x in collection.split("_"))
|
||||||
for x in col.split('_')))._from_son(ref)
|
)._from_son(ref)
|
||||||
else:
|
else:
|
||||||
doc = doc_type._from_son(ref)
|
doc = doc_type._from_son(ref)
|
||||||
object_map[doc.id] = doc
|
object_map[(collection, doc.id)] = doc
|
||||||
return object_map
|
return object_map
|
||||||
|
|
||||||
def _attach_objects(self, items, depth=0, instance=None, name=None):
|
def _attach_objects(self, items, depth=0, instance=None, name=None):
|
||||||
@@ -166,21 +225,30 @@ class DeReference(object):
|
|||||||
return BaseList(items, instance, name)
|
return BaseList(items, instance, name)
|
||||||
|
|
||||||
if isinstance(items, (dict, SON)):
|
if isinstance(items, (dict, SON)):
|
||||||
if '_ref' in items:
|
if "_ref" in items:
|
||||||
return self.object_map.get(items['_ref'].id, items)
|
return self.object_map.get(
|
||||||
elif '_cls' in items:
|
(items["_ref"].collection, items["_ref"].id), items
|
||||||
doc = get_document(items['_cls'])._from_son(items)
|
)
|
||||||
|
elif "_cls" in items:
|
||||||
|
doc = get_document(items["_cls"])._from_son(items)
|
||||||
|
_cls = doc._data.pop("_cls", None)
|
||||||
|
del items["_cls"]
|
||||||
doc._data = self._attach_objects(doc._data, depth, doc, None)
|
doc._data = self._attach_objects(doc._data, depth, doc, None)
|
||||||
|
if _cls is not None:
|
||||||
|
doc._data["_cls"] = _cls
|
||||||
return doc
|
return doc
|
||||||
|
|
||||||
if not hasattr(items, 'items'):
|
if not hasattr(items, "items"):
|
||||||
is_list = True
|
is_list = True
|
||||||
|
list_type = BaseList
|
||||||
|
if isinstance(items, EmbeddedDocumentList):
|
||||||
|
list_type = EmbeddedDocumentList
|
||||||
as_tuple = isinstance(items, tuple)
|
as_tuple = isinstance(items, tuple)
|
||||||
iterator = enumerate(items)
|
iterator = enumerate(items)
|
||||||
data = []
|
data = []
|
||||||
else:
|
else:
|
||||||
is_list = False
|
is_list = False
|
||||||
iterator = items.iteritems()
|
iterator = items.items()
|
||||||
data = {}
|
data = {}
|
||||||
|
|
||||||
depth += 1
|
depth += 1
|
||||||
@@ -192,25 +260,33 @@ class DeReference(object):
|
|||||||
|
|
||||||
if k in self.object_map and not is_list:
|
if k in self.object_map and not is_list:
|
||||||
data[k] = self.object_map[k]
|
data[k] = self.object_map[k]
|
||||||
elif isinstance(v, Document):
|
elif isinstance(v, (Document, EmbeddedDocument)):
|
||||||
for field_name, field in v._fields.iteritems():
|
for field_name in v._fields:
|
||||||
v = data[k]._data.get(field_name, None)
|
v = data[k]._data.get(field_name, None)
|
||||||
if isinstance(v, (DBRef)):
|
if isinstance(v, DBRef):
|
||||||
data[k]._data[field_name] = self.object_map.get(v.id, v)
|
data[k]._data[field_name] = self.object_map.get(
|
||||||
elif isinstance(v, (dict, SON)) and '_ref' in v:
|
(v.collection, v.id), v
|
||||||
data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v)
|
)
|
||||||
elif isinstance(v, dict) and depth <= self.max_depth:
|
elif isinstance(v, (dict, SON)) and "_ref" in v:
|
||||||
data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
|
data[k]._data[field_name] = self.object_map.get(
|
||||||
elif isinstance(v, (list, tuple)) and depth <= self.max_depth:
|
(v["_ref"].collection, v["_ref"].id), v
|
||||||
data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
|
)
|
||||||
|
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
|
||||||
|
item_name = "{}.{}.{}".format(name, k, field_name)
|
||||||
|
data[k]._data[field_name] = self._attach_objects(
|
||||||
|
v, depth, instance=instance, name=item_name
|
||||||
|
)
|
||||||
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
|
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
|
||||||
data[k] = self._attach_objects(v, depth - 1, instance=instance, name=name)
|
item_name = "{}.{}".format(name, k) if name else name
|
||||||
elif hasattr(v, 'id'):
|
data[k] = self._attach_objects(
|
||||||
data[k] = self.object_map.get(v.id, v)
|
v, depth - 1, instance=instance, name=item_name
|
||||||
|
)
|
||||||
|
elif isinstance(v, DBRef) and hasattr(v, "id"):
|
||||||
|
data[k] = self.object_map.get((v.collection, v.id), v)
|
||||||
|
|
||||||
if instance and name:
|
if instance and name:
|
||||||
if is_list:
|
if is_list:
|
||||||
return tuple(data) if as_tuple else BaseList(data, instance, name)
|
return tuple(data) if as_tuple else list_type(data, instance, name)
|
||||||
return BaseDict(data, instance, name)
|
return BaseDict(data, instance, name)
|
||||||
depth += 1
|
depth += 1
|
||||||
return data
|
return data
|
||||||
|
|||||||
@@ -1,406 +0,0 @@
|
|||||||
from mongoengine import *
|
|
||||||
|
|
||||||
from django.utils.encoding import smart_str
|
|
||||||
from django.contrib.auth.models import _user_has_perm, _user_get_all_permissions, _user_has_module_perms
|
|
||||||
from django.db import models
|
|
||||||
from django.contrib.contenttypes.models import ContentTypeManager
|
|
||||||
from django.contrib import auth
|
|
||||||
from django.contrib.auth.models import AnonymousUser
|
|
||||||
from django.utils.translation import ugettext_lazy as _
|
|
||||||
|
|
||||||
from .utils import datetime_now
|
|
||||||
from .mongo_auth import get_user_document
|
|
||||||
|
|
||||||
REDIRECT_FIELD_NAME = 'next'
|
|
||||||
|
|
||||||
try:
|
|
||||||
from django.contrib.auth.hashers import check_password, make_password
|
|
||||||
except ImportError:
|
|
||||||
"""Handle older versions of Django"""
|
|
||||||
from django.utils.hashcompat import md5_constructor, sha_constructor
|
|
||||||
|
|
||||||
def get_hexdigest(algorithm, salt, raw_password):
|
|
||||||
raw_password, salt = smart_str(raw_password), smart_str(salt)
|
|
||||||
if algorithm == 'md5':
|
|
||||||
return md5_constructor(salt + raw_password).hexdigest()
|
|
||||||
elif algorithm == 'sha1':
|
|
||||||
return sha_constructor(salt + raw_password).hexdigest()
|
|
||||||
raise ValueError('Got unknown password algorithm type in password')
|
|
||||||
|
|
||||||
def check_password(raw_password, password):
|
|
||||||
algo, salt, hash = password.split('$')
|
|
||||||
return hash == get_hexdigest(algo, salt, raw_password)
|
|
||||||
|
|
||||||
def make_password(raw_password):
|
|
||||||
from random import random
|
|
||||||
algo = 'sha1'
|
|
||||||
salt = get_hexdigest(algo, str(random()), str(random()))[:5]
|
|
||||||
hash = get_hexdigest(algo, salt, raw_password)
|
|
||||||
return '%s$%s$%s' % (algo, salt, hash)
|
|
||||||
|
|
||||||
|
|
||||||
class ContentType(Document):
|
|
||||||
name = StringField(max_length=100)
|
|
||||||
app_label = StringField(max_length=100)
|
|
||||||
model = StringField(max_length=100, verbose_name=_('python model class name'),
|
|
||||||
unique_with='app_label')
|
|
||||||
objects = ContentTypeManager()
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
verbose_name = _('content type')
|
|
||||||
verbose_name_plural = _('content types')
|
|
||||||
# db_table = 'django_content_type'
|
|
||||||
# ordering = ('name',)
|
|
||||||
# unique_together = (('app_label', 'model'),)
|
|
||||||
|
|
||||||
def __unicode__(self):
|
|
||||||
return self.name
|
|
||||||
|
|
||||||
def model_class(self):
|
|
||||||
"Returns the Python model class for this type of content."
|
|
||||||
from django.db import models
|
|
||||||
return models.get_model(self.app_label, self.model)
|
|
||||||
|
|
||||||
def get_object_for_this_type(self, **kwargs):
|
|
||||||
"""
|
|
||||||
Returns an object of this type for the keyword arguments given.
|
|
||||||
Basically, this is a proxy around this object_type's get_object() model
|
|
||||||
method. The ObjectNotExist exception, if thrown, will not be caught,
|
|
||||||
so code that calls this method should catch it.
|
|
||||||
"""
|
|
||||||
return self.model_class()._default_manager.using(self._state.db).get(**kwargs)
|
|
||||||
|
|
||||||
def natural_key(self):
|
|
||||||
return (self.app_label, self.model)
|
|
||||||
|
|
||||||
|
|
||||||
class SiteProfileNotAvailable(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class PermissionManager(models.Manager):
|
|
||||||
def get_by_natural_key(self, codename, app_label, model):
|
|
||||||
return self.get(
|
|
||||||
codename=codename,
|
|
||||||
content_type=ContentType.objects.get_by_natural_key(app_label, model)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class Permission(Document):
|
|
||||||
"""The permissions system provides a way to assign permissions to specific
|
|
||||||
users and groups of users.
|
|
||||||
|
|
||||||
The permission system is used by the Django admin site, but may also be
|
|
||||||
useful in your own code. The Django admin site uses permissions as follows:
|
|
||||||
|
|
||||||
- The "add" permission limits the user's ability to view the "add"
|
|
||||||
form and add an object.
|
|
||||||
- The "change" permission limits a user's ability to view the change
|
|
||||||
list, view the "change" form and change an object.
|
|
||||||
- The "delete" permission limits the ability to delete an object.
|
|
||||||
|
|
||||||
Permissions are set globally per type of object, not per specific object
|
|
||||||
instance. It is possible to say "Mary may change news stories," but it's
|
|
||||||
not currently possible to say "Mary may change news stories, but only the
|
|
||||||
ones she created herself" or "Mary may only change news stories that have
|
|
||||||
a certain status or publication date."
|
|
||||||
|
|
||||||
Three basic permissions -- add, change and delete -- are automatically
|
|
||||||
created for each Django model.
|
|
||||||
"""
|
|
||||||
name = StringField(max_length=50, verbose_name=_('username'))
|
|
||||||
content_type = ReferenceField(ContentType)
|
|
||||||
codename = StringField(max_length=100, verbose_name=_('codename'))
|
|
||||||
# FIXME: don't access field of the other class
|
|
||||||
# unique_with=['content_type__app_label', 'content_type__model'])
|
|
||||||
|
|
||||||
objects = PermissionManager()
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
verbose_name = _('permission')
|
|
||||||
verbose_name_plural = _('permissions')
|
|
||||||
# unique_together = (('content_type', 'codename'),)
|
|
||||||
# ordering = ('content_type__app_label', 'content_type__model', 'codename')
|
|
||||||
|
|
||||||
def __unicode__(self):
|
|
||||||
return u"%s | %s | %s" % (
|
|
||||||
unicode(self.content_type.app_label),
|
|
||||||
unicode(self.content_type),
|
|
||||||
unicode(self.name))
|
|
||||||
|
|
||||||
def natural_key(self):
|
|
||||||
return (self.codename,) + self.content_type.natural_key()
|
|
||||||
natural_key.dependencies = ['contenttypes.contenttype']
|
|
||||||
|
|
||||||
|
|
||||||
class Group(Document):
|
|
||||||
"""Groups are a generic way of categorizing users to apply permissions,
|
|
||||||
or some other label, to those users. A user can belong to any number of
|
|
||||||
groups.
|
|
||||||
|
|
||||||
A user in a group automatically has all the permissions granted to that
|
|
||||||
group. For example, if the group Site editors has the permission
|
|
||||||
can_edit_home_page, any user in that group will have that permission.
|
|
||||||
|
|
||||||
Beyond permissions, groups are a convenient way to categorize users to
|
|
||||||
apply some label, or extended functionality, to them. For example, you
|
|
||||||
could create a group 'Special users', and you could write code that would
|
|
||||||
do special things to those users -- such as giving them access to a
|
|
||||||
members-only portion of your site, or sending them members-only
|
|
||||||
e-mail messages.
|
|
||||||
"""
|
|
||||||
name = StringField(max_length=80, unique=True, verbose_name=_('name'))
|
|
||||||
permissions = ListField(ReferenceField(Permission, verbose_name=_('permissions'), required=False))
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
verbose_name = _('group')
|
|
||||||
verbose_name_plural = _('groups')
|
|
||||||
|
|
||||||
def __unicode__(self):
|
|
||||||
return self.name
|
|
||||||
|
|
||||||
|
|
||||||
class UserManager(models.Manager):
|
|
||||||
def create_user(self, username, email, password=None):
|
|
||||||
"""
|
|
||||||
Creates and saves a User with the given username, e-mail and password.
|
|
||||||
"""
|
|
||||||
now = datetime_now()
|
|
||||||
|
|
||||||
# Normalize the address by lowercasing the domain part of the email
|
|
||||||
# address.
|
|
||||||
try:
|
|
||||||
email_name, domain_part = email.strip().split('@', 1)
|
|
||||||
except ValueError:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
email = '@'.join([email_name, domain_part.lower()])
|
|
||||||
|
|
||||||
user = self.model(username=username, email=email, is_staff=False,
|
|
||||||
is_active=True, is_superuser=False, last_login=now,
|
|
||||||
date_joined=now)
|
|
||||||
|
|
||||||
user.set_password(password)
|
|
||||||
user.save(using=self._db)
|
|
||||||
return user
|
|
||||||
|
|
||||||
def create_superuser(self, username, email, password):
|
|
||||||
u = self.create_user(username, email, password)
|
|
||||||
u.is_staff = True
|
|
||||||
u.is_active = True
|
|
||||||
u.is_superuser = True
|
|
||||||
u.save(using=self._db)
|
|
||||||
return u
|
|
||||||
|
|
||||||
def make_random_password(self, length=10, allowed_chars='abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23456789'):
|
|
||||||
"Generates a random password with the given length and given allowed_chars"
|
|
||||||
# Note that default value of allowed_chars does not have "I" or letters
|
|
||||||
# that look like it -- just to avoid confusion.
|
|
||||||
from random import choice
|
|
||||||
return ''.join([choice(allowed_chars) for i in range(length)])
|
|
||||||
|
|
||||||
|
|
||||||
class User(Document):
|
|
||||||
"""A User document that aims to mirror most of the API specified by Django
|
|
||||||
at http://docs.djangoproject.com/en/dev/topics/auth/#users
|
|
||||||
"""
|
|
||||||
username = StringField(max_length=30, required=True,
|
|
||||||
verbose_name=_('username'),
|
|
||||||
help_text=_("Required. 30 characters or fewer. Letters, numbers and @/./+/-/_ characters"))
|
|
||||||
|
|
||||||
first_name = StringField(max_length=30,
|
|
||||||
verbose_name=_('first name'))
|
|
||||||
|
|
||||||
last_name = StringField(max_length=30,
|
|
||||||
verbose_name=_('last name'))
|
|
||||||
email = EmailField(verbose_name=_('e-mail address'))
|
|
||||||
password = StringField(max_length=128,
|
|
||||||
verbose_name=_('password'),
|
|
||||||
help_text=_("Use '[algo]$[iterations]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>."))
|
|
||||||
is_staff = BooleanField(default=False,
|
|
||||||
verbose_name=_('staff status'),
|
|
||||||
help_text=_("Designates whether the user can log into this admin site."))
|
|
||||||
is_active = BooleanField(default=True,
|
|
||||||
verbose_name=_('active'),
|
|
||||||
help_text=_("Designates whether this user should be treated as active. Unselect this instead of deleting accounts."))
|
|
||||||
is_superuser = BooleanField(default=False,
|
|
||||||
verbose_name=_('superuser status'),
|
|
||||||
help_text=_("Designates that this user has all permissions without explicitly assigning them."))
|
|
||||||
last_login = DateTimeField(default=datetime_now,
|
|
||||||
verbose_name=_('last login'))
|
|
||||||
date_joined = DateTimeField(default=datetime_now,
|
|
||||||
verbose_name=_('date joined'))
|
|
||||||
|
|
||||||
user_permissions = ListField(ReferenceField(Permission), verbose_name=_('user permissions'),
|
|
||||||
help_text=_('Permissions for the user.'))
|
|
||||||
|
|
||||||
USERNAME_FIELD = 'username'
|
|
||||||
REQUIRED_FIELDS = ['email']
|
|
||||||
|
|
||||||
meta = {
|
|
||||||
'allow_inheritance': True,
|
|
||||||
'indexes': [
|
|
||||||
{'fields': ['username'], 'unique': True, 'sparse': True}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
def __unicode__(self):
|
|
||||||
return self.username
|
|
||||||
|
|
||||||
def get_full_name(self):
|
|
||||||
"""Returns the users first and last names, separated by a space.
|
|
||||||
"""
|
|
||||||
full_name = u'%s %s' % (self.first_name or '', self.last_name or '')
|
|
||||||
return full_name.strip()
|
|
||||||
|
|
||||||
def is_anonymous(self):
|
|
||||||
return False
|
|
||||||
|
|
||||||
def is_authenticated(self):
|
|
||||||
return True
|
|
||||||
|
|
||||||
def set_password(self, raw_password):
|
|
||||||
"""Sets the user's password - always use this rather than directly
|
|
||||||
assigning to :attr:`~mongoengine.django.auth.User.password` as the
|
|
||||||
password is hashed before storage.
|
|
||||||
"""
|
|
||||||
self.password = make_password(raw_password)
|
|
||||||
self.save()
|
|
||||||
return self
|
|
||||||
|
|
||||||
def check_password(self, raw_password):
|
|
||||||
"""Checks the user's password against a provided password - always use
|
|
||||||
this rather than directly comparing to
|
|
||||||
:attr:`~mongoengine.django.auth.User.password` as the password is
|
|
||||||
hashed before storage.
|
|
||||||
"""
|
|
||||||
return check_password(raw_password, self.password)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def create_user(cls, username, password, email=None):
|
|
||||||
"""Create (and save) a new user with the given username, password and
|
|
||||||
email address.
|
|
||||||
"""
|
|
||||||
now = datetime_now()
|
|
||||||
|
|
||||||
# Normalize the address by lowercasing the domain part of the email
|
|
||||||
# address.
|
|
||||||
if email is not None:
|
|
||||||
try:
|
|
||||||
email_name, domain_part = email.strip().split('@', 1)
|
|
||||||
except ValueError:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
email = '@'.join([email_name, domain_part.lower()])
|
|
||||||
|
|
||||||
user = cls(username=username, email=email, date_joined=now)
|
|
||||||
user.set_password(password)
|
|
||||||
user.save()
|
|
||||||
return user
|
|
||||||
|
|
||||||
def get_group_permissions(self, obj=None):
    """Collect permission strings the user has through his/her groups.

    Every configured auth backend implementing ``get_group_permissions``
    is consulted; when ``obj`` is given, only permissions matching that
    object are returned.
    """
    perms = set()
    for backend in auth.get_backends():
        getter = getattr(backend, "get_group_permissions", None)
        if getter is not None:
            perms.update(getter(self, obj))
    return perms
def get_all_permissions(self, obj=None):
    """Return every permission string the user has, via all auth backends."""
    return _user_get_all_permissions(self, obj)
def has_perm(self, perm, obj=None):
    """Return True if the user has the permission ``perm``.

    All available auth backends are queried, returning immediately when
    any backend answers True - permission from a single backend counts
    as permission in general.  When ``obj`` is provided, the check is
    scoped to that specific object.
    """
    # Active superusers implicitly hold every permission.
    if self.is_active and self.is_superuser:
        return True

    # Otherwise defer to the configured backends.
    return _user_has_perm(self, perm, obj)
def has_module_perms(self, app_label):
    """Return True if the user has any permission in app ``app_label``.

    Uses pretty much the same logic as has_perm, above: active
    superusers always pass; everyone else is checked against the
    configured auth backends.
    """
    if self.is_active and self.is_superuser:
        # Active superusers have all permissions.
        return True
    return _user_has_module_perms(self, app_label)
def email_user(self, subject, message, from_email=None):
    """Send an e-mail with the given subject and body to this user."""
    from django.core.mail import send_mail
    send_mail(subject, message, from_email, [self.email])
def get_profile(self):
    """Return (and cache) the site-specific profile for this user.

    Raises SiteProfileNotAvailable when AUTH_PROFILE_MODULE is unset or
    malformed, or when the profile model cannot be loaded.
    """
    if not hasattr(self, '_profile_cache'):
        from django.conf import settings
        if not getattr(settings, 'AUTH_PROFILE_MODULE', False):
            raise SiteProfileNotAvailable(
                'You need to set AUTH_PROFILE_MODULE in your project settings')
        try:
            app_label, model_name = settings.AUTH_PROFILE_MODULE.split('.')
        except ValueError:
            raise SiteProfileNotAvailable(
                'app_label and model_name should be separated by a dot in '
                'the AUTH_PROFILE_MODULE setting')

        try:
            model = models.get_model(app_label, model_name)
            if model is None:
                raise SiteProfileNotAvailable(
                    'Unable to load the profile model, check '
                    'AUTH_PROFILE_MODULE in your project settings')
            # Cache the profile and back-link it to this user instance.
            profile = model._default_manager.using(
                self._state.db).get(user__id__exact=self.id)
            profile.user = self
            self._profile_cache = profile
        except (ImportError, ImproperlyConfigured):
            raise SiteProfileNotAvailable
    return self._profile_cache
class MongoEngineBackend(object):
    """Authenticate using MongoEngine and mongoengine.django.auth.User.
    """

    supports_object_permissions = False
    supports_anonymous_user = False
    supports_inactive_user = False

    def authenticate(self, username=None, password=None):
        """Return the user when the credentials check out, else None."""
        user = get_user_document().objects(username=username).first()
        if user and password and user.check_password(password):
            # Tag the user with the dotted path of the first configured
            # backend, as django.contrib.auth expects.
            first = auth.get_backends()[0]
            user.backend = "%s.%s" % (first.__module__,
                                      first.__class__.__name__)
            return user
        return None

    def get_user(self, user_id):
        """Look a user up by primary key; None when no match exists."""
        return get_user_document().objects.with_id(user_id)
def get_user(userid):
    """Return a User object from an id (User.id), or an AnonymousUser.

    Django's equivalent takes a request, but taking an id instead leaves
    it up to the developer to store the id in any way they want (session,
    signed cookie, etc.).  A falsy or unknown id yields AnonymousUser.
    """
    if not userid:
        return AnonymousUser()
    found = MongoEngineBackend().get_user(userid)
    return found if found else AnonymousUser()
@@ -1,115 +0,0 @@
|
|||||||
from django.conf import settings
|
|
||||||
from django.contrib.auth.hashers import make_password
|
|
||||||
from django.contrib.auth.models import UserManager
|
|
||||||
from django.core.exceptions import ImproperlyConfigured
|
|
||||||
from django.db import models
|
|
||||||
from django.utils.importlib import import_module
|
|
||||||
from django.utils.translation import ugettext_lazy as _
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = (
    'get_user_document',
)


# Dotted path of the document class used for authentication; overridable
# via settings.MONGOENGINE_USER_DOCUMENT.
MONGOENGINE_USER_DOCUMENT = getattr(
    settings, 'MONGOENGINE_USER_DOCUMENT', 'mongoengine.django.auth.User')


def get_user_document():
    """Get the user document class used for authentication.

    This is the class defined in settings.MONGOENGINE_USER_DOCUMENT,
    which defaults to `mongoengine.django.auth.User`.
    """
    path = MONGOENGINE_USER_DOCUMENT
    # Split "package.module.ClassName" at the last dot.
    dot_pos = path.rindex('.')
    module = import_module(path[:dot_pos])
    return getattr(module, path[dot_pos + 1:])
class MongoUserManager(UserManager):
    """A User manager which allows the use of MongoEngine documents in Django.

    To use the manager, you must tell django.contrib.auth to use MongoUser as
    the user model. In your settings.py, you need:

        INSTALLED_APPS = (
            ...
            'django.contrib.auth',
            'mongoengine.django.mongo_auth',
            ...
        )
        AUTH_USER_MODEL = 'mongo_auth.MongoUser'

    Django will use the model object to access the custom Manager, which will
    replace the original queryset with MongoEngine querysets.

    By default, mongoengine.django.auth.User will be used to store users. You
    can specify another document class in MONGOENGINE_USER_DOCUMENT in your
    settings.py.

    The User Document class has the same requirements as a standard custom user
    model: https://docs.djangoproject.com/en/dev/topics/auth/customizing/

    In particular, the User Document class must define USERNAME_FIELD and
    REQUIRED_FIELDS.

    `AUTH_USER_MODEL` has been added in Django 1.5.
    """

    def contribute_to_class(self, model, name):
        # Let Django wire the manager up as usual, then swap models:
        # ``dj_model`` keeps the Django dummy model (MongoUser) while
        # ``self.model`` becomes the MongoEngine user document, so queries
        # go through MongoEngine.
        super(MongoUserManager, self).contribute_to_class(model, name)
        self.dj_model = self.model
        self.model = get_user_document()

        # Mirror the document's USERNAME_FIELD onto the Django model and
        # back it with a real CharField so django.contrib.auth machinery
        # can introspect it.
        self.dj_model.USERNAME_FIELD = self.model.USERNAME_FIELD
        username = models.CharField(_('username'), max_length=30, unique=True)
        username.contribute_to_class(self.dj_model, self.dj_model.USERNAME_FIELD)

        # Same for each REQUIRED_FIELDS entry declared by the document.
        # NOTE(review): the loop rebinds ``name``, shadowing the method
        # argument - looks intentional, but worth confirming.
        self.dj_model.REQUIRED_FIELDS = self.model.REQUIRED_FIELDS
        for name in self.dj_model.REQUIRED_FIELDS:
            field = models.CharField(_(name), max_length=30)
            field.contribute_to_class(self.dj_model, name)

    def get(self, *args, **kwargs):
        # Delegate to the MongoEngine queryset but translate its
        # DoesNotExist into the Django model's.
        try:
            return self.get_query_set().get(*args, **kwargs)
        except self.model.DoesNotExist:
            # ModelBackend expects this exception
            raise self.dj_model.DoesNotExist

    @property
    def db(self):
        # Relational database routing makes no sense for MongoEngine.
        raise NotImplementedError

    def get_empty_query_set(self):
        return self.model.objects.none()

    def get_query_set(self):
        # Replace Django's queryset with the MongoEngine one.
        return self.model.objects
class MongoUser(models.Model):
    """Dummy user model for Django.

    MongoUser is used to replace Django's UserManager with MongoUserManager.
    The actual user document class is mongoengine.django.auth.User or any
    other document class specified in MONGOENGINE_USER_DOCUMENT.

    To get the user document class, use `get_user_document()`.
    """

    # MongoUserManager swaps its queryset model for the MongoEngine document.
    objects = MongoUserManager()

    class Meta:
        app_label = 'mongo_auth'

    def set_password(self, password):
        """Doesn't do anything, but works around the issue with Django 1.6."""
        make_password(password)
@@ -1,124 +0,0 @@
|
|||||||
from bson import json_util
|
|
||||||
from django.conf import settings
|
|
||||||
from django.contrib.sessions.backends.base import SessionBase, CreateError
|
|
||||||
from django.core.exceptions import SuspiciousOperation
|
|
||||||
try:
|
|
||||||
from django.utils.encoding import force_unicode
|
|
||||||
except ImportError:
|
|
||||||
from django.utils.encoding import force_text as force_unicode
|
|
||||||
|
|
||||||
from mongoengine.document import Document
|
|
||||||
from mongoengine import fields
|
|
||||||
from mongoengine.queryset import OperationError
|
|
||||||
from mongoengine.connection import DEFAULT_CONNECTION_NAME
|
|
||||||
|
|
||||||
from .utils import datetime_now
|
|
||||||
|
|
||||||
|
|
||||||
# Database alias (connection name) used to store sessions; defaults to the
# default MongoEngine connection.
MONGOENGINE_SESSION_DB_ALIAS = getattr(
    settings, 'MONGOENGINE_SESSION_DB_ALIAS',
    DEFAULT_CONNECTION_NAME)

# a setting for the name of the collection used to store sessions
MONGOENGINE_SESSION_COLLECTION = getattr(
    settings, 'MONGOENGINE_SESSION_COLLECTION',
    'django_session')

# a setting for whether session data is stored encoded or not
MONGOENGINE_SESSION_DATA_ENCODE = getattr(
    settings, 'MONGOENGINE_SESSION_DATA_ENCODE',
    True)
class MongoSession(Document):
    """Document mirroring django.contrib.sessions' storage: a session key,
    the (optionally encoded) payload and an expiry timestamp.
    """

    session_key = fields.StringField(primary_key=True, max_length=40)
    # Encoded string payload unless MONGOENGINE_SESSION_DATA_ENCODE is
    # disabled, in which case the raw session dict is stored.
    session_data = fields.StringField() if MONGOENGINE_SESSION_DATA_ENCODE \
        else fields.DictField()
    expire_date = fields.DateTimeField()

    meta = {
        'collection': MONGOENGINE_SESSION_COLLECTION,
        'db_alias': MONGOENGINE_SESSION_DB_ALIAS,
        'allow_inheritance': False,
        'indexes': [
            {
                # TTL index: MongoDB purges sessions as soon as
                # expire_date passes (expireAfterSeconds=0).
                'fields': ['expire_date'],
                'expireAfterSeconds': 0
            }
        ]
    }

    def get_decoded(self):
        # Decode the stored payload using the session store's serializer.
        return SessionStore().decode(self.session_data)
class SessionStore(SessionBase):
    """A MongoEngine-based session store for Django.
    """

    def _get_session(self, *args, **kwargs):
        # _auth_user_id may come back as a non-string (e.g. an ObjectId);
        # normalize it to a string so django.contrib.auth compares it
        # consistently.
        sess = super(SessionStore, self)._get_session(*args, **kwargs)
        if sess.get('_auth_user_id', None):
            sess['_auth_user_id'] = str(sess.get('_auth_user_id'))
        return sess

    def load(self):
        """Load the session payload for the current key.

        Missing or expired sessions create a fresh, empty session.
        """
        try:
            # BUG FIX: ``datetime_now`` must be *called* - the previous
            # code compared expire_date against the function object
            # itself, so valid sessions could never be matched.
            s = MongoSession.objects(session_key=self.session_key,
                                     expire_date__gt=datetime_now())[0]
            if MONGOENGINE_SESSION_DATA_ENCODE:
                return self.decode(force_unicode(s.session_data))
            else:
                return s.session_data
        except (IndexError, SuspiciousOperation):
            self.create()
            return {}

    def exists(self, session_key):
        """Return True when a session with ``session_key`` is stored."""
        return bool(MongoSession.objects(session_key=session_key).first())

    def create(self):
        """Create a new session under a fresh, unused key."""
        while True:
            self._session_key = self._get_new_session_key()
            try:
                self.save(must_create=True)
            except CreateError:
                # Key collision - generate another one.
                continue
            self.modified = True
            self._session_cache = {}
            return

    def save(self, must_create=False):
        """Persist the session.

        Raises CreateError when ``must_create`` is True and the insert
        hits an existing key.
        """
        if self.session_key is None:
            self._session_key = self._get_new_session_key()
        s = MongoSession(session_key=self.session_key)
        if MONGOENGINE_SESSION_DATA_ENCODE:
            s.session_data = self.encode(self._get_session(no_load=must_create))
        else:
            s.session_data = self._get_session(no_load=must_create)
        s.expire_date = self.get_expiry_date()
        try:
            s.save(force_insert=must_create)
        except OperationError:
            if must_create:
                raise CreateError
            raise

    def delete(self, session_key=None):
        """Delete the given session (defaults to the current one)."""
        if session_key is None:
            if self.session_key is None:
                return
            session_key = self.session_key
        MongoSession.objects(session_key=session_key).delete()
class BSONSerializer(object):
    """
    Serializer that can handle BSON types (eg ObjectId).
    """

    def dumps(self, obj):
        """Serialize ``obj`` to compact, ASCII-encoded JSON bytes."""
        text = json_util.dumps(obj, separators=(',', ':'))
        return text.encode('ascii')

    def loads(self, data):
        """Deserialize bytes previously produced by :meth:`dumps`."""
        return json_util.loads(data.decode('ascii'))
@@ -1,47 +0,0 @@
|
|||||||
from mongoengine.queryset import QuerySet
|
|
||||||
from mongoengine.base import BaseDocument
|
|
||||||
from mongoengine.errors import ValidationError
|
|
||||||
|
|
||||||
def _get_queryset(cls):
    """Return ``cls`` itself when it is already a QuerySet, otherwise
    ``cls.objects``.  Inspired by django.shortcuts.*
    """
    if isinstance(cls, QuerySet):
        return cls
    return cls.objects
def get_document_or_404(cls, *args, **kwargs):
    """Return the single document matching the query, or raise Http404.

    cls may be a Document or QuerySet object. All other passed
    arguments and keyword arguments are used in the get() query.

    Note: Like with get(), an MultipleObjectsReturned will be raised if
    more than one object is found.  A ValidationError (e.g. a malformed
    id) is also mapped to Http404.

    Inspired by django.shortcuts.*
    """
    queryset = _get_queryset(cls)
    try:
        return queryset.get(*args, **kwargs)
    except (queryset._document.DoesNotExist, ValidationError):
        from django.http import Http404
        raise Http404('No %s matches the given query.'
                      % queryset._document._class_name)
def get_list_or_404(cls, *args, **kwargs):
    """Return the documents matching a filter(), or raise Http404 when
    none do.

    cls may be a Document or QuerySet object. All other passed
    arguments and keyword arguments are used in the filter() query.

    Inspired by django.shortcuts.*
    """
    queryset = _get_queryset(cls)
    matches = list(queryset.filter(*args, **kwargs))
    if matches:
        return matches
    from django.http import Http404
    raise Http404('No %s matches the given query.'
                  % queryset._document._class_name)
@@ -1,112 +0,0 @@
|
|||||||
import os
|
|
||||||
import itertools
|
|
||||||
import urlparse
|
|
||||||
|
|
||||||
from mongoengine import *
|
|
||||||
from django.conf import settings
|
|
||||||
from django.core.files.storage import Storage
|
|
||||||
from django.core.exceptions import ImproperlyConfigured
|
|
||||||
|
|
||||||
|
|
||||||
class FileDocument(Document):
    """A document used to store a single file in GridFS.
    """
    # GridFS-backed field holding the file's content and metadata.
    file = FileField()
class GridFSStorage(Storage):
    """A custom storage backend to store files in GridFS
    """

    def __init__(self, base_url=None):
        # Fall back to the project-wide media URL when none is given.
        if base_url is None:
            base_url = settings.MEDIA_URL
        self.base_url = base_url
        self.document = FileDocument
        self.field = 'file'

    def delete(self, name):
        """Deletes the file referenced by ``name`` from the storage system.

        Removes both the GridFS content (the FileField) and the wrapping
        FileDocument.  Unknown names are silently ignored.
        """
        # BUG FIX: operate on the document that actually holds ``name``.
        # The previous code fetched ``objects.first()`` and could delete
        # an unrelated file's GridFS content.
        doc = self._get_doc_with_name(name)
        if doc is not None:
            field = getattr(doc, self.field)
            field.delete()  # Delete the GridFS file content
            doc.delete()    # Delete the FileDocument wrapper

    def exists(self, name):
        """Returns True if a file referenced by the given name already
        exists in the storage system, or False if the name is available
        for a new file.
        """
        doc = self._get_doc_with_name(name)
        if doc:
            field = getattr(doc, self.field)
            return bool(field.name)
        return False

    def listdir(self, path=None):
        """Lists the contents of the specified path, returning a 2-tuple
        of lists; the first item being directories, the second item being
        files.  GridFS has no directories, so the first list is always
        empty.
        """
        def name(doc):
            return getattr(doc, self.field).name
        docs = self.document.objects
        return [], [name(d) for d in docs if name(d)]

    def size(self, name):
        """Returns the total size, in bytes, of the file specified by name.
        """
        doc = self._get_doc_with_name(name)
        if doc:
            return getattr(doc, self.field).length
        raise ValueError("No such file or directory: '%s'" % name)

    def url(self, name):
        """Returns an absolute URL where the file's contents can be
        accessed directly by a web browser.
        """
        if self.base_url is None:
            raise ValueError("This file is not accessible via a URL.")
        return urlparse.urljoin(self.base_url, name).replace('\\', '/')

    def _get_doc_with_name(self, name):
        """Find the document in the store with the given name, or None.

        NOTE: scans every FileDocument client-side - O(n) in the number
        of stored files.
        """
        docs = self.document.objects
        matches = [d for d in docs
                   if hasattr(getattr(d, self.field), 'name')
                   and getattr(d, self.field).name == name]
        return matches[0] if matches else None

    def _open(self, name, mode='rb'):
        """Return the stored FileField for ``name``; raise when missing."""
        doc = self._get_doc_with_name(name)
        if doc:
            return getattr(doc, self.field)
        raise ValueError("No file found with the name '%s'." % name)

    def get_available_name(self, name):
        """Returns a filename that's free on the target storage system,
        and available for new content to be written to.
        """
        file_root, file_ext = os.path.splitext(name)
        # If the filename already exists, add an underscore and a number
        # (before the file extension, if one exists) to the filename
        # until the generated filename doesn't exist.
        count = itertools.count(1)
        while self.exists(name):
            # next(count) works on both Python 2 and 3 (count.next()
            # was Python 2 only); file_ext includes the dot.
            name = "%s_%s%s" % (file_root, next(count), file_ext)
        return name

    def _save(self, name, content):
        """Store ``content`` in GridFS under ``name`` and return the name."""
        doc = self.document()
        getattr(doc, self.field).put(content, filename=name)
        doc.save()
        return name
@@ -1,39 +0,0 @@
|
|||||||
#coding: utf-8
|
|
||||||
from nose.plugins.skip import SkipTest
|
|
||||||
|
|
||||||
from mongoengine.python_support import PY3
|
|
||||||
from mongoengine import connect
|
|
||||||
|
|
||||||
try:
|
|
||||||
from django.test import TestCase
|
|
||||||
from django.conf import settings
|
|
||||||
except Exception as err:
|
|
||||||
if PY3:
|
|
||||||
from unittest import TestCase
|
|
||||||
# Dummy value so no error
|
|
||||||
class settings:
|
|
||||||
MONGO_DATABASE_NAME = 'dummy'
|
|
||||||
else:
|
|
||||||
raise err
|
|
||||||
|
|
||||||
|
|
||||||
class MongoTestCase(TestCase):
    """TestCase class that clears the collections between the tests."""

    db_name = 'test_%s' % settings.MONGO_DATABASE_NAME

    def setUp(self):
        if PY3:
            raise SkipTest('django does not have Python 3 support')

    def __init__(self, methodName='runTest'):
        # BUG FIX: unittest's default method name is 'runTest' (capital
        # T); the previous default 'runtest' would make instantiating the
        # case without an explicit method name fail.
        self.db = connect(self.db_name).get_db()
        super(MongoTestCase, self).__init__(methodName)

    def _post_teardown(self):
        # Drop every collection (except system.indexes) so each test
        # starts from a clean database.
        super(MongoTestCase, self)._post_teardown()
        for collection in self.db.collection_names():
            if collection == 'system.indexes':
                continue
            self.db.drop_collection(collection)
@@ -1,6 +0,0 @@
|
|||||||
# Prefer Django's timezone-aware ``now`` when available; fall back to the
# naive ``datetime.now`` on older Django versions.
try:
    # django >= 1.4
    from django.utils.timezone import now as datetime_now
except ImportError:
    from datetime import datetime
    datetime_now = datetime.now
File diff suppressed because it is too large
Load Diff
@@ -1,11 +1,21 @@
|
|||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
|
|
||||||
from mongoengine.python_support import txt_type
|
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
__all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError',
|
"NotRegistered",
|
||||||
'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError',
|
"InvalidDocumentError",
|
||||||
'OperationError', 'NotUniqueError', 'ValidationError')
|
"LookUpError",
|
||||||
|
"DoesNotExist",
|
||||||
|
"MultipleObjectsReturned",
|
||||||
|
"InvalidQueryError",
|
||||||
|
"OperationError",
|
||||||
|
"NotUniqueError",
|
||||||
|
"BulkWriteError",
|
||||||
|
"FieldDoesNotExist",
|
||||||
|
"ValidationError",
|
||||||
|
"SaveConditionError",
|
||||||
|
"DeprecatedError",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class NotRegistered(Exception):
|
class NotRegistered(Exception):
|
||||||
@@ -40,6 +50,25 @@ class NotUniqueError(OperationError):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class BulkWriteError(OperationError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class SaveConditionError(OperationError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class FieldDoesNotExist(Exception):
|
||||||
|
"""Raised when trying to set a field
|
||||||
|
not declared in a :class:`~mongoengine.Document`
|
||||||
|
or an :class:`~mongoengine.EmbeddedDocument`.
|
||||||
|
|
||||||
|
To avoid this behavior on data loading,
|
||||||
|
you should set the :attr:`strict` to ``False``
|
||||||
|
in the :attr:`meta` dictionary.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
class ValidationError(AssertionError):
|
class ValidationError(AssertionError):
|
||||||
"""Validation exception.
|
"""Validation exception.
|
||||||
|
|
||||||
@@ -56,23 +85,24 @@ class ValidationError(AssertionError):
|
|||||||
_message = None
|
_message = None
|
||||||
|
|
||||||
def __init__(self, message="", **kwargs):
|
def __init__(self, message="", **kwargs):
|
||||||
self.errors = kwargs.get('errors', {})
|
super().__init__(message)
|
||||||
self.field_name = kwargs.get('field_name')
|
self.errors = kwargs.get("errors", {})
|
||||||
|
self.field_name = kwargs.get("field_name")
|
||||||
self.message = message
|
self.message = message
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return txt_type(self.message)
|
return str(self.message)
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return '%s(%s,)' % (self.__class__.__name__, self.message)
|
return "{}({},)".format(self.__class__.__name__, self.message)
|
||||||
|
|
||||||
def __getattribute__(self, name):
|
def __getattribute__(self, name):
|
||||||
message = super(ValidationError, self).__getattribute__(name)
|
message = super().__getattribute__(name)
|
||||||
if name == 'message':
|
if name == "message":
|
||||||
if self.field_name:
|
if self.field_name:
|
||||||
message = '%s' % message
|
message = "%s" % message
|
||||||
if self.errors:
|
if self.errors:
|
||||||
message = '%s(%s)' % (message, self._format_errors())
|
message = "{}({})".format(message, self._format_errors())
|
||||||
return message
|
return message
|
||||||
|
|
||||||
def _get_message(self):
|
def _get_message(self):
|
||||||
@@ -93,34 +123,40 @@ class ValidationError(AssertionError):
|
|||||||
|
|
||||||
def build_dict(source):
|
def build_dict(source):
|
||||||
errors_dict = {}
|
errors_dict = {}
|
||||||
if not source:
|
|
||||||
return errors_dict
|
|
||||||
if isinstance(source, dict):
|
if isinstance(source, dict):
|
||||||
for field_name, error in source.iteritems():
|
for field_name, error in source.items():
|
||||||
errors_dict[field_name] = build_dict(error)
|
errors_dict[field_name] = build_dict(error)
|
||||||
elif isinstance(source, ValidationError) and source.errors:
|
elif isinstance(source, ValidationError) and source.errors:
|
||||||
return build_dict(source.errors)
|
return build_dict(source.errors)
|
||||||
else:
|
else:
|
||||||
return unicode(source)
|
return str(source)
|
||||||
|
|
||||||
return errors_dict
|
return errors_dict
|
||||||
|
|
||||||
if not self.errors:
|
if not self.errors:
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
return build_dict(self.errors)
|
return build_dict(self.errors)
|
||||||
|
|
||||||
def _format_errors(self):
|
def _format_errors(self):
|
||||||
"""Returns a string listing all errors within a document"""
|
"""Returns a string listing all errors within a document"""
|
||||||
|
|
||||||
def generate_key(value, prefix=''):
|
def generate_key(value, prefix=""):
|
||||||
if isinstance(value, list):
|
if isinstance(value, list):
|
||||||
value = ' '.join([generate_key(k) for k in value])
|
value = " ".join([generate_key(k) for k in value])
|
||||||
if isinstance(value, dict):
|
elif isinstance(value, dict):
|
||||||
value = ' '.join(
|
value = " ".join([generate_key(v, k) for k, v in value.items()])
|
||||||
[generate_key(v, k) for k, v in value.iteritems()])
|
|
||||||
|
|
||||||
results = "%s.%s" % (prefix, value) if prefix else value
|
results = "{}.{}".format(prefix, value) if prefix else value
|
||||||
return results
|
return results
|
||||||
|
|
||||||
error_dict = defaultdict(list)
|
error_dict = defaultdict(list)
|
||||||
for k, v in self.to_dict().iteritems():
|
for k, v in self.to_dict().items():
|
||||||
error_dict[generate_key(v)].append(k)
|
error_dict[generate_key(v)].append(k)
|
||||||
return ' '.join(["%s: %s" % (k, v) for k, v in error_dict.iteritems()])
|
return " ".join(["{}: {}".format(k, v) for k, v in error_dict.items()])
|
||||||
|
|
||||||
|
|
||||||
|
class DeprecatedError(Exception):
|
||||||
|
"""Raise when a user uses a feature that has been Deprecated"""
|
||||||
|
|
||||||
|
pass
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
19
mongoengine/mongodb_support.py
Normal file
19
mongoengine/mongodb_support.py
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
"""
|
||||||
|
Helper functions, constants, and types to aid with MongoDB version support
|
||||||
|
"""
|
||||||
|
from mongoengine.connection import get_connection
|
||||||
|
|
||||||
|
|
||||||
|
# Constant that can be used to compare the version retrieved with
# get_mongodb_version()
MONGODB_34 = (3, 4)
MONGODB_36 = (3, 6)


def get_mongodb_version():
    """Return the version of the default connected mongoDB (first 2 digits)

    :return: tuple(int, int)
    """
    server_info = get_connection().server_info()
    major_minor = server_info["versionArray"][:2]  # e.g: (3, 2)
    return tuple(major_minor)
61
mongoengine/pymongo_support.py
Normal file
61
mongoengine/pymongo_support.py
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
"""
|
||||||
|
Helper functions, constants, and types to aid with PyMongo v2.7 - v3.x support.
|
||||||
|
"""
|
||||||
|
import pymongo
|
||||||
|
from pymongo.errors import OperationFailure
|
||||||
|
|
||||||
|
_PYMONGO_37 = (3, 7)
|
||||||
|
|
||||||
|
PYMONGO_VERSION = tuple(pymongo.version_tuple[:2])
|
||||||
|
|
||||||
|
IS_PYMONGO_GTE_37 = PYMONGO_VERSION >= _PYMONGO_37
|
||||||
|
|
||||||
|
|
||||||
|
def count_documents(
    collection, filter, skip=None, limit=None, hint=None, collation=None
):
    """Count the documents matching ``filter``.

    PyMongo >= 3.7 deprecates ``Cursor.count`` in favour of
    ``Collection.count_documents``; use the new API when available and
    fall back to the deprecated cursor count otherwise.
    """
    # Pymongo raises an OperationFailure if called with limit=0, so
    # short-circuit that case here.
    if limit == 0:
        return 0

    options = {}
    if skip is not None:
        options["skip"] = skip
    if limit is not None:
        options["limit"] = limit
    if hint not in (-1, None):
        options["hint"] = hint
    if collation is not None:
        options["collation"] = collation

    # count_documents appeared in pymongo 3.7
    if IS_PYMONGO_GTE_37:
        try:
            return collection.count_documents(filter=filter, **options)
        except OperationFailure:
            # Some operators that used to work with .count no longer work
            # with count_documents (i.e $geoNear, $near, and $nearSphere),
            # so fall back to the deprecated Cursor.count.  Keeping this
            # should be reevaluated the day pymongo removes .count
            # entirely.
            pass

    cursor = collection.find(filter)
    for option_name, option_value in options.items():
        cursor = getattr(cursor, option_name)(option_value)
    constrained = "skip" in options or "limit" in options
    return cursor.count(with_limit_and_skip=constrained)
|
||||||
|
def list_collection_names(db, include_system_collections=False):
    """List the collection names of ``db``.

    Pymongo>3.7 deprecates collection_names in favour of
    list_collection_names; this wrapper uses whichever the installed
    driver provides.  "system.*" collections are filtered out unless
    explicitly requested.
    """
    if IS_PYMONGO_GTE_37:
        names = db.list_collection_names()
    else:
        names = db.collection_names()

    if include_system_collections:
        return names
    return [n for n in names if not n.startswith("system.")]
@@ -1,61 +0,0 @@
|
|||||||
"""Helper functions and types to aid with Python 2.5 - 3 support."""


import sys


# Interpreter feature flags.
PY3 = sys.version_info[0] == 3
PY25 = sys.version_info[:2] == (2, 5)
# Version-as-integer comparison: true for interpreters newer than 2.6.4.
# Presumably marks when unicode keyword arguments became usable - TODO confirm.
UNICODE_KWARGS = int(''.join([str(x) for x in sys.version_info[:3]])) > 264

if PY3:
    import codecs
    from io import BytesIO as StringIO

    # return s converted to binary. b('test') should be equivalent to b'test'
    def b(s):
        return codecs.latin_1_encode(s)[0]

    bin_type = bytes
    txt_type = str
else:
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO

    # Conversion to binary only necessary in Python 3
    def b(s):
        return s

    bin_type = str
    txt_type = unicode

# Both representations a "string" may take on the current interpreter.
str_types = (bin_type, txt_type)

if PY25:
    # itertools.product and functools.reduce are unavailable on 2.5;
    # provide a pure-Python product and re-export the builtin reduce.
    def product(*args, **kwds):
        pools = map(tuple, args) * kwds.get('repeat', 1)
        result = [[]]
        for pool in pools:
            result = [x + [y] for x in result for y in pool]
        for prod in result:
            yield tuple(prod)
    reduce = reduce
else:
    from itertools import product
    from functools import reduce
||||||
# For use with Python 2.5
|
|
||||||
# converts all keys from unicode to str for d and all nested dictionaries
|
|
||||||
def to_str_keys_recursive(d):
|
|
||||||
if isinstance(d, list):
|
|
||||||
for val in d:
|
|
||||||
if isinstance(val, (dict, list)):
|
|
||||||
to_str_keys_recursive(val)
|
|
||||||
elif isinstance(d, dict):
|
|
||||||
for key, val in d.items():
|
|
||||||
if isinstance(val, (dict, list)):
|
|
||||||
to_str_keys_recursive(val)
|
|
||||||
if isinstance(key, unicode):
|
|
||||||
d[str(key)] = d.pop(key)
|
|
||||||
else:
|
|
||||||
raise ValueError("non list/dict parameter not allowed")
|
|
||||||
@@ -1,11 +1,28 @@
|
|||||||
from mongoengine.errors import (DoesNotExist, MultipleObjectsReturned,
|
from mongoengine.errors import *
|
||||||
InvalidQueryError, OperationError,
|
|
||||||
NotUniqueError)
|
|
||||||
from mongoengine.queryset.field_list import *
|
from mongoengine.queryset.field_list import *
|
||||||
from mongoengine.queryset.manager import *
|
from mongoengine.queryset.manager import *
|
||||||
from mongoengine.queryset.queryset import *
|
from mongoengine.queryset.queryset import *
|
||||||
from mongoengine.queryset.transform import *
|
from mongoengine.queryset.transform import *
|
||||||
from mongoengine.queryset.visitor import *
|
from mongoengine.queryset.visitor import *
|
||||||
|
|
||||||
__all__ = (field_list.__all__ + manager.__all__ + queryset.__all__ +
|
# Expose just the public subset of all imported objects and constants.
|
||||||
transform.__all__ + visitor.__all__)
|
__all__ = (
|
||||||
|
"QuerySet",
|
||||||
|
"QuerySetNoCache",
|
||||||
|
"Q",
|
||||||
|
"queryset_manager",
|
||||||
|
"QuerySetManager",
|
||||||
|
"QueryFieldList",
|
||||||
|
"DO_NOTHING",
|
||||||
|
"NULLIFY",
|
||||||
|
"CASCADE",
|
||||||
|
"DENY",
|
||||||
|
"PULL",
|
||||||
|
# Errors that might be related to a queryset, mostly here for backward
|
||||||
|
# compatibility
|
||||||
|
"DoesNotExist",
|
||||||
|
"InvalidQueryError",
|
||||||
|
"MultipleObjectsReturned",
|
||||||
|
"NotUniqueError",
|
||||||
|
"OperationError",
|
||||||
|
)
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -1,13 +1,15 @@
|
|||||||
|
__all__ = ("QueryFieldList",)
|
||||||
__all__ = ('QueryFieldList',)
|
|
||||||
|
|
||||||
|
|
||||||
class QueryFieldList(object):
|
class QueryFieldList:
|
||||||
"""Object that handles combinations of .only() and .exclude() calls"""
|
"""Object that handles combinations of .only() and .exclude() calls"""
|
||||||
|
|
||||||
ONLY = 1
|
ONLY = 1
|
||||||
EXCLUDE = 0
|
EXCLUDE = 0
|
||||||
|
|
||||||
def __init__(self, fields=None, value=ONLY, always_include=None, _only_called=False):
|
def __init__(
|
||||||
|
self, fields=None, value=ONLY, always_include=None, _only_called=False
|
||||||
|
):
|
||||||
"""The QueryFieldList builder
|
"""The QueryFieldList builder
|
||||||
|
|
||||||
:param fields: A list of fields used in `.only()` or `.exclude()`
|
:param fields: A list of fields used in `.only()` or `.exclude()`
|
||||||
@@ -50,7 +52,7 @@ class QueryFieldList(object):
|
|||||||
self.fields = f.fields - self.fields
|
self.fields = f.fields - self.fields
|
||||||
self._clean_slice()
|
self._clean_slice()
|
||||||
|
|
||||||
if '_id' in f.fields:
|
if "_id" in f.fields:
|
||||||
self._id = f.value
|
self._id = f.value
|
||||||
|
|
||||||
if self.always_include:
|
if self.always_include:
|
||||||
@@ -60,23 +62,23 @@ class QueryFieldList(object):
|
|||||||
else:
|
else:
|
||||||
self.fields -= self.always_include
|
self.fields -= self.always_include
|
||||||
|
|
||||||
if getattr(f, '_only_called', False):
|
if getattr(f, "_only_called", False):
|
||||||
self._only_called = True
|
self._only_called = True
|
||||||
return self
|
return self
|
||||||
|
|
||||||
def __nonzero__(self):
|
def __bool__(self):
|
||||||
return bool(self.fields)
|
return bool(self.fields)
|
||||||
|
|
||||||
def as_dict(self):
|
def as_dict(self):
|
||||||
field_list = dict((field, self.value) for field in self.fields)
|
field_list = {field: self.value for field in self.fields}
|
||||||
if self.slice:
|
if self.slice:
|
||||||
field_list.update(self.slice)
|
field_list.update(self.slice)
|
||||||
if self._id is not None:
|
if self._id is not None:
|
||||||
field_list['_id'] = self._id
|
field_list["_id"] = self._id
|
||||||
return field_list
|
return field_list
|
||||||
|
|
||||||
def reset(self):
|
def reset(self):
|
||||||
self.fields = set([])
|
self.fields = set()
|
||||||
self.slice = {}
|
self.slice = {}
|
||||||
self.value = self.ONLY
|
self.value = self.ONLY
|
||||||
|
|
||||||
|
|||||||
@@ -1,10 +1,10 @@
|
|||||||
from functools import partial
|
from functools import partial
|
||||||
from mongoengine.queryset.queryset import QuerySet
|
from mongoengine.queryset.queryset import QuerySet
|
||||||
|
|
||||||
__all__ = ('queryset_manager', 'QuerySetManager')
|
__all__ = ("queryset_manager", "QuerySetManager")
|
||||||
|
|
||||||
|
|
||||||
class QuerySetManager(object):
|
class QuerySetManager:
|
||||||
"""
|
"""
|
||||||
The default QuerySet Manager.
|
The default QuerySet Manager.
|
||||||
|
|
||||||
@@ -29,14 +29,14 @@ class QuerySetManager(object):
|
|||||||
Document.objects is accessed.
|
Document.objects is accessed.
|
||||||
"""
|
"""
|
||||||
if instance is not None:
|
if instance is not None:
|
||||||
# Document class being used rather than a document object
|
# Document object being used rather than a document class
|
||||||
return self
|
return self
|
||||||
|
|
||||||
# owner is the document that contains the QuerySetManager
|
# owner is the document that contains the QuerySetManager
|
||||||
queryset_class = owner._meta.get('queryset_class', self.default)
|
queryset_class = owner._meta.get("queryset_class", self.default)
|
||||||
queryset = queryset_class(owner, owner._get_collection())
|
queryset = queryset_class(owner, owner._get_collection())
|
||||||
if self.get_queryset:
|
if self.get_queryset:
|
||||||
arg_count = self.get_queryset.func_code.co_argcount
|
arg_count = self.get_queryset.__code__.co_argcount
|
||||||
if arg_count == 1:
|
if arg_count == 1:
|
||||||
queryset = self.get_queryset(queryset)
|
queryset = self.get_queryset(queryset)
|
||||||
elif arg_count == 2:
|
elif arg_count == 2:
|
||||||
|
|||||||
@@ -1,9 +1,22 @@
|
|||||||
from mongoengine.errors import OperationError
|
from mongoengine.errors import OperationError
|
||||||
from mongoengine.queryset.base import (BaseQuerySet, DO_NOTHING, NULLIFY,
|
from mongoengine.queryset.base import (
|
||||||
CASCADE, DENY, PULL)
|
BaseQuerySet,
|
||||||
|
CASCADE,
|
||||||
|
DENY,
|
||||||
|
DO_NOTHING,
|
||||||
|
NULLIFY,
|
||||||
|
PULL,
|
||||||
|
)
|
||||||
|
|
||||||
__all__ = ('QuerySet', 'QuerySetNoCache', 'DO_NOTHING', 'NULLIFY', 'CASCADE',
|
__all__ = (
|
||||||
'DENY', 'PULL')
|
"QuerySet",
|
||||||
|
"QuerySetNoCache",
|
||||||
|
"DO_NOTHING",
|
||||||
|
"NULLIFY",
|
||||||
|
"CASCADE",
|
||||||
|
"DENY",
|
||||||
|
"PULL",
|
||||||
|
)
|
||||||
|
|
||||||
# The maximum number of items to display in a QuerySet.__repr__
|
# The maximum number of items to display in a QuerySet.__repr__
|
||||||
REPR_OUTPUT_SIZE = 20
|
REPR_OUTPUT_SIZE = 20
|
||||||
@@ -27,9 +40,10 @@ class QuerySet(BaseQuerySet):
|
|||||||
in batches of ``ITER_CHUNK_SIZE``.
|
in batches of ``ITER_CHUNK_SIZE``.
|
||||||
|
|
||||||
If ``self._has_more`` the cursor hasn't been exhausted so cache then
|
If ``self._has_more`` the cursor hasn't been exhausted so cache then
|
||||||
batch. Otherwise iterate the result_cache.
|
batch. Otherwise iterate the result_cache.
|
||||||
"""
|
"""
|
||||||
self._iter = True
|
self._iter = True
|
||||||
|
|
||||||
if self._has_more:
|
if self._has_more:
|
||||||
return self._iter_results()
|
return self._iter_results()
|
||||||
|
|
||||||
@@ -38,45 +52,60 @@ class QuerySet(BaseQuerySet):
|
|||||||
|
|
||||||
def __len__(self):
|
def __len__(self):
|
||||||
"""Since __len__ is called quite frequently (for example, as part of
|
"""Since __len__ is called quite frequently (for example, as part of
|
||||||
list(qs) we populate the result cache and cache the length.
|
list(qs)), we populate the result cache and cache the length.
|
||||||
"""
|
"""
|
||||||
if self._len is not None:
|
if self._len is not None:
|
||||||
return self._len
|
return self._len
|
||||||
|
|
||||||
|
# Populate the result cache with *all* of the docs in the cursor
|
||||||
if self._has_more:
|
if self._has_more:
|
||||||
# populate the cache
|
|
||||||
list(self._iter_results())
|
list(self._iter_results())
|
||||||
|
|
||||||
|
# Cache the length of the complete result cache and return it
|
||||||
self._len = len(self._result_cache)
|
self._len = len(self._result_cache)
|
||||||
return self._len
|
return self._len
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
"""Provides the string representation of the QuerySet
|
"""Provide a string representation of the QuerySet"""
|
||||||
"""
|
|
||||||
if self._iter:
|
if self._iter:
|
||||||
return '.. queryset mid-iteration ..'
|
return ".. queryset mid-iteration .."
|
||||||
|
|
||||||
self._populate_cache()
|
self._populate_cache()
|
||||||
data = self._result_cache[:REPR_OUTPUT_SIZE + 1]
|
data = self._result_cache[: REPR_OUTPUT_SIZE + 1]
|
||||||
if len(data) > REPR_OUTPUT_SIZE:
|
if len(data) > REPR_OUTPUT_SIZE:
|
||||||
data[-1] = "...(remaining elements truncated)..."
|
data[-1] = "...(remaining elements truncated)..."
|
||||||
return repr(data)
|
return repr(data)
|
||||||
|
|
||||||
|
|
||||||
def _iter_results(self):
|
def _iter_results(self):
|
||||||
"""A generator for iterating over the result cache.
|
"""A generator for iterating over the result cache.
|
||||||
|
|
||||||
Also populates the cache if there are more possible results to yield.
|
Also populates the cache if there are more possible results to
|
||||||
Raises StopIteration when there are no more results"""
|
yield. Raises StopIteration when there are no more results.
|
||||||
|
"""
|
||||||
if self._result_cache is None:
|
if self._result_cache is None:
|
||||||
self._result_cache = []
|
self._result_cache = []
|
||||||
|
|
||||||
pos = 0
|
pos = 0
|
||||||
while True:
|
while True:
|
||||||
upper = len(self._result_cache)
|
|
||||||
while pos < upper:
|
# For all positions lower than the length of the current result
|
||||||
|
# cache, serve the docs straight from the cache w/o hitting the
|
||||||
|
# database.
|
||||||
|
# XXX it's VERY important to compute the len within the `while`
|
||||||
|
# condition because the result cache might expand mid-iteration
|
||||||
|
# (e.g. if we call len(qs) inside a loop that iterates over the
|
||||||
|
# queryset). Fortunately len(list) is O(1) in Python, so this
|
||||||
|
# doesn't cause performance issues.
|
||||||
|
while pos < len(self._result_cache):
|
||||||
yield self._result_cache[pos]
|
yield self._result_cache[pos]
|
||||||
pos = pos + 1
|
pos += 1
|
||||||
|
|
||||||
|
# return if we already established there were no more
|
||||||
|
# docs in the db cursor.
|
||||||
if not self._has_more:
|
if not self._has_more:
|
||||||
raise StopIteration
|
return
|
||||||
|
|
||||||
|
# Otherwise, populate more of the cache and repeat.
|
||||||
if len(self._result_cache) <= pos:
|
if len(self._result_cache) <= pos:
|
||||||
self._populate_cache()
|
self._populate_cache()
|
||||||
|
|
||||||
@@ -87,14 +116,24 @@ class QuerySet(BaseQuerySet):
|
|||||||
"""
|
"""
|
||||||
if self._result_cache is None:
|
if self._result_cache is None:
|
||||||
self._result_cache = []
|
self._result_cache = []
|
||||||
if self._has_more:
|
|
||||||
try:
|
|
||||||
for i in xrange(ITER_CHUNK_SIZE):
|
|
||||||
self._result_cache.append(self.next())
|
|
||||||
except StopIteration:
|
|
||||||
self._has_more = False
|
|
||||||
|
|
||||||
def count(self, with_limit_and_skip=True):
|
# Skip populating the cache if we already established there are no
|
||||||
|
# more docs to pull from the database.
|
||||||
|
if not self._has_more:
|
||||||
|
return
|
||||||
|
|
||||||
|
# Pull in ITER_CHUNK_SIZE docs from the database and store them in
|
||||||
|
# the result cache.
|
||||||
|
try:
|
||||||
|
for _ in range(ITER_CHUNK_SIZE):
|
||||||
|
self._result_cache.append(next(self))
|
||||||
|
except StopIteration:
|
||||||
|
# Getting this exception means there are no more docs in the
|
||||||
|
# db cursor. Set _has_more to False so that we can use that
|
||||||
|
# information in other places.
|
||||||
|
self._has_more = False
|
||||||
|
|
||||||
|
def count(self, with_limit_and_skip=False):
|
||||||
"""Count the selected elements in the query.
|
"""Count the selected elements in the query.
|
||||||
|
|
||||||
:param with_limit_and_skip (optional): take any :meth:`limit` or
|
:param with_limit_and_skip (optional): take any :meth:`limit` or
|
||||||
@@ -102,21 +141,23 @@ class QuerySet(BaseQuerySet):
|
|||||||
getting the count
|
getting the count
|
||||||
"""
|
"""
|
||||||
if with_limit_and_skip is False:
|
if with_limit_and_skip is False:
|
||||||
return super(QuerySet, self).count(with_limit_and_skip)
|
return super().count(with_limit_and_skip)
|
||||||
|
|
||||||
if self._len is None:
|
if self._len is None:
|
||||||
self._len = super(QuerySet, self).count(with_limit_and_skip)
|
# cache the length
|
||||||
|
self._len = super().count(with_limit_and_skip)
|
||||||
|
|
||||||
return self._len
|
return self._len
|
||||||
|
|
||||||
def no_cache(self):
|
def no_cache(self):
|
||||||
"""Convert to a non_caching queryset
|
"""Convert to a non-caching queryset
|
||||||
|
|
||||||
.. versionadded:: 0.8.3 Convert to non caching queryset
|
.. versionadded:: 0.8.3 Convert to non caching queryset
|
||||||
"""
|
"""
|
||||||
if self._result_cache is not None:
|
if self._result_cache is not None:
|
||||||
raise OperationError("QuerySet already cached")
|
raise OperationError("QuerySet already cached")
|
||||||
return self.clone_into(QuerySetNoCache(self._document, self._collection))
|
|
||||||
|
return self._clone_into(QuerySetNoCache(self._document, self._collection))
|
||||||
|
|
||||||
|
|
||||||
class QuerySetNoCache(BaseQuerySet):
|
class QuerySetNoCache(BaseQuerySet):
|
||||||
@@ -127,7 +168,7 @@ class QuerySetNoCache(BaseQuerySet):
|
|||||||
|
|
||||||
.. versionadded:: 0.8.3 Convert to caching queryset
|
.. versionadded:: 0.8.3 Convert to caching queryset
|
||||||
"""
|
"""
|
||||||
return self.clone_into(QuerySet(self._document, self._collection))
|
return self._clone_into(QuerySet(self._document, self._collection))
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
"""Provides the string representation of the QuerySet
|
"""Provides the string representation of the QuerySet
|
||||||
@@ -135,14 +176,15 @@ class QuerySetNoCache(BaseQuerySet):
|
|||||||
.. versionchanged:: 0.6.13 Now doesnt modify the cursor
|
.. versionchanged:: 0.6.13 Now doesnt modify the cursor
|
||||||
"""
|
"""
|
||||||
if self._iter:
|
if self._iter:
|
||||||
return '.. queryset mid-iteration ..'
|
return ".. queryset mid-iteration .."
|
||||||
|
|
||||||
data = []
|
data = []
|
||||||
for i in xrange(REPR_OUTPUT_SIZE + 1):
|
for _ in range(REPR_OUTPUT_SIZE + 1):
|
||||||
try:
|
try:
|
||||||
data.append(self.next())
|
data.append(next(self))
|
||||||
except StopIteration:
|
except StopIteration:
|
||||||
break
|
break
|
||||||
|
|
||||||
if len(data) > REPR_OUTPUT_SIZE:
|
if len(data) > REPR_OUTPUT_SIZE:
|
||||||
data[-1] = "...(remaining elements truncated)..."
|
data[-1] = "...(remaining elements truncated)..."
|
||||||
|
|
||||||
|
|||||||
@@ -1,44 +1,74 @@
|
|||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
|
|
||||||
|
from bson import ObjectId, SON
|
||||||
|
from bson.dbref import DBRef
|
||||||
import pymongo
|
import pymongo
|
||||||
from bson import SON
|
|
||||||
|
|
||||||
|
from mongoengine.base import UPDATE_OPERATORS
|
||||||
from mongoengine.common import _import_class
|
from mongoengine.common import _import_class
|
||||||
from mongoengine.errors import InvalidQueryError, LookUpError
|
from mongoengine.errors import InvalidQueryError
|
||||||
|
|
||||||
__all__ = ('query', 'update')
|
__all__ = ("query", "update", "STRING_OPERATORS")
|
||||||
|
|
||||||
|
COMPARISON_OPERATORS = (
|
||||||
|
"ne",
|
||||||
|
"gt",
|
||||||
|
"gte",
|
||||||
|
"lt",
|
||||||
|
"lte",
|
||||||
|
"in",
|
||||||
|
"nin",
|
||||||
|
"mod",
|
||||||
|
"all",
|
||||||
|
"size",
|
||||||
|
"exists",
|
||||||
|
"not",
|
||||||
|
"elemMatch",
|
||||||
|
"type",
|
||||||
|
)
|
||||||
|
GEO_OPERATORS = (
|
||||||
|
"within_distance",
|
||||||
|
"within_spherical_distance",
|
||||||
|
"within_box",
|
||||||
|
"within_polygon",
|
||||||
|
"near",
|
||||||
|
"near_sphere",
|
||||||
|
"max_distance",
|
||||||
|
"min_distance",
|
||||||
|
"geo_within",
|
||||||
|
"geo_within_box",
|
||||||
|
"geo_within_polygon",
|
||||||
|
"geo_within_center",
|
||||||
|
"geo_within_sphere",
|
||||||
|
"geo_intersects",
|
||||||
|
)
|
||||||
|
STRING_OPERATORS = (
|
||||||
|
"contains",
|
||||||
|
"icontains",
|
||||||
|
"startswith",
|
||||||
|
"istartswith",
|
||||||
|
"endswith",
|
||||||
|
"iendswith",
|
||||||
|
"exact",
|
||||||
|
"iexact",
|
||||||
|
)
|
||||||
|
CUSTOM_OPERATORS = ("match",)
|
||||||
|
MATCH_OPERATORS = (
|
||||||
|
COMPARISON_OPERATORS + GEO_OPERATORS + STRING_OPERATORS + CUSTOM_OPERATORS
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
|
# TODO make this less complex
|
||||||
'all', 'size', 'exists', 'not')
|
def query(_doc_cls=None, **kwargs):
|
||||||
GEO_OPERATORS = ('within_distance', 'within_spherical_distance',
|
"""Transform a query from Django-style format to Mongo format."""
|
||||||
'within_box', 'within_polygon', 'near', 'near_sphere',
|
|
||||||
'max_distance', 'geo_within', 'geo_within_box',
|
|
||||||
'geo_within_polygon', 'geo_within_center',
|
|
||||||
'geo_within_sphere', 'geo_intersects')
|
|
||||||
STRING_OPERATORS = ('contains', 'icontains', 'startswith',
|
|
||||||
'istartswith', 'endswith', 'iendswith',
|
|
||||||
'exact', 'iexact')
|
|
||||||
CUSTOM_OPERATORS = ('match',)
|
|
||||||
MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS +
|
|
||||||
STRING_OPERATORS + CUSTOM_OPERATORS)
|
|
||||||
|
|
||||||
UPDATE_OPERATORS = ('set', 'unset', 'inc', 'dec', 'pop', 'push',
|
|
||||||
'push_all', 'pull', 'pull_all', 'add_to_set',
|
|
||||||
'set_on_insert')
|
|
||||||
|
|
||||||
|
|
||||||
def query(_doc_cls=None, _field_operation=False, **query):
|
|
||||||
"""Transform a query from Django-style format to Mongo format.
|
|
||||||
"""
|
|
||||||
mongo_query = {}
|
mongo_query = {}
|
||||||
merge_query = defaultdict(list)
|
merge_query = defaultdict(list)
|
||||||
for key, value in sorted(query.items()):
|
for key, value in sorted(kwargs.items()):
|
||||||
if key == "__raw__":
|
if key == "__raw__":
|
||||||
mongo_query.update(value)
|
mongo_query.update(value)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
parts = key.split('__')
|
parts = key.rsplit("__")
|
||||||
indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()]
|
indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()]
|
||||||
parts = [part for part in parts if not part.isdigit()]
|
parts = [part for part in parts if not part.isdigit()]
|
||||||
# Check for an operator and transform to mongo-style if there is
|
# Check for an operator and transform to mongo-style if there is
|
||||||
@@ -46,8 +76,12 @@ def query(_doc_cls=None, _field_operation=False, **query):
|
|||||||
if len(parts) > 1 and parts[-1] in MATCH_OPERATORS:
|
if len(parts) > 1 and parts[-1] in MATCH_OPERATORS:
|
||||||
op = parts.pop()
|
op = parts.pop()
|
||||||
|
|
||||||
|
# Allow to escape operator-like field name by __
|
||||||
|
if len(parts) > 1 and parts[-1] == "":
|
||||||
|
parts.pop()
|
||||||
|
|
||||||
negate = False
|
negate = False
|
||||||
if len(parts) > 1 and parts[-1] == 'not':
|
if len(parts) > 1 and parts[-1] == "not":
|
||||||
parts.pop()
|
parts.pop()
|
||||||
negate = True
|
negate = True
|
||||||
|
|
||||||
@@ -55,74 +89,120 @@ def query(_doc_cls=None, _field_operation=False, **query):
|
|||||||
# Switch field names to proper names [set in Field(name='foo')]
|
# Switch field names to proper names [set in Field(name='foo')]
|
||||||
try:
|
try:
|
||||||
fields = _doc_cls._lookup_field(parts)
|
fields = _doc_cls._lookup_field(parts)
|
||||||
except Exception, e:
|
except Exception as e:
|
||||||
raise InvalidQueryError(e)
|
raise InvalidQueryError(e)
|
||||||
parts = []
|
parts = []
|
||||||
|
|
||||||
|
CachedReferenceField = _import_class("CachedReferenceField")
|
||||||
|
GenericReferenceField = _import_class("GenericReferenceField")
|
||||||
|
|
||||||
cleaned_fields = []
|
cleaned_fields = []
|
||||||
for field in fields:
|
for field in fields:
|
||||||
append_field = True
|
append_field = True
|
||||||
if isinstance(field, basestring):
|
if isinstance(field, str):
|
||||||
parts.append(field)
|
parts.append(field)
|
||||||
append_field = False
|
append_field = False
|
||||||
|
# is last and CachedReferenceField
|
||||||
|
elif isinstance(field, CachedReferenceField) and fields[-1] == field:
|
||||||
|
parts.append("%s._id" % field.db_field)
|
||||||
else:
|
else:
|
||||||
parts.append(field.db_field)
|
parts.append(field.db_field)
|
||||||
|
|
||||||
if append_field:
|
if append_field:
|
||||||
cleaned_fields.append(field)
|
cleaned_fields.append(field)
|
||||||
|
|
||||||
# Convert value to proper value
|
# Convert value to proper value
|
||||||
field = cleaned_fields[-1]
|
field = cleaned_fields[-1]
|
||||||
|
|
||||||
singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not']
|
singular_ops = [None, "ne", "gt", "gte", "lt", "lte", "not"]
|
||||||
singular_ops += STRING_OPERATORS
|
singular_ops += STRING_OPERATORS
|
||||||
if op in singular_ops:
|
if op in singular_ops:
|
||||||
if isinstance(field, basestring):
|
value = field.prepare_query_value(op, value)
|
||||||
if (op in STRING_OPERATORS and
|
|
||||||
isinstance(value, basestring)):
|
if isinstance(field, CachedReferenceField) and value:
|
||||||
StringField = _import_class('StringField')
|
value = value["_id"]
|
||||||
value = StringField.prepare_query_value(op, value)
|
|
||||||
else:
|
elif op in ("in", "nin", "all", "near") and not isinstance(value, dict):
|
||||||
value = field
|
# Raise an error if the in/nin/all/near param is not iterable.
|
||||||
else:
|
value = _prepare_query_for_iterable(field, op, value)
|
||||||
value = field.prepare_query_value(op, value)
|
|
||||||
elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict):
|
# If we're querying a GenericReferenceField, we need to alter the
|
||||||
# 'in', 'nin' and 'all' require a list of values
|
# key depending on the value:
|
||||||
value = [field.prepare_query_value(op, v) for v in value]
|
# * If the value is a DBRef, the key should be "field_name._ref".
|
||||||
|
# * If the value is an ObjectId, the key should be "field_name._ref.$id".
|
||||||
|
if isinstance(field, GenericReferenceField):
|
||||||
|
if isinstance(value, DBRef):
|
||||||
|
parts[-1] += "._ref"
|
||||||
|
elif isinstance(value, ObjectId):
|
||||||
|
parts[-1] += "._ref.$id"
|
||||||
|
|
||||||
# if op and op not in COMPARISON_OPERATORS:
|
# if op and op not in COMPARISON_OPERATORS:
|
||||||
if op:
|
if op:
|
||||||
if op in GEO_OPERATORS:
|
if op in GEO_OPERATORS:
|
||||||
value = _geo_operator(field, op, value)
|
value = _geo_operator(field, op, value)
|
||||||
elif op in CUSTOM_OPERATORS:
|
elif op in ("match", "elemMatch"):
|
||||||
if op == 'match':
|
ListField = _import_class("ListField")
|
||||||
value = field.prepare_query_value(op, value)
|
EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
|
||||||
value = {"$elemMatch": value}
|
if (
|
||||||
|
isinstance(value, dict)
|
||||||
|
and isinstance(field, ListField)
|
||||||
|
and isinstance(field.field, EmbeddedDocumentField)
|
||||||
|
):
|
||||||
|
value = query(field.field.document_type, **value)
|
||||||
else:
|
else:
|
||||||
NotImplementedError("Custom method '%s' has not "
|
value = field.prepare_query_value(op, value)
|
||||||
"been implemented" % op)
|
value = {"$elemMatch": value}
|
||||||
|
elif op in CUSTOM_OPERATORS:
|
||||||
|
NotImplementedError(
|
||||||
|
'Custom method "%s" has not ' "been implemented" % op
|
||||||
|
)
|
||||||
elif op not in STRING_OPERATORS:
|
elif op not in STRING_OPERATORS:
|
||||||
value = {'$' + op: value}
|
value = {"$" + op: value}
|
||||||
|
|
||||||
if negate:
|
if negate:
|
||||||
value = {'$not': value}
|
value = {"$not": value}
|
||||||
|
|
||||||
for i, part in indices:
|
for i, part in indices:
|
||||||
parts.insert(i, part)
|
parts.insert(i, part)
|
||||||
key = '.'.join(parts)
|
|
||||||
if op is None or key not in mongo_query:
|
key = ".".join(parts)
|
||||||
|
|
||||||
|
if key not in mongo_query:
|
||||||
mongo_query[key] = value
|
mongo_query[key] = value
|
||||||
elif key in mongo_query:
|
else:
|
||||||
if key in mongo_query and isinstance(mongo_query[key], dict):
|
if isinstance(mongo_query[key], dict) and isinstance(value, dict):
|
||||||
mongo_query[key].update(value)
|
mongo_query[key].update(value)
|
||||||
# $maxDistance needs to come last - convert to SON
|
# $max/minDistance needs to come last - convert to SON
|
||||||
if '$maxDistance' in mongo_query[key]:
|
value_dict = mongo_query[key]
|
||||||
value_dict = mongo_query[key]
|
if ("$maxDistance" in value_dict or "$minDistance" in value_dict) and (
|
||||||
|
"$near" in value_dict or "$nearSphere" in value_dict
|
||||||
|
):
|
||||||
value_son = SON()
|
value_son = SON()
|
||||||
for k, v in value_dict.iteritems():
|
for k, v in value_dict.items():
|
||||||
if k == '$maxDistance':
|
if k == "$maxDistance" or k == "$minDistance":
|
||||||
continue
|
continue
|
||||||
value_son[k] = v
|
value_son[k] = v
|
||||||
value_son['$maxDistance'] = value_dict['$maxDistance']
|
# Required for MongoDB >= 2.6, may fail when combining
|
||||||
|
# PyMongo 3+ and MongoDB < 2.6
|
||||||
|
near_embedded = False
|
||||||
|
for near_op in ("$near", "$nearSphere"):
|
||||||
|
if isinstance(value_dict.get(near_op), dict):
|
||||||
|
value_son[near_op] = SON(value_son[near_op])
|
||||||
|
if "$maxDistance" in value_dict:
|
||||||
|
value_son[near_op]["$maxDistance"] = value_dict[
|
||||||
|
"$maxDistance"
|
||||||
|
]
|
||||||
|
if "$minDistance" in value_dict:
|
||||||
|
value_son[near_op]["$minDistance"] = value_dict[
|
||||||
|
"$minDistance"
|
||||||
|
]
|
||||||
|
near_embedded = True
|
||||||
|
|
||||||
|
if not near_embedded:
|
||||||
|
if "$maxDistance" in value_dict:
|
||||||
|
value_son["$maxDistance"] = value_dict["$maxDistance"]
|
||||||
|
if "$minDistance" in value_dict:
|
||||||
|
value_son["$minDistance"] = value_dict["$minDistance"]
|
||||||
mongo_query[key] = value_son
|
mongo_query[key] = value_son
|
||||||
else:
|
else:
|
||||||
# Store for manually merging later
|
# Store for manually merging later
|
||||||
@@ -134,50 +214,64 @@ def query(_doc_cls=None, _field_operation=False, **query):
|
|||||||
del mongo_query[k]
|
del mongo_query[k]
|
||||||
if isinstance(v, list):
|
if isinstance(v, list):
|
||||||
value = [{k: val} for val in v]
|
value = [{k: val} for val in v]
|
||||||
if '$and' in mongo_query.keys():
|
if "$and" in mongo_query.keys():
|
||||||
mongo_query['$and'].append(value)
|
mongo_query["$and"].extend(value)
|
||||||
else:
|
else:
|
||||||
mongo_query['$and'] = value
|
mongo_query["$and"] = value
|
||||||
|
|
||||||
return mongo_query
|
return mongo_query
|
||||||
|
|
||||||
|
|
||||||
def update(_doc_cls=None, **update):
|
def update(_doc_cls=None, **update):
|
||||||
"""Transform an update spec from Django-style format to Mongo format.
|
"""Transform an update spec from Django-style format to Mongo
|
||||||
|
format.
|
||||||
"""
|
"""
|
||||||
mongo_update = {}
|
mongo_update = {}
|
||||||
|
|
||||||
for key, value in update.items():
|
for key, value in update.items():
|
||||||
if key == "__raw__":
|
if key == "__raw__":
|
||||||
mongo_update.update(value)
|
mongo_update.update(value)
|
||||||
continue
|
continue
|
||||||
parts = key.split('__')
|
|
||||||
|
parts = key.split("__")
|
||||||
|
|
||||||
|
# if there is no operator, default to 'set'
|
||||||
|
if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS:
|
||||||
|
parts.insert(0, "set")
|
||||||
|
|
||||||
# Check for an operator and transform to mongo-style if there is
|
# Check for an operator and transform to mongo-style if there is
|
||||||
op = None
|
op = None
|
||||||
if parts[0] in UPDATE_OPERATORS:
|
if parts[0] in UPDATE_OPERATORS:
|
||||||
op = parts.pop(0)
|
op = parts.pop(0)
|
||||||
# Convert Pythonic names to Mongo equivalents
|
# Convert Pythonic names to Mongo equivalents
|
||||||
if op in ('push_all', 'pull_all'):
|
operator_map = {
|
||||||
op = op.replace('_all', 'All')
|
"push_all": "pushAll",
|
||||||
elif op == 'dec':
|
"pull_all": "pullAll",
|
||||||
|
"dec": "inc",
|
||||||
|
"add_to_set": "addToSet",
|
||||||
|
"set_on_insert": "setOnInsert",
|
||||||
|
}
|
||||||
|
if op == "dec":
|
||||||
# Support decrement by flipping a positive value's sign
|
# Support decrement by flipping a positive value's sign
|
||||||
# and using 'inc'
|
# and using 'inc'
|
||||||
op = 'inc'
|
value = -value
|
||||||
if value > 0:
|
# If the operator doesn't found from operator map, the op value
|
||||||
value = -value
|
# will stay unchanged
|
||||||
elif op == 'add_to_set':
|
op = operator_map.get(op, op)
|
||||||
op = 'addToSet'
|
|
||||||
elif op == 'set_on_insert':
|
|
||||||
op = "setOnInsert"
|
|
||||||
|
|
||||||
match = None
|
match = None
|
||||||
if parts[-1] in COMPARISON_OPERATORS:
|
if parts[-1] in COMPARISON_OPERATORS:
|
||||||
match = parts.pop()
|
match = parts.pop()
|
||||||
|
|
||||||
|
# Allow to escape operator-like field name by __
|
||||||
|
if len(parts) > 1 and parts[-1] == "":
|
||||||
|
parts.pop()
|
||||||
|
|
||||||
if _doc_cls:
|
if _doc_cls:
|
||||||
# Switch field names to proper names [set in Field(name='foo')]
|
# Switch field names to proper names [set in Field(name='foo')]
|
||||||
try:
|
try:
|
||||||
fields = _doc_cls._lookup_field(parts)
|
fields = _doc_cls._lookup_field(parts)
|
||||||
except Exception, e:
|
except Exception as e:
|
||||||
raise InvalidQueryError(e)
|
raise InvalidQueryError(e)
|
||||||
parts = []
|
parts = []
|
||||||
|
|
||||||
@@ -185,10 +279,10 @@ def update(_doc_cls=None, **update):
|
|||||||
appended_sub_field = False
|
appended_sub_field = False
|
||||||
for field in fields:
|
for field in fields:
|
||||||
append_field = True
|
append_field = True
|
||||||
if isinstance(field, basestring):
|
if isinstance(field, str):
|
||||||
# Convert the S operator to $
|
# Convert the S operator to $
|
||||||
if field == 'S':
|
if field == "S":
|
||||||
field = '$'
|
field = "$"
|
||||||
parts.append(field)
|
parts.append(field)
|
||||||
append_field = False
|
append_field = False
|
||||||
else:
|
else:
|
||||||
@@ -196,7 +290,7 @@ def update(_doc_cls=None, **update):
|
|||||||
if append_field:
|
if append_field:
|
||||||
appended_sub_field = False
|
appended_sub_field = False
|
||||||
cleaned_fields.append(field)
|
cleaned_fields.append(field)
|
||||||
if hasattr(field, 'field'):
|
if hasattr(field, "field"):
|
||||||
cleaned_fields.append(field.field)
|
cleaned_fields.append(field.field)
|
||||||
appended_sub_field = True
|
appended_sub_field = True
|
||||||
|
|
||||||
@@ -210,45 +304,58 @@ def update(_doc_cls=None, **update):
|
|||||||
if isinstance(field, GeoJsonBaseField):
|
if isinstance(field, GeoJsonBaseField):
|
||||||
value = field.to_mongo(value)
|
value = field.to_mongo(value)
|
||||||
|
|
||||||
if op in (None, 'set', 'push', 'pull'):
|
if op == "pull":
|
||||||
|
if field.required or value is not None:
|
||||||
|
if match in ("in", "nin") and not isinstance(value, dict):
|
||||||
|
value = _prepare_query_for_iterable(field, op, value)
|
||||||
|
else:
|
||||||
|
value = field.prepare_query_value(op, value)
|
||||||
|
elif op == "push" and isinstance(value, (list, tuple, set)):
|
||||||
|
value = [field.prepare_query_value(op, v) for v in value]
|
||||||
|
elif op in (None, "set", "push"):
|
||||||
if field.required or value is not None:
|
if field.required or value is not None:
|
||||||
value = field.prepare_query_value(op, value)
|
value = field.prepare_query_value(op, value)
|
||||||
elif op in ('pushAll', 'pullAll'):
|
elif op in ("pushAll", "pullAll"):
|
||||||
value = [field.prepare_query_value(op, v) for v in value]
|
value = [field.prepare_query_value(op, v) for v in value]
|
||||||
elif op in ('addToSet', 'setOnInsert'):
|
elif op in ("addToSet", "setOnInsert"):
|
||||||
if isinstance(value, (list, tuple, set)):
|
if isinstance(value, (list, tuple, set)):
|
||||||
value = [field.prepare_query_value(op, v) for v in value]
|
value = [field.prepare_query_value(op, v) for v in value]
|
||||||
elif field.required or value is not None:
|
elif field.required or value is not None:
|
||||||
value = field.prepare_query_value(op, value)
|
value = field.prepare_query_value(op, value)
|
||||||
elif op == "unset":
|
elif op == "unset":
|
||||||
value = 1
|
value = 1
|
||||||
|
elif op == "inc":
|
||||||
|
value = field.prepare_query_value(op, value)
|
||||||
|
|
||||||
if match:
|
if match:
|
||||||
match = '$' + match
|
match = "$" + match
|
||||||
value = {match: value}
|
value = {match: value}
|
||||||
|
|
||||||
key = '.'.join(parts)
|
key = ".".join(parts)
|
||||||
|
|
||||||
if not op:
|
if "pull" in op and "." in key:
|
||||||
raise InvalidQueryError("Updates must supply an operation "
|
|
||||||
"eg: set__FIELD=value")
|
|
||||||
|
|
||||||
if 'pull' in op and '.' in key:
|
|
||||||
# Dot operators don't work on pull operations
|
# Dot operators don't work on pull operations
|
||||||
# unless they point to a list field
|
# unless they point to a list field
|
||||||
# Otherwise it uses nested dict syntax
|
# Otherwise it uses nested dict syntax
|
||||||
if op == 'pullAll':
|
if op == "pullAll":
|
||||||
raise InvalidQueryError("pullAll operations only support "
|
raise InvalidQueryError(
|
||||||
"a single field depth")
|
"pullAll operations only support a single field depth"
|
||||||
|
)
|
||||||
|
|
||||||
# Look for the last list field and use dot notation until there
|
# Look for the last list field and use dot notation until there
|
||||||
field_classes = [c.__class__ for c in cleaned_fields]
|
field_classes = [c.__class__ for c in cleaned_fields]
|
||||||
field_classes.reverse()
|
field_classes.reverse()
|
||||||
ListField = _import_class('ListField')
|
ListField = _import_class("ListField")
|
||||||
if ListField in field_classes:
|
EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField")
|
||||||
# Join all fields via dot notation to the last ListField
|
if ListField in field_classes or EmbeddedDocumentListField in field_classes:
|
||||||
|
# Join all fields via dot notation to the last ListField or EmbeddedDocumentListField
|
||||||
# Then process as normal
|
# Then process as normal
|
||||||
last_listField = len(cleaned_fields) - field_classes.index(ListField)
|
if ListField in field_classes:
|
||||||
|
_check_field = ListField
|
||||||
|
else:
|
||||||
|
_check_field = EmbeddedDocumentListField
|
||||||
|
|
||||||
|
last_listField = len(cleaned_fields) - field_classes.index(_check_field)
|
||||||
key = ".".join(parts[:last_listField])
|
key = ".".join(parts[:last_listField])
|
||||||
parts = parts[last_listField:]
|
parts = parts[last_listField:]
|
||||||
parts.insert(0, key)
|
parts.insert(0, key)
|
||||||
@@ -256,12 +363,28 @@ def update(_doc_cls=None, **update):
|
|||||||
parts.reverse()
|
parts.reverse()
|
||||||
for key in parts:
|
for key in parts:
|
||||||
value = {key: value}
|
value = {key: value}
|
||||||
elif op == 'addToSet' and isinstance(value, list):
|
elif op == "addToSet" and isinstance(value, list):
|
||||||
value = {key: {"$each": value}}
|
value = {key: {"$each": value}}
|
||||||
|
elif op in ("push", "pushAll"):
|
||||||
|
if parts[-1].isdigit():
|
||||||
|
key = ".".join(parts[0:-1])
|
||||||
|
position = int(parts[-1])
|
||||||
|
# $position expects an iterable. If pushing a single value,
|
||||||
|
# wrap it in a list.
|
||||||
|
if not isinstance(value, (set, tuple, list)):
|
||||||
|
value = [value]
|
||||||
|
value = {key: {"$each": value, "$position": position}}
|
||||||
|
else:
|
||||||
|
if op == "pushAll":
|
||||||
|
op = "push" # convert to non-deprecated keyword
|
||||||
|
if not isinstance(value, (set, tuple, list)):
|
||||||
|
value = [value]
|
||||||
|
value = {key: {"$each": value}}
|
||||||
|
else:
|
||||||
|
value = {key: value}
|
||||||
else:
|
else:
|
||||||
value = {key: value}
|
value = {key: value}
|
||||||
key = '$' + op
|
key = "$" + op
|
||||||
|
|
||||||
if key not in mongo_update:
|
if key not in mongo_update:
|
||||||
mongo_update[key] = value
|
mongo_update[key] = value
|
||||||
elif key in mongo_update and isinstance(mongo_update[key], dict):
|
elif key in mongo_update and isinstance(mongo_update[key], dict):
|
||||||
@@ -271,25 +394,28 @@ def update(_doc_cls=None, **update):
|
|||||||
|
|
||||||
|
|
||||||
def _geo_operator(field, op, value):
|
def _geo_operator(field, op, value):
|
||||||
"""Helper to return the query for a given geo query"""
|
"""Helper to return the query for a given geo query."""
|
||||||
if field._geo_index == pymongo.GEO2D:
|
if op == "max_distance":
|
||||||
|
value = {"$maxDistance": value}
|
||||||
|
elif op == "min_distance":
|
||||||
|
value = {"$minDistance": value}
|
||||||
|
elif field._geo_index == pymongo.GEO2D:
|
||||||
if op == "within_distance":
|
if op == "within_distance":
|
||||||
value = {'$within': {'$center': value}}
|
value = {"$within": {"$center": value}}
|
||||||
elif op == "within_spherical_distance":
|
elif op == "within_spherical_distance":
|
||||||
value = {'$within': {'$centerSphere': value}}
|
value = {"$within": {"$centerSphere": value}}
|
||||||
elif op == "within_polygon":
|
elif op == "within_polygon":
|
||||||
value = {'$within': {'$polygon': value}}
|
value = {"$within": {"$polygon": value}}
|
||||||
elif op == "near":
|
elif op == "near":
|
||||||
value = {'$near': value}
|
value = {"$near": value}
|
||||||
elif op == "near_sphere":
|
elif op == "near_sphere":
|
||||||
value = {'$nearSphere': value}
|
value = {"$nearSphere": value}
|
||||||
elif op == 'within_box':
|
elif op == "within_box":
|
||||||
value = {'$within': {'$box': value}}
|
value = {"$within": {"$box": value}}
|
||||||
elif op == "max_distance":
|
|
||||||
value = {'$maxDistance': value}
|
|
||||||
else:
|
else:
|
||||||
raise NotImplementedError("Geo method '%s' has not "
|
raise NotImplementedError(
|
||||||
"been implemented for a GeoPointField" % op)
|
'Geo method "%s" has not been ' "implemented for a GeoPointField" % op
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
if op == "geo_within":
|
if op == "geo_within":
|
||||||
value = {"$geoWithin": _infer_geometry(value)}
|
value = {"$geoWithin": _infer_geometry(value)}
|
||||||
@@ -304,40 +430,73 @@ def _geo_operator(field, op, value):
|
|||||||
elif op == "geo_intersects":
|
elif op == "geo_intersects":
|
||||||
value = {"$geoIntersects": _infer_geometry(value)}
|
value = {"$geoIntersects": _infer_geometry(value)}
|
||||||
elif op == "near":
|
elif op == "near":
|
||||||
value = {'$near': _infer_geometry(value)}
|
value = {"$near": _infer_geometry(value)}
|
||||||
elif op == "max_distance":
|
|
||||||
value = {'$maxDistance': value}
|
|
||||||
else:
|
else:
|
||||||
raise NotImplementedError("Geo method '%s' has not "
|
raise NotImplementedError(
|
||||||
"been implemented for a %s " % (op, field._name))
|
'Geo method "{}" has not been implemented for a {} '.format(
|
||||||
|
op, field._name
|
||||||
|
)
|
||||||
|
)
|
||||||
return value
|
return value
|
||||||
|
|
||||||
|
|
||||||
def _infer_geometry(value):
|
def _infer_geometry(value):
|
||||||
"""Helper method that tries to infer the $geometry shape for a given value"""
|
"""Helper method that tries to infer the $geometry shape for a
|
||||||
|
given value.
|
||||||
|
"""
|
||||||
if isinstance(value, dict):
|
if isinstance(value, dict):
|
||||||
if "$geometry" in value:
|
if "$geometry" in value:
|
||||||
return value
|
return value
|
||||||
elif 'coordinates' in value and 'type' in value:
|
elif "coordinates" in value and "type" in value:
|
||||||
return {"$geometry": value}
|
return {"$geometry": value}
|
||||||
raise InvalidQueryError("Invalid $geometry dictionary should have "
|
raise InvalidQueryError(
|
||||||
"type and coordinates keys")
|
"Invalid $geometry dictionary should have type and coordinates keys"
|
||||||
|
)
|
||||||
elif isinstance(value, (list, set)):
|
elif isinstance(value, (list, set)):
|
||||||
|
# TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon?
|
||||||
|
|
||||||
try:
|
try:
|
||||||
value[0][0][0]
|
value[0][0][0]
|
||||||
return {"$geometry": {"type": "Polygon", "coordinates": value}}
|
return {"$geometry": {"type": "Polygon", "coordinates": value}}
|
||||||
except:
|
except (TypeError, IndexError):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
try:
|
try:
|
||||||
value[0][0]
|
value[0][0]
|
||||||
return {"$geometry": {"type": "LineString", "coordinates": value}}
|
return {"$geometry": {"type": "LineString", "coordinates": value}}
|
||||||
except:
|
except (TypeError, IndexError):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
try:
|
try:
|
||||||
value[0]
|
value[0]
|
||||||
return {"$geometry": {"type": "Point", "coordinates": value}}
|
return {"$geometry": {"type": "Point", "coordinates": value}}
|
||||||
except:
|
except (TypeError, IndexError):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
raise InvalidQueryError("Invalid $geometry data. Can be either a dictionary "
|
raise InvalidQueryError(
|
||||||
"or (nested) lists of coordinate(s)")
|
"Invalid $geometry data. Can be either a "
|
||||||
|
"dictionary or (nested) lists of coordinate(s)"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _prepare_query_for_iterable(field, op, value):
|
||||||
|
# We need a special check for BaseDocument, because - although it's iterable - using
|
||||||
|
# it as such in the context of this method is most definitely a mistake.
|
||||||
|
BaseDocument = _import_class("BaseDocument")
|
||||||
|
|
||||||
|
if isinstance(value, BaseDocument):
|
||||||
|
raise TypeError(
|
||||||
|
"When using the `in`, `nin`, `all`, or "
|
||||||
|
"`near`-operators you can't use a "
|
||||||
|
"`Document`, you must wrap your object "
|
||||||
|
"in a list (object -> [object])."
|
||||||
|
)
|
||||||
|
|
||||||
|
if not hasattr(value, "__iter__"):
|
||||||
|
raise TypeError(
|
||||||
|
"The `in`, `nin`, `all`, or "
|
||||||
|
"`near`-operators must be applied to an "
|
||||||
|
"iterable (e.g. a list)."
|
||||||
|
)
|
||||||
|
|
||||||
|
return [field.prepare_query_value(op, v) for v in value]
|
||||||
|
|||||||
@@ -1,14 +1,18 @@
|
|||||||
import copy
|
import copy
|
||||||
|
import warnings
|
||||||
|
|
||||||
from mongoengine.errors import InvalidQueryError
|
from mongoengine.errors import InvalidQueryError
|
||||||
from mongoengine.python_support import product, reduce
|
|
||||||
|
|
||||||
from mongoengine.queryset import transform
|
from mongoengine.queryset import transform
|
||||||
|
|
||||||
__all__ = ('Q',)
|
__all__ = ("Q", "QNode")
|
||||||
|
|
||||||
|
|
||||||
class QNodeVisitor(object):
|
def warn_empty_is_deprecated():
|
||||||
|
msg = "'empty' property is deprecated in favour of using 'not bool(filter)'"
|
||||||
|
warnings.warn(msg, DeprecationWarning, stacklevel=2)
|
||||||
|
|
||||||
|
|
||||||
|
class QNodeVisitor:
|
||||||
"""Base visitor class for visiting Q-object nodes in a query tree.
|
"""Base visitor class for visiting Q-object nodes in a query tree.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@@ -28,7 +32,7 @@ class DuplicateQueryConditionsError(InvalidQueryError):
|
|||||||
|
|
||||||
|
|
||||||
class SimplificationVisitor(QNodeVisitor):
|
class SimplificationVisitor(QNodeVisitor):
|
||||||
"""Simplifies query trees by combinging unnecessary 'and' connection nodes
|
"""Simplifies query trees by combining unnecessary 'and' connection nodes
|
||||||
into a single Q-object.
|
into a single Q-object.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@@ -80,9 +84,8 @@ class QueryCompilerVisitor(QNodeVisitor):
|
|||||||
return transform.query(self.document, **query.query)
|
return transform.query(self.document, **query.query)
|
||||||
|
|
||||||
|
|
||||||
class QNode(object):
|
class QNode:
|
||||||
"""Base class for nodes in query trees.
|
"""Base class for nodes in query trees."""
|
||||||
"""
|
|
||||||
|
|
||||||
AND = 0
|
AND = 0
|
||||||
OR = 1
|
OR = 1
|
||||||
@@ -96,18 +99,22 @@ class QNode(object):
|
|||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
def _combine(self, other, operation):
|
def _combine(self, other, operation):
|
||||||
"""Combine this node with another node into a QCombination object.
|
"""Combine this node with another node into a QCombination
|
||||||
|
object.
|
||||||
"""
|
"""
|
||||||
if getattr(other, 'empty', True):
|
# If the other Q() is empty, ignore it and just use `self`.
|
||||||
|
if not bool(other):
|
||||||
return self
|
return self
|
||||||
|
|
||||||
if self.empty:
|
# Or if this Q is empty, ignore it and just use `other`.
|
||||||
|
if not bool(self):
|
||||||
return other
|
return other
|
||||||
|
|
||||||
return QCombination(operation, [self, other])
|
return QCombination(operation, [self, other])
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def empty(self):
|
def empty(self):
|
||||||
|
warn_empty_is_deprecated()
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def __or__(self, other):
|
def __or__(self, other):
|
||||||
@@ -118,8 +125,8 @@ class QNode(object):
|
|||||||
|
|
||||||
|
|
||||||
class QCombination(QNode):
|
class QCombination(QNode):
|
||||||
"""Represents the combination of several conditions by a given logical
|
"""Represents the combination of several conditions by a given
|
||||||
operator.
|
logical operator.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, operation, children):
|
def __init__(self, operation, children):
|
||||||
@@ -133,6 +140,13 @@ class QCombination(QNode):
|
|||||||
else:
|
else:
|
||||||
self.children.append(node)
|
self.children.append(node)
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
op = " & " if self.operation is self.AND else " | "
|
||||||
|
return "(%s)" % op.join([repr(node) for node in self.children])
|
||||||
|
|
||||||
|
def __bool__(self):
|
||||||
|
return bool(self.children)
|
||||||
|
|
||||||
def accept(self, visitor):
|
def accept(self, visitor):
|
||||||
for i in range(len(self.children)):
|
for i in range(len(self.children)):
|
||||||
if isinstance(self.children[i], QNode):
|
if isinstance(self.children[i], QNode):
|
||||||
@@ -142,8 +156,16 @@ class QCombination(QNode):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def empty(self):
|
def empty(self):
|
||||||
|
warn_empty_is_deprecated()
|
||||||
return not bool(self.children)
|
return not bool(self.children)
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
return (
|
||||||
|
self.__class__ == other.__class__
|
||||||
|
and self.operation == other.operation
|
||||||
|
and self.children == other.children
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class Q(QNode):
|
class Q(QNode):
|
||||||
"""A simple query object, used in a query tree to build up more complex
|
"""A simple query object, used in a query tree to build up more complex
|
||||||
@@ -153,9 +175,19 @@ class Q(QNode):
|
|||||||
def __init__(self, **query):
|
def __init__(self, **query):
|
||||||
self.query = query
|
self.query = query
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return "Q(**%s)" % repr(self.query)
|
||||||
|
|
||||||
|
def __bool__(self):
|
||||||
|
return bool(self.query)
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
return self.__class__ == other.__class__ and self.query == other.query
|
||||||
|
|
||||||
def accept(self, visitor):
|
def accept(self, visitor):
|
||||||
return visitor.visit_query(self)
|
return visitor.visit_query(self)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def empty(self):
|
def empty(self):
|
||||||
|
warn_empty_is_deprecated()
|
||||||
return not bool(self.query)
|
return not bool(self.query)
|
||||||
|
|||||||
@@ -1,18 +1,25 @@
|
|||||||
# -*- coding: utf-8 -*-
|
__all__ = (
|
||||||
|
"pre_init",
|
||||||
__all__ = ['pre_init', 'post_init', 'pre_save', 'pre_save_post_validation',
|
"post_init",
|
||||||
'post_save', 'pre_delete', 'post_delete']
|
"pre_save",
|
||||||
|
"pre_save_post_validation",
|
||||||
|
"post_save",
|
||||||
|
"pre_delete",
|
||||||
|
"post_delete",
|
||||||
|
)
|
||||||
|
|
||||||
signals_available = False
|
signals_available = False
|
||||||
try:
|
try:
|
||||||
from blinker import Namespace
|
from blinker import Namespace
|
||||||
|
|
||||||
signals_available = True
|
signals_available = True
|
||||||
except ImportError:
|
except ImportError:
|
||||||
class Namespace(object):
|
|
||||||
|
class Namespace:
|
||||||
def signal(self, name, doc=None):
|
def signal(self, name, doc=None):
|
||||||
return _FakeSignal(name, doc)
|
return _FakeSignal(name, doc)
|
||||||
|
|
||||||
class _FakeSignal(object):
|
class _FakeSignal:
|
||||||
"""If blinker is unavailable, create a fake class with the same
|
"""If blinker is unavailable, create a fake class with the same
|
||||||
interface that allows sending of signals but will fail with an
|
interface that allows sending of signals but will fail with an
|
||||||
error on anything else. Instead of doing anything on send, it
|
error on anything else. Instead of doing anything on send, it
|
||||||
@@ -24,24 +31,29 @@ except ImportError:
|
|||||||
self.__doc__ = doc
|
self.__doc__ = doc
|
||||||
|
|
||||||
def _fail(self, *args, **kwargs):
|
def _fail(self, *args, **kwargs):
|
||||||
raise RuntimeError('signalling support is unavailable '
|
raise RuntimeError(
|
||||||
'because the blinker library is '
|
"signalling support is unavailable "
|
||||||
'not installed.')
|
"because the blinker library is "
|
||||||
send = lambda *a, **kw: None
|
"not installed."
|
||||||
connect = disconnect = has_receivers_for = receivers_for = \
|
)
|
||||||
temporarily_connected_to = _fail
|
|
||||||
|
send = lambda *a, **kw: None # noqa
|
||||||
|
connect = (
|
||||||
|
disconnect
|
||||||
|
) = has_receivers_for = receivers_for = temporarily_connected_to = _fail
|
||||||
del _fail
|
del _fail
|
||||||
|
|
||||||
|
|
||||||
# the namespace for code signals. If you are not mongoengine code, do
|
# the namespace for code signals. If you are not mongoengine code, do
|
||||||
# not put signals in here. Create your own namespace instead.
|
# not put signals in here. Create your own namespace instead.
|
||||||
_signals = Namespace()
|
_signals = Namespace()
|
||||||
|
|
||||||
pre_init = _signals.signal('pre_init')
|
pre_init = _signals.signal("pre_init")
|
||||||
post_init = _signals.signal('post_init')
|
post_init = _signals.signal("post_init")
|
||||||
pre_save = _signals.signal('pre_save')
|
pre_save = _signals.signal("pre_save")
|
||||||
pre_save_post_validation = _signals.signal('pre_save_post_validation')
|
pre_save_post_validation = _signals.signal("pre_save_post_validation")
|
||||||
post_save = _signals.signal('post_save')
|
post_save = _signals.signal("post_save")
|
||||||
pre_delete = _signals.signal('pre_delete')
|
pre_delete = _signals.signal("pre_delete")
|
||||||
post_delete = _signals.signal('post_delete')
|
post_delete = _signals.signal("post_delete")
|
||||||
pre_bulk_insert = _signals.signal('pre_bulk_insert')
|
pre_bulk_insert = _signals.signal("pre_bulk_insert")
|
||||||
post_bulk_insert = _signals.signal('post_bulk_insert')
|
post_bulk_insert = _signals.signal("post_bulk_insert")
|
||||||
|
|||||||
@@ -5,7 +5,7 @@
|
|||||||
%define srcname mongoengine
|
%define srcname mongoengine
|
||||||
|
|
||||||
Name: python-%{srcname}
|
Name: python-%{srcname}
|
||||||
Version: 0.8.5
|
Version: 0.8.7
|
||||||
Release: 1%{?dist}
|
Release: 1%{?dist}
|
||||||
Summary: A Python Document-Object Mapper for working with MongoDB
|
Summary: A Python Document-Object Mapper for working with MongoDB
|
||||||
|
|
||||||
@@ -51,4 +51,4 @@ rm -rf $RPM_BUILD_ROOT
|
|||||||
# %{python_sitearch}/*
|
# %{python_sitearch}/*
|
||||||
|
|
||||||
%changelog
|
%changelog
|
||||||
* See: http://docs.mongoengine.org/en/latest/changelog.html
|
* See: http://docs.mongoengine.org/en/latest/changelog.html
|
||||||
|
|||||||
8
requirements-dev.txt
Normal file
8
requirements-dev.txt
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
black
|
||||||
|
flake8
|
||||||
|
flake8-import-order
|
||||||
|
pre-commit
|
||||||
|
pytest
|
||||||
|
ipdb
|
||||||
|
ipython
|
||||||
|
tox
|
||||||
@@ -1 +0,0 @@
|
|||||||
pymongo
|
|
||||||
21
setup.cfg
21
setup.cfg
@@ -1,11 +1,10 @@
|
|||||||
[nosetests]
|
[flake8]
|
||||||
verbosity = 3
|
ignore=E501,F401,F403,F405,I201,I202,W504, W605, W503
|
||||||
detailed-errors = 1
|
exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests
|
||||||
#with-coverage = 1
|
max-complexity=47
|
||||||
#cover-erase = 1
|
application-import-names=mongoengine,tests
|
||||||
#cover-html = 1
|
|
||||||
#cover-html-dir = ../htmlcov
|
[tool:pytest]
|
||||||
#cover-package = mongoengine
|
# Limits the discovery to tests directory
|
||||||
py3where = build
|
# avoids that it runs for instance the benchmark
|
||||||
where = tests
|
testpaths = tests
|
||||||
#tests = document/__init__.py
|
|
||||||
|
|||||||
173
setup.py
173
setup.py
@@ -1,6 +1,9 @@
|
|||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
from setuptools import setup, find_packages
|
|
||||||
|
from pkg_resources import normalize_path
|
||||||
|
from setuptools import find_packages, setup
|
||||||
|
from setuptools.command.test import test as TestCommand
|
||||||
|
|
||||||
# Hack to silence atexit traceback in newer python versions
|
# Hack to silence atexit traceback in newer python versions
|
||||||
try:
|
try:
|
||||||
@@ -8,71 +11,137 @@ try:
|
|||||||
except ImportError:
|
except ImportError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
DESCRIPTION = 'MongoEngine is a Python Object-Document ' + \
|
DESCRIPTION = "MongoEngine is a Python Object-Document Mapper for working with MongoDB."
|
||||||
'Mapper for working with MongoDB.'
|
|
||||||
LONG_DESCRIPTION = None
|
|
||||||
try:
|
try:
|
||||||
LONG_DESCRIPTION = open('README.rst').read()
|
with open("README.rst") as fin:
|
||||||
except:
|
LONG_DESCRIPTION = fin.read()
|
||||||
pass
|
except Exception:
|
||||||
|
LONG_DESCRIPTION = None
|
||||||
|
|
||||||
|
|
||||||
def get_version(version_tuple):
|
def get_version(version_tuple):
|
||||||
if not isinstance(version_tuple[-1], int):
|
"""Return the version tuple as a string, e.g. for (0, 10, 7),
|
||||||
return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1]
|
return '0.10.7'.
|
||||||
return '.'.join(map(str, version_tuple))
|
"""
|
||||||
|
return ".".join(map(str, version_tuple))
|
||||||
|
|
||||||
|
|
||||||
|
class PyTest(TestCommand):
|
||||||
|
"""Will force pytest to search for tests inside the build directory
|
||||||
|
for 2to3 converted code (used by tox), instead of the current directory.
|
||||||
|
Required as long as we need 2to3
|
||||||
|
|
||||||
|
Known Limitation: https://tox.readthedocs.io/en/latest/example/pytest.html#known-issues-and-limitations
|
||||||
|
Source: https://www.hackzine.org/python-testing-with-pytest-and-2to3-plus-tox-and-travis-ci.html
|
||||||
|
"""
|
||||||
|
|
||||||
|
# https://pytest.readthedocs.io/en/2.7.3/goodpractises.html#integration-with-setuptools-test-commands
|
||||||
|
# Allows to provide pytest command argument through the test runner command `python setup.py test`
|
||||||
|
# e.g: `python setup.py test -a "-k=test"`
|
||||||
|
# This only works for 1 argument though
|
||||||
|
user_options = [("pytest-args=", "a", "Arguments to pass to py.test")]
|
||||||
|
|
||||||
|
def initialize_options(self):
|
||||||
|
TestCommand.initialize_options(self)
|
||||||
|
self.pytest_args = ""
|
||||||
|
|
||||||
|
def finalize_options(self):
|
||||||
|
TestCommand.finalize_options(self)
|
||||||
|
self.test_args = ["tests"]
|
||||||
|
self.test_suite = True
|
||||||
|
|
||||||
|
def run_tests(self):
|
||||||
|
# import here, cause outside the eggs aren't loaded
|
||||||
|
from pkg_resources import _namespace_packages
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
# Purge modules under test from sys.modules. The test loader will
|
||||||
|
# re-import them from the build location. Required when 2to3 is used
|
||||||
|
# with namespace packages.
|
||||||
|
if sys.version_info >= (3,) and getattr(self.distribution, "use_2to3", False):
|
||||||
|
module = self.test_args[-1].split(".")[0]
|
||||||
|
if module in _namespace_packages:
|
||||||
|
del_modules = []
|
||||||
|
if module in sys.modules:
|
||||||
|
del_modules.append(module)
|
||||||
|
module += "."
|
||||||
|
for name in sys.modules:
|
||||||
|
if name.startswith(module):
|
||||||
|
del_modules.append(name)
|
||||||
|
map(sys.modules.__delitem__, del_modules)
|
||||||
|
|
||||||
|
# Run on the build directory for 2to3-built code
|
||||||
|
# This will prevent the old 2.x code from being found
|
||||||
|
# by py.test discovery mechanism, that apparently
|
||||||
|
# ignores sys.path..
|
||||||
|
ei_cmd = self.get_finalized_command("egg_info")
|
||||||
|
self.test_args = [normalize_path(ei_cmd.egg_base)]
|
||||||
|
|
||||||
|
cmd_args = self.test_args + ([self.pytest_args] if self.pytest_args else [])
|
||||||
|
errno = pytest.main(cmd_args)
|
||||||
|
sys.exit(errno)
|
||||||
|
|
||||||
|
|
||||||
# Dirty hack to get version number from monogengine/__init__.py - we can't
|
# Dirty hack to get version number from monogengine/__init__.py - we can't
|
||||||
# import it as it depends on PyMongo and PyMongo isn't installed until this
|
# import it as it depends on PyMongo and PyMongo isn't installed until this
|
||||||
# file is read
|
# file is read
|
||||||
init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py')
|
init = os.path.join(os.path.dirname(__file__), "mongoengine", "__init__.py")
|
||||||
version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0]
|
version_line = list(filter(lambda l: l.startswith("VERSION"), open(init)))[0]
|
||||||
|
|
||||||
VERSION = get_version(eval(version_line.split('=')[-1]))
|
VERSION = get_version(eval(version_line.split("=")[-1]))
|
||||||
print(VERSION)
|
|
||||||
|
|
||||||
CLASSIFIERS = [
|
CLASSIFIERS = [
|
||||||
'Development Status :: 4 - Beta',
|
"Development Status :: 5 - Production/Stable",
|
||||||
'Intended Audience :: Developers',
|
"Intended Audience :: Developers",
|
||||||
'License :: OSI Approved :: MIT License',
|
"License :: OSI Approved :: MIT License",
|
||||||
'Operating System :: OS Independent',
|
"Operating System :: OS Independent",
|
||||||
'Programming Language :: Python',
|
"Programming Language :: Python",
|
||||||
"Programming Language :: Python :: 2",
|
|
||||||
"Programming Language :: Python :: 2.6",
|
|
||||||
"Programming Language :: Python :: 2.7",
|
|
||||||
"Programming Language :: Python :: 3",
|
"Programming Language :: Python :: 3",
|
||||||
"Programming Language :: Python :: 3.1",
|
"Programming Language :: Python :: 3.5",
|
||||||
"Programming Language :: Python :: 3.2",
|
"Programming Language :: Python :: 3.6",
|
||||||
|
"Programming Language :: Python :: 3.7",
|
||||||
|
"Programming Language :: Python :: 3.8",
|
||||||
"Programming Language :: Python :: Implementation :: CPython",
|
"Programming Language :: Python :: Implementation :: CPython",
|
||||||
'Topic :: Database',
|
"Programming Language :: Python :: Implementation :: PyPy",
|
||||||
'Topic :: Software Development :: Libraries :: Python Modules',
|
"Topic :: Database",
|
||||||
|
"Topic :: Software Development :: Libraries :: Python Modules",
|
||||||
]
|
]
|
||||||
|
|
||||||
extra_opts = {"packages": find_packages(exclude=["tests", "tests.*"])}
|
extra_opts = {
|
||||||
if sys.version_info[0] == 3:
|
"packages": find_packages(exclude=["tests", "tests.*"]),
|
||||||
extra_opts['use_2to3'] = True
|
"tests_require": [
|
||||||
extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2==2.6', 'django>=1.5.1']
|
"pytest<5.0",
|
||||||
if "test" in sys.argv or "nosetests" in sys.argv:
|
"pytest-cov",
|
||||||
extra_opts['packages'] = find_packages()
|
"coverage<5.0", # recent coverage switched to sqlite format for the .coverage file which isn't handled properly by coveralls
|
||||||
extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]}
|
"blinker",
|
||||||
else:
|
"Pillow>=2.0.0, <7.0.0", # 7.0.0 dropped Python2 support
|
||||||
extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'PIL', 'jinja2>=2.6', 'python-dateutil']
|
],
|
||||||
|
}
|
||||||
|
|
||||||
setup(name='mongoengine',
|
if "test" in sys.argv:
|
||||||
version=VERSION,
|
extra_opts["packages"] = find_packages()
|
||||||
author='Harry Marr',
|
extra_opts["package_data"] = {
|
||||||
author_email='harry.marr@{nospam}gmail.com',
|
"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]
|
||||||
maintainer="Ross Lawley",
|
}
|
||||||
maintainer_email="ross.lawley@{nospam}gmail.com",
|
|
||||||
url='http://mongoengine.org/',
|
setup(
|
||||||
download_url='https://github.com/MongoEngine/mongoengine/tarball/master',
|
name="mongoengine",
|
||||||
license='MIT',
|
version=VERSION,
|
||||||
include_package_data=True,
|
author="Harry Marr",
|
||||||
description=DESCRIPTION,
|
author_email="harry.marr@gmail.com",
|
||||||
long_description=LONG_DESCRIPTION,
|
maintainer="Stefan Wojcik",
|
||||||
platforms=['any'],
|
maintainer_email="wojcikstefan@gmail.com",
|
||||||
classifiers=CLASSIFIERS,
|
url="http://mongoengine.org/",
|
||||||
install_requires=['pymongo>=2.5'],
|
download_url="https://github.com/MongoEngine/mongoengine/tarball/master",
|
||||||
test_suite='nose.collector',
|
license="MIT",
|
||||||
**extra_opts
|
include_package_data=True,
|
||||||
|
description=DESCRIPTION,
|
||||||
|
long_description=LONG_DESCRIPTION,
|
||||||
|
platforms=["any"],
|
||||||
|
classifiers=CLASSIFIERS,
|
||||||
|
python_requires=">=3.5",
|
||||||
|
install_requires=["pymongo>=3.4, <4.0"],
|
||||||
|
cmdclass={"test": PyTest},
|
||||||
|
**extra_opts
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -1,5 +0,0 @@
|
|||||||
from all_warnings import AllWarnings
|
|
||||||
from document import *
|
|
||||||
from queryset import *
|
|
||||||
from fields import *
|
|
||||||
from migration import *
|
|
||||||
|
|||||||
@@ -1,44 +0,0 @@
|
|||||||
"""
|
|
||||||
This test has been put into a module. This is because it tests warnings that
|
|
||||||
only get triggered on first hit. This way we can ensure its imported into the
|
|
||||||
top level and called first by the test suite.
|
|
||||||
"""
|
|
||||||
import sys
|
|
||||||
sys.path[0:0] = [""]
|
|
||||||
import unittest
|
|
||||||
import warnings
|
|
||||||
|
|
||||||
from mongoengine import *
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = ('AllWarnings', )
|
|
||||||
|
|
||||||
|
|
||||||
class AllWarnings(unittest.TestCase):
|
|
||||||
|
|
||||||
def setUp(self):
|
|
||||||
connect(db='mongoenginetest')
|
|
||||||
self.warning_list = []
|
|
||||||
self.showwarning_default = warnings.showwarning
|
|
||||||
warnings.showwarning = self.append_to_warning_list
|
|
||||||
|
|
||||||
def append_to_warning_list(self, message, category, *args):
|
|
||||||
self.warning_list.append({"message": message,
|
|
||||||
"category": category})
|
|
||||||
|
|
||||||
def tearDown(self):
|
|
||||||
# restore default handling of warnings
|
|
||||||
warnings.showwarning = self.showwarning_default
|
|
||||||
|
|
||||||
def test_document_collection_syntax_warning(self):
|
|
||||||
|
|
||||||
class NonAbstractBase(Document):
|
|
||||||
meta = {'allow_inheritance': True}
|
|
||||||
|
|
||||||
class InheritedDocumentFailTest(NonAbstractBase):
|
|
||||||
meta = {'collection': 'fail'}
|
|
||||||
|
|
||||||
warning = self.warning_list[0]
|
|
||||||
self.assertEqual(SyntaxWarning, warning["category"])
|
|
||||||
self.assertEqual('non_abstract_base',
|
|
||||||
InheritedDocumentFailTest._get_collection_name())
|
|
||||||
|
|||||||
35
tests/all_warnings/test_warnings.py
Normal file
35
tests/all_warnings/test_warnings.py
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
"""
|
||||||
|
This test has been put into a module. This is because it tests warnings that
|
||||||
|
only get triggered on first hit. This way we can ensure its imported into the
|
||||||
|
top level and called first by the test suite.
|
||||||
|
"""
|
||||||
|
import unittest
|
||||||
|
import warnings
|
||||||
|
|
||||||
|
from mongoengine import *
|
||||||
|
|
||||||
|
|
||||||
|
class TestAllWarnings(unittest.TestCase):
|
||||||
|
def setUp(self):
|
||||||
|
connect(db="mongoenginetest")
|
||||||
|
self.warning_list = []
|
||||||
|
self.showwarning_default = warnings.showwarning
|
||||||
|
warnings.showwarning = self.append_to_warning_list
|
||||||
|
|
||||||
|
def append_to_warning_list(self, message, category, *args):
|
||||||
|
self.warning_list.append({"message": message, "category": category})
|
||||||
|
|
||||||
|
def tearDown(self):
|
||||||
|
# restore default handling of warnings
|
||||||
|
warnings.showwarning = self.showwarning_default
|
||||||
|
|
||||||
|
def test_document_collection_syntax_warning(self):
|
||||||
|
class NonAbstractBase(Document):
|
||||||
|
meta = {"allow_inheritance": True}
|
||||||
|
|
||||||
|
class InheritedDocumentFailTest(NonAbstractBase):
|
||||||
|
meta = {"collection": "fail"}
|
||||||
|
|
||||||
|
warning = self.warning_list[0]
|
||||||
|
assert SyntaxWarning == warning["category"]
|
||||||
|
assert "non_abstract_base" == InheritedDocumentFailTest._get_collection_name()
|
||||||
@@ -1,15 +0,0 @@
|
|||||||
import sys
|
|
||||||
sys.path[0:0] = [""]
|
|
||||||
import unittest
|
|
||||||
|
|
||||||
from class_methods import *
|
|
||||||
from delta import *
|
|
||||||
from dynamic import *
|
|
||||||
from indexes import *
|
|
||||||
from inheritance import *
|
|
||||||
from instance import *
|
|
||||||
from json_serialisation import *
|
|
||||||
from validation import *
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
unittest.main()
|
|
||||||
|
|||||||
@@ -1,352 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
import sys
|
|
||||||
sys.path[0:0] = [""]
|
|
||||||
import unittest
|
|
||||||
|
|
||||||
from mongoengine import *
|
|
||||||
|
|
||||||
from mongoengine.queryset import NULLIFY, PULL
|
|
||||||
from mongoengine.connection import get_db
|
|
||||||
|
|
||||||
__all__ = ("ClassMethodsTest", )
|
|
||||||
|
|
||||||
|
|
||||||
class ClassMethodsTest(unittest.TestCase):
|
|
||||||
|
|
||||||
def setUp(self):
|
|
||||||
connect(db='mongoenginetest')
|
|
||||||
self.db = get_db()
|
|
||||||
|
|
||||||
class Person(Document):
|
|
||||||
name = StringField()
|
|
||||||
age = IntField()
|
|
||||||
|
|
||||||
non_field = True
|
|
||||||
|
|
||||||
meta = {"allow_inheritance": True}
|
|
||||||
|
|
||||||
self.Person = Person
|
|
||||||
|
|
||||||
def tearDown(self):
|
|
||||||
for collection in self.db.collection_names():
|
|
||||||
if 'system.' in collection:
|
|
||||||
continue
|
|
||||||
self.db.drop_collection(collection)
|
|
||||||
|
|
||||||
def test_definition(self):
|
|
||||||
"""Ensure that document may be defined using fields.
|
|
||||||
"""
|
|
||||||
self.assertEqual(['age', 'id', 'name'],
|
|
||||||
sorted(self.Person._fields.keys()))
|
|
||||||
self.assertEqual(["IntField", "ObjectIdField", "StringField"],
|
|
||||||
sorted([x.__class__.__name__ for x in
|
|
||||||
self.Person._fields.values()]))
|
|
||||||
|
|
||||||
def test_get_db(self):
|
|
||||||
"""Ensure that get_db returns the expected db.
|
|
||||||
"""
|
|
||||||
db = self.Person._get_db()
|
|
||||||
self.assertEqual(self.db, db)
|
|
||||||
|
|
||||||
def test_get_collection_name(self):
|
|
||||||
"""Ensure that get_collection_name returns the expected collection
|
|
||||||
name.
|
|
||||||
"""
|
|
||||||
collection_name = 'person'
|
|
||||||
self.assertEqual(collection_name, self.Person._get_collection_name())
|
|
||||||
|
|
||||||
def test_get_collection(self):
|
|
||||||
"""Ensure that get_collection returns the expected collection.
|
|
||||||
"""
|
|
||||||
collection_name = 'person'
|
|
||||||
collection = self.Person._get_collection()
|
|
||||||
self.assertEqual(self.db[collection_name], collection)
|
|
||||||
|
|
||||||
def test_drop_collection(self):
|
|
||||||
"""Ensure that the collection may be dropped from the database.
|
|
||||||
"""
|
|
||||||
collection_name = 'person'
|
|
||||||
self.Person(name='Test').save()
|
|
||||||
self.assertTrue(collection_name in self.db.collection_names())
|
|
||||||
|
|
||||||
self.Person.drop_collection()
|
|
||||||
self.assertFalse(collection_name in self.db.collection_names())
|
|
||||||
|
|
||||||
def test_register_delete_rule(self):
|
|
||||||
"""Ensure that register delete rule adds a delete rule to the document
|
|
||||||
meta.
|
|
||||||
"""
|
|
||||||
class Job(Document):
|
|
||||||
employee = ReferenceField(self.Person)
|
|
||||||
|
|
||||||
self.assertEqual(self.Person._meta.get('delete_rules'), None)
|
|
||||||
|
|
||||||
self.Person.register_delete_rule(Job, 'employee', NULLIFY)
|
|
||||||
self.assertEqual(self.Person._meta['delete_rules'],
|
|
||||||
{(Job, 'employee'): NULLIFY})
|
|
||||||
|
|
||||||
def test_compare_indexes(self):
|
|
||||||
""" Ensure that the indexes are properly created and that
|
|
||||||
compare_indexes identifies the missing/extra indexes
|
|
||||||
"""
|
|
||||||
|
|
||||||
class BlogPost(Document):
|
|
||||||
author = StringField()
|
|
||||||
title = StringField()
|
|
||||||
description = StringField()
|
|
||||||
tags = StringField()
|
|
||||||
|
|
||||||
meta = {
|
|
||||||
'indexes': [('author', 'title')]
|
|
||||||
}
|
|
||||||
|
|
||||||
BlogPost.drop_collection()
|
|
||||||
|
|
||||||
BlogPost.ensure_indexes()
|
|
||||||
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
|
|
||||||
|
|
||||||
BlogPost.ensure_index(['author', 'description'])
|
|
||||||
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [[('author', 1), ('description', 1)]] })
|
|
||||||
|
|
||||||
BlogPost._get_collection().drop_index('author_1_description_1')
|
|
||||||
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
|
|
||||||
|
|
||||||
BlogPost._get_collection().drop_index('author_1_title_1')
|
|
||||||
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [[('author', 1), ('title', 1)]], 'extra': [] })
|
|
||||||
|
|
||||||
def test_compare_indexes_inheritance(self):
|
|
||||||
""" Ensure that the indexes are properly created and that
|
|
||||||
compare_indexes identifies the missing/extra indexes for subclassed
|
|
||||||
documents (_cls included)
|
|
||||||
"""
|
|
||||||
|
|
||||||
class BlogPost(Document):
|
|
||||||
author = StringField()
|
|
||||||
title = StringField()
|
|
||||||
description = StringField()
|
|
||||||
|
|
||||||
meta = {
|
|
||||||
'allow_inheritance': True
|
|
||||||
}
|
|
||||||
|
|
||||||
class BlogPostWithTags(BlogPost):
|
|
||||||
tags = StringField()
|
|
||||||
tag_list = ListField(StringField())
|
|
||||||
|
|
||||||
meta = {
|
|
||||||
'indexes': [('author', 'tags')]
|
|
||||||
}
|
|
||||||
|
|
||||||
BlogPost.drop_collection()
|
|
||||||
|
|
||||||
BlogPost.ensure_indexes()
|
|
||||||
BlogPostWithTags.ensure_indexes()
|
|
||||||
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
|
|
||||||
|
|
||||||
BlogPostWithTags.ensure_index(['author', 'tag_list'])
|
|
||||||
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [[('_cls', 1), ('author', 1), ('tag_list', 1)]] })
|
|
||||||
|
|
||||||
BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tag_list_1')
|
|
||||||
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
|
|
||||||
|
|
||||||
BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tags_1')
|
|
||||||
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [[('_cls', 1), ('author', 1), ('tags', 1)]], 'extra': [] })
|
|
||||||
|
|
||||||
def test_compare_indexes_multiple_subclasses(self):
|
|
||||||
""" Ensure that compare_indexes behaves correctly if called from a
|
|
||||||
class, which base class has multiple subclasses
|
|
||||||
"""
|
|
||||||
|
|
||||||
class BlogPost(Document):
|
|
||||||
author = StringField()
|
|
||||||
title = StringField()
|
|
||||||
description = StringField()
|
|
||||||
|
|
||||||
meta = {
|
|
||||||
'allow_inheritance': True
|
|
||||||
}
|
|
||||||
|
|
||||||
class BlogPostWithTags(BlogPost):
|
|
||||||
tags = StringField()
|
|
||||||
tag_list = ListField(StringField())
|
|
||||||
|
|
||||||
meta = {
|
|
||||||
'indexes': [('author', 'tags')]
|
|
||||||
}
|
|
||||||
|
|
||||||
class BlogPostWithCustomField(BlogPost):
|
|
||||||
custom = DictField()
|
|
||||||
|
|
||||||
meta = {
|
|
||||||
'indexes': [('author', 'custom')]
|
|
||||||
}
|
|
||||||
|
|
||||||
BlogPost.ensure_indexes()
|
|
||||||
BlogPostWithTags.ensure_indexes()
|
|
||||||
BlogPostWithCustomField.ensure_indexes()
|
|
||||||
|
|
||||||
self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] })
|
|
||||||
self.assertEqual(BlogPostWithTags.compare_indexes(), { 'missing': [], 'extra': [] })
|
|
||||||
self.assertEqual(BlogPostWithCustomField.compare_indexes(), { 'missing': [], 'extra': [] })
|
|
||||||
|
|
||||||
def test_list_indexes_inheritance(self):
|
|
||||||
""" ensure that all of the indexes are listed regardless of the super-
|
|
||||||
or sub-class that we call it from
|
|
||||||
"""
|
|
||||||
|
|
||||||
class BlogPost(Document):
|
|
||||||
author = StringField()
|
|
||||||
title = StringField()
|
|
||||||
description = StringField()
|
|
||||||
|
|
||||||
meta = {
|
|
||||||
'allow_inheritance': True
|
|
||||||
}
|
|
||||||
|
|
||||||
class BlogPostWithTags(BlogPost):
|
|
||||||
tags = StringField()
|
|
||||||
|
|
||||||
meta = {
|
|
||||||
'indexes': [('author', 'tags')]
|
|
||||||
}
|
|
||||||
|
|
||||||
class BlogPostWithTagsAndExtraText(BlogPostWithTags):
|
|
||||||
extra_text = StringField()
|
|
||||||
|
|
||||||
meta = {
|
|
||||||
'indexes': [('author', 'tags', 'extra_text')]
|
|
||||||
}
|
|
||||||
|
|
||||||
BlogPost.drop_collection()
|
|
||||||
|
|
||||||
BlogPost.ensure_indexes()
|
|
||||||
BlogPostWithTags.ensure_indexes()
|
|
||||||
BlogPostWithTagsAndExtraText.ensure_indexes()
|
|
||||||
|
|
||||||
self.assertEqual(BlogPost.list_indexes(),
|
|
||||||
BlogPostWithTags.list_indexes())
|
|
||||||
self.assertEqual(BlogPost.list_indexes(),
|
|
||||||
BlogPostWithTagsAndExtraText.list_indexes())
|
|
||||||
self.assertEqual(BlogPost.list_indexes(),
|
|
||||||
[[('_cls', 1), ('author', 1), ('tags', 1)],
|
|
||||||
[('_cls', 1), ('author', 1), ('tags', 1), ('extra_text', 1)],
|
|
||||||
[(u'_id', 1)], [('_cls', 1)]])
|
|
||||||
|
|
||||||
def test_register_delete_rule_inherited(self):
|
|
||||||
|
|
||||||
class Vaccine(Document):
|
|
||||||
name = StringField(required=True)
|
|
||||||
|
|
||||||
meta = {"indexes": ["name"]}
|
|
||||||
|
|
||||||
class Animal(Document):
|
|
||||||
family = StringField(required=True)
|
|
||||||
vaccine_made = ListField(ReferenceField("Vaccine", reverse_delete_rule=PULL))
|
|
||||||
|
|
||||||
meta = {"allow_inheritance": True, "indexes": ["family"]}
|
|
||||||
|
|
||||||
class Cat(Animal):
|
|
||||||
name = StringField(required=True)
|
|
||||||
|
|
||||||
self.assertEqual(Vaccine._meta['delete_rules'][(Animal, 'vaccine_made')], PULL)
|
|
||||||
self.assertEqual(Vaccine._meta['delete_rules'][(Cat, 'vaccine_made')], PULL)
|
|
||||||
|
|
||||||
def test_collection_naming(self):
|
|
||||||
"""Ensure that a collection with a specified name may be used.
|
|
||||||
"""
|
|
||||||
|
|
||||||
class DefaultNamingTest(Document):
|
|
||||||
pass
|
|
||||||
self.assertEqual('default_naming_test',
|
|
||||||
DefaultNamingTest._get_collection_name())
|
|
||||||
|
|
||||||
class CustomNamingTest(Document):
|
|
||||||
meta = {'collection': 'pimp_my_collection'}
|
|
||||||
|
|
||||||
self.assertEqual('pimp_my_collection',
|
|
||||||
CustomNamingTest._get_collection_name())
|
|
||||||
|
|
||||||
class DynamicNamingTest(Document):
|
|
||||||
meta = {'collection': lambda c: "DYNAMO"}
|
|
||||||
self.assertEqual('DYNAMO', DynamicNamingTest._get_collection_name())
|
|
||||||
|
|
||||||
# Use Abstract class to handle backwards compatibility
|
|
||||||
class BaseDocument(Document):
|
|
||||||
meta = {
|
|
||||||
'abstract': True,
|
|
||||||
'collection': lambda c: c.__name__.lower()
|
|
||||||
}
|
|
||||||
|
|
||||||
class OldNamingConvention(BaseDocument):
|
|
||||||
pass
|
|
||||||
self.assertEqual('oldnamingconvention',
|
|
||||||
OldNamingConvention._get_collection_name())
|
|
||||||
|
|
||||||
class InheritedAbstractNamingTest(BaseDocument):
|
|
||||||
meta = {'collection': 'wibble'}
|
|
||||||
self.assertEqual('wibble',
|
|
||||||
InheritedAbstractNamingTest._get_collection_name())
|
|
||||||
|
|
||||||
# Mixin tests
|
|
||||||
class BaseMixin(object):
|
|
||||||
meta = {
|
|
||||||
'collection': lambda c: c.__name__.lower()
|
|
||||||
}
|
|
||||||
|
|
||||||
class OldMixinNamingConvention(Document, BaseMixin):
|
|
||||||
pass
|
|
||||||
self.assertEqual('oldmixinnamingconvention',
|
|
||||||
OldMixinNamingConvention._get_collection_name())
|
|
||||||
|
|
||||||
class BaseMixin(object):
|
|
||||||
meta = {
|
|
||||||
'collection': lambda c: c.__name__.lower()
|
|
||||||
}
|
|
||||||
|
|
||||||
class BaseDocument(Document, BaseMixin):
|
|
||||||
meta = {'allow_inheritance': True}
|
|
||||||
|
|
||||||
class MyDocument(BaseDocument):
|
|
||||||
pass
|
|
||||||
|
|
||||||
self.assertEqual('basedocument', MyDocument._get_collection_name())
|
|
||||||
|
|
||||||
def test_custom_collection_name_operations(self):
|
|
||||||
"""Ensure that a collection with a specified name is used as expected.
|
|
||||||
"""
|
|
||||||
collection_name = 'personCollTest'
|
|
||||||
|
|
||||||
class Person(Document):
|
|
||||||
name = StringField()
|
|
||||||
meta = {'collection': collection_name}
|
|
||||||
|
|
||||||
Person(name="Test User").save()
|
|
||||||
self.assertTrue(collection_name in self.db.collection_names())
|
|
||||||
|
|
||||||
user_obj = self.db[collection_name].find_one()
|
|
||||||
self.assertEqual(user_obj['name'], "Test User")
|
|
||||||
|
|
||||||
user_obj = Person.objects[0]
|
|
||||||
self.assertEqual(user_obj.name, "Test User")
|
|
||||||
|
|
||||||
Person.drop_collection()
|
|
||||||
self.assertFalse(collection_name in self.db.collection_names())
|
|
||||||
|
|
||||||
def test_collection_name_and_primary(self):
|
|
||||||
"""Ensure that a collection with a specified name may be used.
|
|
||||||
"""
|
|
||||||
|
|
||||||
class Person(Document):
|
|
||||||
name = StringField(primary_key=True)
|
|
||||||
meta = {'collection': 'app'}
|
|
||||||
|
|
||||||
Person(name="Test User").save()
|
|
||||||
|
|
||||||
user_obj = Person.objects.first()
|
|
||||||
self.assertEqual(user_obj.name, "Test User")
|
|
||||||
|
|
||||||
Person.drop_collection()
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
unittest.main()
|
|
||||||
@@ -1,739 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
import sys
|
|
||||||
sys.path[0:0] = [""]
|
|
||||||
import unittest
|
|
||||||
|
|
||||||
from bson import SON
|
|
||||||
from mongoengine import *
|
|
||||||
from mongoengine.connection import get_db
|
|
||||||
|
|
||||||
__all__ = ("DeltaTest",)
|
|
||||||
|
|
||||||
|
|
||||||
class DeltaTest(unittest.TestCase):
|
|
||||||
|
|
||||||
def setUp(self):
|
|
||||||
connect(db='mongoenginetest')
|
|
||||||
self.db = get_db()
|
|
||||||
|
|
||||||
class Person(Document):
|
|
||||||
name = StringField()
|
|
||||||
age = IntField()
|
|
||||||
|
|
||||||
non_field = True
|
|
||||||
|
|
||||||
meta = {"allow_inheritance": True}
|
|
||||||
|
|
||||||
self.Person = Person
|
|
||||||
|
|
||||||
def tearDown(self):
|
|
||||||
for collection in self.db.collection_names():
|
|
||||||
if 'system.' in collection:
|
|
||||||
continue
|
|
||||||
self.db.drop_collection(collection)
|
|
||||||
|
|
||||||
def test_delta(self):
|
|
||||||
self.delta(Document)
|
|
||||||
self.delta(DynamicDocument)
|
|
||||||
|
|
||||||
def delta(self, DocClass):
|
|
||||||
|
|
||||||
class Doc(DocClass):
|
|
||||||
string_field = StringField()
|
|
||||||
int_field = IntField()
|
|
||||||
dict_field = DictField()
|
|
||||||
list_field = ListField()
|
|
||||||
|
|
||||||
Doc.drop_collection()
|
|
||||||
doc = Doc()
|
|
||||||
doc.save()
|
|
||||||
|
|
||||||
doc = Doc.objects.first()
|
|
||||||
self.assertEqual(doc._get_changed_fields(), [])
|
|
||||||
self.assertEqual(doc._delta(), ({}, {}))
|
|
||||||
|
|
||||||
doc.string_field = 'hello'
|
|
||||||
self.assertEqual(doc._get_changed_fields(), ['string_field'])
|
|
||||||
self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {}))
|
|
||||||
|
|
||||||
doc._changed_fields = []
|
|
||||||
doc.int_field = 1
|
|
||||||
self.assertEqual(doc._get_changed_fields(), ['int_field'])
|
|
||||||
self.assertEqual(doc._delta(), ({'int_field': 1}, {}))
|
|
||||||
|
|
||||||
doc._changed_fields = []
|
|
||||||
dict_value = {'hello': 'world', 'ping': 'pong'}
|
|
||||||
doc.dict_field = dict_value
|
|
||||||
self.assertEqual(doc._get_changed_fields(), ['dict_field'])
|
|
||||||
self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {}))
|
|
||||||
|
|
||||||
doc._changed_fields = []
|
|
||||||
list_value = ['1', 2, {'hello': 'world'}]
|
|
||||||
doc.list_field = list_value
|
|
||||||
self.assertEqual(doc._get_changed_fields(), ['list_field'])
|
|
||||||
self.assertEqual(doc._delta(), ({'list_field': list_value}, {}))
|
|
||||||
|
|
||||||
# Test unsetting
|
|
||||||
doc._changed_fields = []
|
|
||||||
doc.dict_field = {}
|
|
||||||
self.assertEqual(doc._get_changed_fields(), ['dict_field'])
|
|
||||||
self.assertEqual(doc._delta(), ({}, {'dict_field': 1}))
|
|
||||||
|
|
||||||
doc._changed_fields = []
|
|
||||||
doc.list_field = []
|
|
||||||
self.assertEqual(doc._get_changed_fields(), ['list_field'])
|
|
||||||
self.assertEqual(doc._delta(), ({}, {'list_field': 1}))
|
|
||||||
|
|
||||||
def test_delta_recursive(self):
|
|
||||||
self.delta_recursive(Document, EmbeddedDocument)
|
|
||||||
self.delta_recursive(DynamicDocument, EmbeddedDocument)
|
|
||||||
self.delta_recursive(Document, DynamicEmbeddedDocument)
|
|
||||||
self.delta_recursive(DynamicDocument, DynamicEmbeddedDocument)
|
|
||||||
|
|
||||||
def delta_recursive(self, DocClass, EmbeddedClass):
|
|
||||||
|
|
||||||
class Embedded(EmbeddedClass):
|
|
||||||
string_field = StringField()
|
|
||||||
int_field = IntField()
|
|
||||||
dict_field = DictField()
|
|
||||||
list_field = ListField()
|
|
||||||
|
|
||||||
class Doc(DocClass):
|
|
||||||
string_field = StringField()
|
|
||||||
int_field = IntField()
|
|
||||||
dict_field = DictField()
|
|
||||||
list_field = ListField()
|
|
||||||
embedded_field = EmbeddedDocumentField(Embedded)
|
|
||||||
|
|
||||||
Doc.drop_collection()
|
|
||||||
doc = Doc()
|
|
||||||
doc.save()
|
|
||||||
|
|
||||||
doc = Doc.objects.first()
|
|
||||||
self.assertEqual(doc._get_changed_fields(), [])
|
|
||||||
self.assertEqual(doc._delta(), ({}, {}))
|
|
||||||
|
|
||||||
embedded_1 = Embedded()
|
|
||||||
embedded_1.string_field = 'hello'
|
|
||||||
embedded_1.int_field = 1
|
|
||||||
embedded_1.dict_field = {'hello': 'world'}
|
|
||||||
embedded_1.list_field = ['1', 2, {'hello': 'world'}]
|
|
||||||
doc.embedded_field = embedded_1
|
|
||||||
|
|
||||||
self.assertEqual(doc._get_changed_fields(), ['embedded_field'])
|
|
||||||
|
|
||||||
embedded_delta = {
|
|
||||||
'string_field': 'hello',
|
|
||||||
'int_field': 1,
|
|
||||||
'dict_field': {'hello': 'world'},
|
|
||||||
'list_field': ['1', 2, {'hello': 'world'}]
|
|
||||||
}
|
|
||||||
self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
|
|
||||||
self.assertEqual(doc._delta(),
|
|
||||||
({'embedded_field': embedded_delta}, {}))
|
|
||||||
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
|
|
||||||
doc.embedded_field.dict_field = {}
|
|
||||||
self.assertEqual(doc._get_changed_fields(),
|
|
||||||
['embedded_field.dict_field'])
|
|
||||||
self.assertEqual(doc.embedded_field._delta(), ({}, {'dict_field': 1}))
|
|
||||||
self.assertEqual(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
self.assertEqual(doc.embedded_field.dict_field, {})
|
|
||||||
|
|
||||||
doc.embedded_field.list_field = []
|
|
||||||
self.assertEqual(doc._get_changed_fields(),
|
|
||||||
['embedded_field.list_field'])
|
|
||||||
self.assertEqual(doc.embedded_field._delta(), ({}, {'list_field': 1}))
|
|
||||||
self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field': 1}))
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
self.assertEqual(doc.embedded_field.list_field, [])
|
|
||||||
|
|
||||||
embedded_2 = Embedded()
|
|
||||||
embedded_2.string_field = 'hello'
|
|
||||||
embedded_2.int_field = 1
|
|
||||||
embedded_2.dict_field = {'hello': 'world'}
|
|
||||||
embedded_2.list_field = ['1', 2, {'hello': 'world'}]
|
|
||||||
|
|
||||||
doc.embedded_field.list_field = ['1', 2, embedded_2]
|
|
||||||
self.assertEqual(doc._get_changed_fields(),
|
|
||||||
['embedded_field.list_field'])
|
|
||||||
|
|
||||||
self.assertEqual(doc.embedded_field._delta(), ({
|
|
||||||
'list_field': ['1', 2, {
|
|
||||||
'_cls': 'Embedded',
|
|
||||||
'string_field': 'hello',
|
|
||||||
'dict_field': {'hello': 'world'},
|
|
||||||
'int_field': 1,
|
|
||||||
'list_field': ['1', 2, {'hello': 'world'}],
|
|
||||||
}]
|
|
||||||
}, {}))
|
|
||||||
|
|
||||||
self.assertEqual(doc._delta(), ({
|
|
||||||
'embedded_field.list_field': ['1', 2, {
|
|
||||||
'_cls': 'Embedded',
|
|
||||||
'string_field': 'hello',
|
|
||||||
'dict_field': {'hello': 'world'},
|
|
||||||
'int_field': 1,
|
|
||||||
'list_field': ['1', 2, {'hello': 'world'}],
|
|
||||||
}]
|
|
||||||
}, {}))
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
|
|
||||||
self.assertEqual(doc.embedded_field.list_field[0], '1')
|
|
||||||
self.assertEqual(doc.embedded_field.list_field[1], 2)
|
|
||||||
for k in doc.embedded_field.list_field[2]._fields:
|
|
||||||
self.assertEqual(doc.embedded_field.list_field[2][k],
|
|
||||||
embedded_2[k])
|
|
||||||
|
|
||||||
doc.embedded_field.list_field[2].string_field = 'world'
|
|
||||||
self.assertEqual(doc._get_changed_fields(),
|
|
||||||
['embedded_field.list_field.2.string_field'])
|
|
||||||
self.assertEqual(doc.embedded_field._delta(),
|
|
||||||
({'list_field.2.string_field': 'world'}, {}))
|
|
||||||
self.assertEqual(doc._delta(),
|
|
||||||
({'embedded_field.list_field.2.string_field': 'world'}, {}))
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
self.assertEqual(doc.embedded_field.list_field[2].string_field,
|
|
||||||
'world')
|
|
||||||
|
|
||||||
# Test multiple assignments
|
|
||||||
doc.embedded_field.list_field[2].string_field = 'hello world'
|
|
||||||
doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
|
|
||||||
self.assertEqual(doc._get_changed_fields(),
|
|
||||||
['embedded_field.list_field'])
|
|
||||||
self.assertEqual(doc.embedded_field._delta(), ({
|
|
||||||
'list_field': ['1', 2, {
|
|
||||||
'_cls': 'Embedded',
|
|
||||||
'string_field': 'hello world',
|
|
||||||
'int_field': 1,
|
|
||||||
'list_field': ['1', 2, {'hello': 'world'}],
|
|
||||||
'dict_field': {'hello': 'world'}}]}, {}))
|
|
||||||
self.assertEqual(doc._delta(), ({
|
|
||||||
'embedded_field.list_field': ['1', 2, {
|
|
||||||
'_cls': 'Embedded',
|
|
||||||
'string_field': 'hello world',
|
|
||||||
'int_field': 1,
|
|
||||||
'list_field': ['1', 2, {'hello': 'world'}],
|
|
||||||
'dict_field': {'hello': 'world'}}
|
|
||||||
]}, {}))
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
self.assertEqual(doc.embedded_field.list_field[2].string_field,
|
|
||||||
'hello world')
|
|
||||||
|
|
||||||
# Test list native methods
|
|
||||||
doc.embedded_field.list_field[2].list_field.pop(0)
|
|
||||||
self.assertEqual(doc._delta(),
|
|
||||||
({'embedded_field.list_field.2.list_field':
|
|
||||||
[2, {'hello': 'world'}]}, {}))
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
|
|
||||||
doc.embedded_field.list_field[2].list_field.append(1)
|
|
||||||
self.assertEqual(doc._delta(),
|
|
||||||
({'embedded_field.list_field.2.list_field':
|
|
||||||
[2, {'hello': 'world'}, 1]}, {}))
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
self.assertEqual(doc.embedded_field.list_field[2].list_field,
|
|
||||||
[2, {'hello': 'world'}, 1])
|
|
||||||
|
|
||||||
doc.embedded_field.list_field[2].list_field.sort(key=str)
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
self.assertEqual(doc.embedded_field.list_field[2].list_field,
|
|
||||||
[1, 2, {'hello': 'world'}])
|
|
||||||
|
|
||||||
del(doc.embedded_field.list_field[2].list_field[2]['hello'])
|
|
||||||
self.assertEqual(doc._delta(),
|
|
||||||
({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {}))
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
|
|
||||||
del(doc.embedded_field.list_field[2].list_field)
|
|
||||||
self.assertEqual(doc._delta(),
|
|
||||||
({}, {'embedded_field.list_field.2.list_field': 1}))
|
|
||||||
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
|
|
||||||
doc.dict_field['Embedded'] = embedded_1
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
|
|
||||||
doc.dict_field['Embedded'].string_field = 'Hello World'
|
|
||||||
self.assertEqual(doc._get_changed_fields(),
|
|
||||||
['dict_field.Embedded.string_field'])
|
|
||||||
self.assertEqual(doc._delta(),
|
|
||||||
({'dict_field.Embedded.string_field': 'Hello World'}, {}))
|
|
||||||
|
|
||||||
def test_circular_reference_deltas(self):
|
|
||||||
self.circular_reference_deltas(Document, Document)
|
|
||||||
self.circular_reference_deltas(Document, DynamicDocument)
|
|
||||||
self.circular_reference_deltas(DynamicDocument, Document)
|
|
||||||
self.circular_reference_deltas(DynamicDocument, DynamicDocument)
|
|
||||||
|
|
||||||
def circular_reference_deltas(self, DocClass1, DocClass2):
|
|
||||||
|
|
||||||
class Person(DocClass1):
|
|
||||||
name = StringField()
|
|
||||||
owns = ListField(ReferenceField('Organization'))
|
|
||||||
|
|
||||||
class Organization(DocClass2):
|
|
||||||
name = StringField()
|
|
||||||
owner = ReferenceField('Person')
|
|
||||||
|
|
||||||
Person.drop_collection()
|
|
||||||
Organization.drop_collection()
|
|
||||||
|
|
||||||
person = Person(name="owner").save()
|
|
||||||
organization = Organization(name="company").save()
|
|
||||||
|
|
||||||
person.owns.append(organization)
|
|
||||||
organization.owner = person
|
|
||||||
|
|
||||||
person.save()
|
|
||||||
organization.save()
|
|
||||||
|
|
||||||
p = Person.objects[0].select_related()
|
|
||||||
o = Organization.objects.first()
|
|
||||||
self.assertEqual(p.owns[0], o)
|
|
||||||
self.assertEqual(o.owner, p)
|
|
||||||
|
|
||||||
def test_circular_reference_deltas_2(self):
|
|
||||||
self.circular_reference_deltas_2(Document, Document)
|
|
||||||
self.circular_reference_deltas_2(Document, DynamicDocument)
|
|
||||||
self.circular_reference_deltas_2(DynamicDocument, Document)
|
|
||||||
self.circular_reference_deltas_2(DynamicDocument, DynamicDocument)
|
|
||||||
|
|
||||||
def circular_reference_deltas_2(self, DocClass1, DocClass2, dbref=True):
|
|
||||||
|
|
||||||
class Person(DocClass1):
|
|
||||||
name = StringField()
|
|
||||||
owns = ListField(ReferenceField('Organization', dbref=dbref))
|
|
||||||
employer = ReferenceField('Organization', dbref=dbref)
|
|
||||||
|
|
||||||
class Organization(DocClass2):
|
|
||||||
name = StringField()
|
|
||||||
owner = ReferenceField('Person', dbref=dbref)
|
|
||||||
employees = ListField(ReferenceField('Person', dbref=dbref))
|
|
||||||
|
|
||||||
Person.drop_collection()
|
|
||||||
Organization.drop_collection()
|
|
||||||
|
|
||||||
person = Person(name="owner").save()
|
|
||||||
employee = Person(name="employee").save()
|
|
||||||
organization = Organization(name="company").save()
|
|
||||||
|
|
||||||
person.owns.append(organization)
|
|
||||||
organization.owner = person
|
|
||||||
|
|
||||||
organization.employees.append(employee)
|
|
||||||
employee.employer = organization
|
|
||||||
|
|
||||||
person.save()
|
|
||||||
organization.save()
|
|
||||||
employee.save()
|
|
||||||
|
|
||||||
p = Person.objects.get(name="owner")
|
|
||||||
e = Person.objects.get(name="employee")
|
|
||||||
o = Organization.objects.first()
|
|
||||||
|
|
||||||
self.assertEqual(p.owns[0], o)
|
|
||||||
self.assertEqual(o.owner, p)
|
|
||||||
self.assertEqual(e.employer, o)
|
|
||||||
|
|
||||||
return person, organization, employee
|
|
||||||
|
|
||||||
def test_delta_db_field(self):
|
|
||||||
self.delta_db_field(Document)
|
|
||||||
self.delta_db_field(DynamicDocument)
|
|
||||||
|
|
||||||
def delta_db_field(self, DocClass):
|
|
||||||
|
|
||||||
class Doc(DocClass):
|
|
||||||
string_field = StringField(db_field='db_string_field')
|
|
||||||
int_field = IntField(db_field='db_int_field')
|
|
||||||
dict_field = DictField(db_field='db_dict_field')
|
|
||||||
list_field = ListField(db_field='db_list_field')
|
|
||||||
|
|
||||||
Doc.drop_collection()
|
|
||||||
doc = Doc()
|
|
||||||
doc.save()
|
|
||||||
|
|
||||||
doc = Doc.objects.first()
|
|
||||||
self.assertEqual(doc._get_changed_fields(), [])
|
|
||||||
self.assertEqual(doc._delta(), ({}, {}))
|
|
||||||
|
|
||||||
doc.string_field = 'hello'
|
|
||||||
self.assertEqual(doc._get_changed_fields(), ['db_string_field'])
|
|
||||||
self.assertEqual(doc._delta(), ({'db_string_field': 'hello'}, {}))
|
|
||||||
|
|
||||||
doc._changed_fields = []
|
|
||||||
doc.int_field = 1
|
|
||||||
self.assertEqual(doc._get_changed_fields(), ['db_int_field'])
|
|
||||||
self.assertEqual(doc._delta(), ({'db_int_field': 1}, {}))
|
|
||||||
|
|
||||||
doc._changed_fields = []
|
|
||||||
dict_value = {'hello': 'world', 'ping': 'pong'}
|
|
||||||
doc.dict_field = dict_value
|
|
||||||
self.assertEqual(doc._get_changed_fields(), ['db_dict_field'])
|
|
||||||
self.assertEqual(doc._delta(), ({'db_dict_field': dict_value}, {}))
|
|
||||||
|
|
||||||
doc._changed_fields = []
|
|
||||||
list_value = ['1', 2, {'hello': 'world'}]
|
|
||||||
doc.list_field = list_value
|
|
||||||
self.assertEqual(doc._get_changed_fields(), ['db_list_field'])
|
|
||||||
self.assertEqual(doc._delta(), ({'db_list_field': list_value}, {}))
|
|
||||||
|
|
||||||
# Test unsetting
|
|
||||||
doc._changed_fields = []
|
|
||||||
doc.dict_field = {}
|
|
||||||
self.assertEqual(doc._get_changed_fields(), ['db_dict_field'])
|
|
||||||
self.assertEqual(doc._delta(), ({}, {'db_dict_field': 1}))
|
|
||||||
|
|
||||||
doc._changed_fields = []
|
|
||||||
doc.list_field = []
|
|
||||||
self.assertEqual(doc._get_changed_fields(), ['db_list_field'])
|
|
||||||
self.assertEqual(doc._delta(), ({}, {'db_list_field': 1}))
|
|
||||||
|
|
||||||
# Test it saves that data
|
|
||||||
doc = Doc()
|
|
||||||
doc.save()
|
|
||||||
|
|
||||||
doc.string_field = 'hello'
|
|
||||||
doc.int_field = 1
|
|
||||||
doc.dict_field = {'hello': 'world'}
|
|
||||||
doc.list_field = ['1', 2, {'hello': 'world'}]
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
|
|
||||||
self.assertEqual(doc.string_field, 'hello')
|
|
||||||
self.assertEqual(doc.int_field, 1)
|
|
||||||
self.assertEqual(doc.dict_field, {'hello': 'world'})
|
|
||||||
self.assertEqual(doc.list_field, ['1', 2, {'hello': 'world'}])
|
|
||||||
|
|
||||||
def test_delta_recursive_db_field(self):
|
|
||||||
self.delta_recursive_db_field(Document, EmbeddedDocument)
|
|
||||||
self.delta_recursive_db_field(Document, DynamicEmbeddedDocument)
|
|
||||||
self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument)
|
|
||||||
self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument)
|
|
||||||
|
|
||||||
def delta_recursive_db_field(self, DocClass, EmbeddedClass):
|
|
||||||
|
|
||||||
class Embedded(EmbeddedClass):
|
|
||||||
string_field = StringField(db_field='db_string_field')
|
|
||||||
int_field = IntField(db_field='db_int_field')
|
|
||||||
dict_field = DictField(db_field='db_dict_field')
|
|
||||||
list_field = ListField(db_field='db_list_field')
|
|
||||||
|
|
||||||
class Doc(DocClass):
|
|
||||||
string_field = StringField(db_field='db_string_field')
|
|
||||||
int_field = IntField(db_field='db_int_field')
|
|
||||||
dict_field = DictField(db_field='db_dict_field')
|
|
||||||
list_field = ListField(db_field='db_list_field')
|
|
||||||
embedded_field = EmbeddedDocumentField(Embedded,
|
|
||||||
db_field='db_embedded_field')
|
|
||||||
|
|
||||||
Doc.drop_collection()
|
|
||||||
doc = Doc()
|
|
||||||
doc.save()
|
|
||||||
|
|
||||||
doc = Doc.objects.first()
|
|
||||||
self.assertEqual(doc._get_changed_fields(), [])
|
|
||||||
self.assertEqual(doc._delta(), ({}, {}))
|
|
||||||
|
|
||||||
embedded_1 = Embedded()
|
|
||||||
embedded_1.string_field = 'hello'
|
|
||||||
embedded_1.int_field = 1
|
|
||||||
embedded_1.dict_field = {'hello': 'world'}
|
|
||||||
embedded_1.list_field = ['1', 2, {'hello': 'world'}]
|
|
||||||
doc.embedded_field = embedded_1
|
|
||||||
|
|
||||||
self.assertEqual(doc._get_changed_fields(), ['db_embedded_field'])
|
|
||||||
|
|
||||||
embedded_delta = {
|
|
||||||
'db_string_field': 'hello',
|
|
||||||
'db_int_field': 1,
|
|
||||||
'db_dict_field': {'hello': 'world'},
|
|
||||||
'db_list_field': ['1', 2, {'hello': 'world'}]
|
|
||||||
}
|
|
||||||
self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
|
|
||||||
self.assertEqual(doc._delta(),
|
|
||||||
({'db_embedded_field': embedded_delta}, {}))
|
|
||||||
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
|
|
||||||
doc.embedded_field.dict_field = {}
|
|
||||||
self.assertEqual(doc._get_changed_fields(),
|
|
||||||
['db_embedded_field.db_dict_field'])
|
|
||||||
self.assertEqual(doc.embedded_field._delta(),
|
|
||||||
({}, {'db_dict_field': 1}))
|
|
||||||
self.assertEqual(doc._delta(),
|
|
||||||
({}, {'db_embedded_field.db_dict_field': 1}))
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
self.assertEqual(doc.embedded_field.dict_field, {})
|
|
||||||
|
|
||||||
doc.embedded_field.list_field = []
|
|
||||||
self.assertEqual(doc._get_changed_fields(),
|
|
||||||
['db_embedded_field.db_list_field'])
|
|
||||||
self.assertEqual(doc.embedded_field._delta(),
|
|
||||||
({}, {'db_list_field': 1}))
|
|
||||||
self.assertEqual(doc._delta(),
|
|
||||||
({}, {'db_embedded_field.db_list_field': 1}))
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
self.assertEqual(doc.embedded_field.list_field, [])
|
|
||||||
|
|
||||||
embedded_2 = Embedded()
|
|
||||||
embedded_2.string_field = 'hello'
|
|
||||||
embedded_2.int_field = 1
|
|
||||||
embedded_2.dict_field = {'hello': 'world'}
|
|
||||||
embedded_2.list_field = ['1', 2, {'hello': 'world'}]
|
|
||||||
|
|
||||||
doc.embedded_field.list_field = ['1', 2, embedded_2]
|
|
||||||
self.assertEqual(doc._get_changed_fields(),
|
|
||||||
['db_embedded_field.db_list_field'])
|
|
||||||
self.assertEqual(doc.embedded_field._delta(), ({
|
|
||||||
'db_list_field': ['1', 2, {
|
|
||||||
'_cls': 'Embedded',
|
|
||||||
'db_string_field': 'hello',
|
|
||||||
'db_dict_field': {'hello': 'world'},
|
|
||||||
'db_int_field': 1,
|
|
||||||
'db_list_field': ['1', 2, {'hello': 'world'}],
|
|
||||||
}]
|
|
||||||
}, {}))
|
|
||||||
|
|
||||||
self.assertEqual(doc._delta(), ({
|
|
||||||
'db_embedded_field.db_list_field': ['1', 2, {
|
|
||||||
'_cls': 'Embedded',
|
|
||||||
'db_string_field': 'hello',
|
|
||||||
'db_dict_field': {'hello': 'world'},
|
|
||||||
'db_int_field': 1,
|
|
||||||
'db_list_field': ['1', 2, {'hello': 'world'}],
|
|
||||||
}]
|
|
||||||
}, {}))
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
|
|
||||||
self.assertEqual(doc.embedded_field.list_field[0], '1')
|
|
||||||
self.assertEqual(doc.embedded_field.list_field[1], 2)
|
|
||||||
for k in doc.embedded_field.list_field[2]._fields:
|
|
||||||
self.assertEqual(doc.embedded_field.list_field[2][k],
|
|
||||||
embedded_2[k])
|
|
||||||
|
|
||||||
doc.embedded_field.list_field[2].string_field = 'world'
|
|
||||||
self.assertEqual(doc._get_changed_fields(),
|
|
||||||
['db_embedded_field.db_list_field.2.db_string_field'])
|
|
||||||
self.assertEqual(doc.embedded_field._delta(),
|
|
||||||
({'db_list_field.2.db_string_field': 'world'}, {}))
|
|
||||||
self.assertEqual(doc._delta(),
|
|
||||||
({'db_embedded_field.db_list_field.2.db_string_field': 'world'},
|
|
||||||
{}))
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
self.assertEqual(doc.embedded_field.list_field[2].string_field,
|
|
||||||
'world')
|
|
||||||
|
|
||||||
# Test multiple assignments
|
|
||||||
doc.embedded_field.list_field[2].string_field = 'hello world'
|
|
||||||
doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
|
|
||||||
self.assertEqual(doc._get_changed_fields(),
|
|
||||||
['db_embedded_field.db_list_field'])
|
|
||||||
self.assertEqual(doc.embedded_field._delta(), ({
|
|
||||||
'db_list_field': ['1', 2, {
|
|
||||||
'_cls': 'Embedded',
|
|
||||||
'db_string_field': 'hello world',
|
|
||||||
'db_int_field': 1,
|
|
||||||
'db_list_field': ['1', 2, {'hello': 'world'}],
|
|
||||||
'db_dict_field': {'hello': 'world'}}]}, {}))
|
|
||||||
self.assertEqual(doc._delta(), ({
|
|
||||||
'db_embedded_field.db_list_field': ['1', 2, {
|
|
||||||
'_cls': 'Embedded',
|
|
||||||
'db_string_field': 'hello world',
|
|
||||||
'db_int_field': 1,
|
|
||||||
'db_list_field': ['1', 2, {'hello': 'world'}],
|
|
||||||
'db_dict_field': {'hello': 'world'}}
|
|
||||||
]}, {}))
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
self.assertEqual(doc.embedded_field.list_field[2].string_field,
|
|
||||||
'hello world')
|
|
||||||
|
|
||||||
# Test list native methods
|
|
||||||
doc.embedded_field.list_field[2].list_field.pop(0)
|
|
||||||
self.assertEqual(doc._delta(),
|
|
||||||
({'db_embedded_field.db_list_field.2.db_list_field':
|
|
||||||
[2, {'hello': 'world'}]}, {}))
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
|
|
||||||
doc.embedded_field.list_field[2].list_field.append(1)
|
|
||||||
self.assertEqual(doc._delta(),
|
|
||||||
({'db_embedded_field.db_list_field.2.db_list_field':
|
|
||||||
[2, {'hello': 'world'}, 1]}, {}))
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
self.assertEqual(doc.embedded_field.list_field[2].list_field,
|
|
||||||
[2, {'hello': 'world'}, 1])
|
|
||||||
|
|
||||||
doc.embedded_field.list_field[2].list_field.sort(key=str)
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
self.assertEqual(doc.embedded_field.list_field[2].list_field,
|
|
||||||
[1, 2, {'hello': 'world'}])
|
|
||||||
|
|
||||||
del(doc.embedded_field.list_field[2].list_field[2]['hello'])
|
|
||||||
self.assertEqual(doc._delta(),
|
|
||||||
({'db_embedded_field.db_list_field.2.db_list_field':
|
|
||||||
[1, 2, {}]}, {}))
|
|
||||||
doc.save()
|
|
||||||
doc = doc.reload(10)
|
|
||||||
|
|
||||||
del(doc.embedded_field.list_field[2].list_field)
|
|
||||||
self.assertEqual(doc._delta(), ({},
|
|
||||||
{'db_embedded_field.db_list_field.2.db_list_field': 1}))
|
|
||||||
|
|
||||||
def test_delta_for_dynamic_documents(self):
|
|
||||||
class Person(DynamicDocument):
|
|
||||||
name = StringField()
|
|
||||||
meta = {'allow_inheritance': True}
|
|
||||||
|
|
||||||
Person.drop_collection()
|
|
||||||
|
|
||||||
p = Person(name="James", age=34)
|
|
||||||
self.assertEqual(p._delta(), (
|
|
||||||
SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))
|
|
||||||
|
|
||||||
p.doc = 123
|
|
||||||
del(p.doc)
|
|
||||||
self.assertEqual(p._delta(), (
|
|
||||||
SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))
|
|
||||||
|
|
||||||
p = Person()
|
|
||||||
p.name = "Dean"
|
|
||||||
p.age = 22
|
|
||||||
p.save()
|
|
||||||
|
|
||||||
p.age = 24
|
|
||||||
self.assertEqual(p.age, 24)
|
|
||||||
self.assertEqual(p._get_changed_fields(), ['age'])
|
|
||||||
self.assertEqual(p._delta(), ({'age': 24}, {}))
|
|
||||||
|
|
||||||
p = Person.objects(age=22).get()
|
|
||||||
p.age = 24
|
|
||||||
self.assertEqual(p.age, 24)
|
|
||||||
self.assertEqual(p._get_changed_fields(), ['age'])
|
|
||||||
self.assertEqual(p._delta(), ({'age': 24}, {}))
|
|
||||||
|
|
||||||
p.save()
|
|
||||||
self.assertEqual(1, Person.objects(age=24).count())
|
|
||||||
|
|
||||||
def test_dynamic_delta(self):
|
|
||||||
|
|
||||||
class Doc(DynamicDocument):
|
|
||||||
pass
|
|
||||||
|
|
||||||
Doc.drop_collection()
|
|
||||||
doc = Doc()
|
|
||||||
doc.save()
|
|
||||||
|
|
||||||
doc = Doc.objects.first()
|
|
||||||
self.assertEqual(doc._get_changed_fields(), [])
|
|
||||||
self.assertEqual(doc._delta(), ({}, {}))
|
|
||||||
|
|
||||||
doc.string_field = 'hello'
|
|
||||||
self.assertEqual(doc._get_changed_fields(), ['string_field'])
|
|
||||||
self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {}))
|
|
||||||
|
|
||||||
doc._changed_fields = []
|
|
||||||
doc.int_field = 1
|
|
||||||
self.assertEqual(doc._get_changed_fields(), ['int_field'])
|
|
||||||
self.assertEqual(doc._delta(), ({'int_field': 1}, {}))
|
|
||||||
|
|
||||||
doc._changed_fields = []
|
|
||||||
dict_value = {'hello': 'world', 'ping': 'pong'}
|
|
||||||
doc.dict_field = dict_value
|
|
||||||
self.assertEqual(doc._get_changed_fields(), ['dict_field'])
|
|
||||||
self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {}))
|
|
||||||
|
|
||||||
doc._changed_fields = []
|
|
||||||
list_value = ['1', 2, {'hello': 'world'}]
|
|
||||||
doc.list_field = list_value
|
|
||||||
self.assertEqual(doc._get_changed_fields(), ['list_field'])
|
|
||||||
self.assertEqual(doc._delta(), ({'list_field': list_value}, {}))
|
|
||||||
|
|
||||||
# Test unsetting
|
|
||||||
doc._changed_fields = []
|
|
||||||
doc.dict_field = {}
|
|
||||||
self.assertEqual(doc._get_changed_fields(), ['dict_field'])
|
|
||||||
self.assertEqual(doc._delta(), ({}, {'dict_field': 1}))
|
|
||||||
|
|
||||||
doc._changed_fields = []
|
|
||||||
doc.list_field = []
|
|
||||||
self.assertEqual(doc._get_changed_fields(), ['list_field'])
|
|
||||||
self.assertEqual(doc._delta(), ({}, {'list_field': 1}))
|
|
||||||
|
|
||||||
def test_delta_with_dbref_true(self):
|
|
||||||
person, organization, employee = self.circular_reference_deltas_2(Document, Document, True)
|
|
||||||
employee.name = 'test'
|
|
||||||
|
|
||||||
self.assertEqual(organization._get_changed_fields(), [])
|
|
||||||
|
|
||||||
updates, removals = organization._delta()
|
|
||||||
self.assertEqual({}, removals)
|
|
||||||
self.assertEqual({}, updates)
|
|
||||||
|
|
||||||
organization.employees.append(person)
|
|
||||||
updates, removals = organization._delta()
|
|
||||||
self.assertEqual({}, removals)
|
|
||||||
self.assertTrue('employees' in updates)
|
|
||||||
|
|
||||||
def test_delta_with_dbref_false(self):
|
|
||||||
person, organization, employee = self.circular_reference_deltas_2(Document, Document, False)
|
|
||||||
employee.name = 'test'
|
|
||||||
|
|
||||||
self.assertEqual(organization._get_changed_fields(), [])
|
|
||||||
|
|
||||||
updates, removals = organization._delta()
|
|
||||||
self.assertEqual({}, removals)
|
|
||||||
self.assertEqual({}, updates)
|
|
||||||
|
|
||||||
organization.employees.append(person)
|
|
||||||
updates, removals = organization._delta()
|
|
||||||
self.assertEqual({}, removals)
|
|
||||||
self.assertTrue('employees' in updates)
|
|
||||||
|
|
||||||
def test_nested_nested_fields_mark_as_changed(self):
|
|
||||||
class EmbeddedDoc(EmbeddedDocument):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
class MyDoc(Document):
|
|
||||||
subs = MapField(MapField(EmbeddedDocumentField(EmbeddedDoc)))
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
MyDoc.drop_collection()
|
|
||||||
|
|
||||||
mydoc = MyDoc(name='testcase1', subs={'a': {'b': EmbeddedDoc(name='foo')}}).save()
|
|
||||||
|
|
||||||
mydoc = MyDoc.objects.first()
|
|
||||||
subdoc = mydoc.subs['a']['b']
|
|
||||||
subdoc.name = 'bar'
|
|
||||||
|
|
||||||
self.assertEqual(["name"], subdoc._get_changed_fields())
|
|
||||||
self.assertEqual(["subs.a.b.name"], mydoc._get_changed_fields())
|
|
||||||
|
|
||||||
mydoc._clear_changed_fields()
|
|
||||||
self.assertEqual([], mydoc._get_changed_fields())
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
unittest.main()
|
|
||||||
@@ -1,297 +0,0 @@
|
|||||||
import unittest
|
|
||||||
import sys
|
|
||||||
sys.path[0:0] = [""]
|
|
||||||
|
|
||||||
from mongoengine import *
|
|
||||||
from mongoengine.connection import get_db
|
|
||||||
|
|
||||||
__all__ = ("DynamicTest", )
|
|
||||||
|
|
||||||
|
|
||||||
class DynamicTest(unittest.TestCase):
|
|
||||||
|
|
||||||
def setUp(self):
|
|
||||||
connect(db='mongoenginetest')
|
|
||||||
self.db = get_db()
|
|
||||||
|
|
||||||
class Person(DynamicDocument):
|
|
||||||
name = StringField()
|
|
||||||
meta = {'allow_inheritance': True}
|
|
||||||
|
|
||||||
Person.drop_collection()
|
|
||||||
|
|
||||||
self.Person = Person
|
|
||||||
|
|
||||||
def test_simple_dynamic_document(self):
|
|
||||||
"""Ensures simple dynamic documents are saved correctly"""
|
|
||||||
|
|
||||||
p = self.Person()
|
|
||||||
p.name = "James"
|
|
||||||
p.age = 34
|
|
||||||
|
|
||||||
self.assertEqual(p.to_mongo(), {"_cls": "Person", "name": "James",
|
|
||||||
"age": 34})
|
|
||||||
self.assertEqual(p.to_mongo().keys(), ["_cls", "name", "age"])
|
|
||||||
p.save()
|
|
||||||
self.assertEqual(p.to_mongo().keys(), ["_id", "_cls", "name", "age"])
|
|
||||||
|
|
||||||
self.assertEqual(self.Person.objects.first().age, 34)
|
|
||||||
|
|
||||||
# Confirm no changes to self.Person
|
|
||||||
self.assertFalse(hasattr(self.Person, 'age'))
|
|
||||||
|
|
||||||
def test_change_scope_of_variable(self):
|
|
||||||
"""Test changing the scope of a dynamic field has no adverse effects"""
|
|
||||||
p = self.Person()
|
|
||||||
p.name = "Dean"
|
|
||||||
p.misc = 22
|
|
||||||
p.save()
|
|
||||||
|
|
||||||
p = self.Person.objects.get()
|
|
||||||
p.misc = {'hello': 'world'}
|
|
||||||
p.save()
|
|
||||||
|
|
||||||
p = self.Person.objects.get()
|
|
||||||
self.assertEqual(p.misc, {'hello': 'world'})
|
|
||||||
|
|
||||||
def test_delete_dynamic_field(self):
|
|
||||||
"""Test deleting a dynamic field works"""
|
|
||||||
self.Person.drop_collection()
|
|
||||||
p = self.Person()
|
|
||||||
p.name = "Dean"
|
|
||||||
p.misc = 22
|
|
||||||
p.save()
|
|
||||||
|
|
||||||
p = self.Person.objects.get()
|
|
||||||
p.misc = {'hello': 'world'}
|
|
||||||
p.save()
|
|
||||||
|
|
||||||
p = self.Person.objects.get()
|
|
||||||
self.assertEqual(p.misc, {'hello': 'world'})
|
|
||||||
collection = self.db[self.Person._get_collection_name()]
|
|
||||||
obj = collection.find_one()
|
|
||||||
self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name'])
|
|
||||||
|
|
||||||
del(p.misc)
|
|
||||||
p.save()
|
|
||||||
|
|
||||||
p = self.Person.objects.get()
|
|
||||||
self.assertFalse(hasattr(p, 'misc'))
|
|
||||||
|
|
||||||
obj = collection.find_one()
|
|
||||||
self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'name'])
|
|
||||||
|
|
||||||
def test_dynamic_document_queries(self):
|
|
||||||
"""Ensure we can query dynamic fields"""
|
|
||||||
p = self.Person()
|
|
||||||
p.name = "Dean"
|
|
||||||
p.age = 22
|
|
||||||
p.save()
|
|
||||||
|
|
||||||
self.assertEqual(1, self.Person.objects(age=22).count())
|
|
||||||
p = self.Person.objects(age=22)
|
|
||||||
p = p.get()
|
|
||||||
self.assertEqual(22, p.age)
|
|
||||||
|
|
||||||
def test_complex_dynamic_document_queries(self):
|
|
||||||
class Person(DynamicDocument):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
Person.drop_collection()
|
|
||||||
|
|
||||||
p = Person(name="test")
|
|
||||||
p.age = "ten"
|
|
||||||
p.save()
|
|
||||||
|
|
||||||
p1 = Person(name="test1")
|
|
||||||
p1.age = "less then ten and a half"
|
|
||||||
p1.save()
|
|
||||||
|
|
||||||
p2 = Person(name="test2")
|
|
||||||
p2.age = 10
|
|
||||||
p2.save()
|
|
||||||
|
|
||||||
self.assertEqual(Person.objects(age__icontains='ten').count(), 2)
|
|
||||||
self.assertEqual(Person.objects(age__gte=10).count(), 1)
|
|
||||||
|
|
||||||
def test_complex_data_lookups(self):
|
|
||||||
"""Ensure you can query dynamic document dynamic fields"""
|
|
||||||
p = self.Person()
|
|
||||||
p.misc = {'hello': 'world'}
|
|
||||||
p.save()
|
|
||||||
|
|
||||||
self.assertEqual(1, self.Person.objects(misc__hello='world').count())
|
|
||||||
|
|
||||||
def test_complex_embedded_document_validation(self):
|
|
||||||
"""Ensure embedded dynamic documents may be validated"""
|
|
||||||
class Embedded(DynamicEmbeddedDocument):
|
|
||||||
content = URLField()
|
|
||||||
|
|
||||||
class Doc(DynamicDocument):
|
|
||||||
pass
|
|
||||||
|
|
||||||
Doc.drop_collection()
|
|
||||||
doc = Doc()
|
|
||||||
|
|
||||||
embedded_doc_1 = Embedded(content='http://mongoengine.org')
|
|
||||||
embedded_doc_1.validate()
|
|
||||||
|
|
||||||
embedded_doc_2 = Embedded(content='this is not a url')
|
|
||||||
self.assertRaises(ValidationError, embedded_doc_2.validate)
|
|
||||||
|
|
||||||
doc.embedded_field_1 = embedded_doc_1
|
|
||||||
doc.embedded_field_2 = embedded_doc_2
|
|
||||||
self.assertRaises(ValidationError, doc.validate)
|
|
||||||
|
|
||||||
def test_inheritance(self):
|
|
||||||
"""Ensure that dynamic document plays nice with inheritance"""
|
|
||||||
class Employee(self.Person):
|
|
||||||
salary = IntField()
|
|
||||||
|
|
||||||
Employee.drop_collection()
|
|
||||||
|
|
||||||
self.assertTrue('name' in Employee._fields)
|
|
||||||
self.assertTrue('salary' in Employee._fields)
|
|
||||||
self.assertEqual(Employee._get_collection_name(),
|
|
||||||
self.Person._get_collection_name())
|
|
||||||
|
|
||||||
joe_bloggs = Employee()
|
|
||||||
joe_bloggs.name = "Joe Bloggs"
|
|
||||||
joe_bloggs.salary = 10
|
|
||||||
joe_bloggs.age = 20
|
|
||||||
joe_bloggs.save()
|
|
||||||
|
|
||||||
self.assertEqual(1, self.Person.objects(age=20).count())
|
|
||||||
self.assertEqual(1, Employee.objects(age=20).count())
|
|
||||||
|
|
||||||
joe_bloggs = self.Person.objects.first()
|
|
||||||
self.assertTrue(isinstance(joe_bloggs, Employee))
|
|
||||||
|
|
||||||
def test_embedded_dynamic_document(self):
|
|
||||||
"""Test dynamic embedded documents"""
|
|
||||||
class Embedded(DynamicEmbeddedDocument):
|
|
||||||
pass
|
|
||||||
|
|
||||||
class Doc(DynamicDocument):
|
|
||||||
pass
|
|
||||||
|
|
||||||
Doc.drop_collection()
|
|
||||||
doc = Doc()
|
|
||||||
|
|
||||||
embedded_1 = Embedded()
|
|
||||||
embedded_1.string_field = 'hello'
|
|
||||||
embedded_1.int_field = 1
|
|
||||||
embedded_1.dict_field = {'hello': 'world'}
|
|
||||||
embedded_1.list_field = ['1', 2, {'hello': 'world'}]
|
|
||||||
doc.embedded_field = embedded_1
|
|
||||||
|
|
||||||
self.assertEqual(doc.to_mongo(), {
|
|
||||||
"embedded_field": {
|
|
||||||
"_cls": "Embedded",
|
|
||||||
"string_field": "hello",
|
|
||||||
"int_field": 1,
|
|
||||||
"dict_field": {"hello": "world"},
|
|
||||||
"list_field": ['1', 2, {'hello': 'world'}]
|
|
||||||
}
|
|
||||||
})
|
|
||||||
doc.save()
|
|
||||||
|
|
||||||
doc = Doc.objects.first()
|
|
||||||
self.assertEqual(doc.embedded_field.__class__, Embedded)
|
|
||||||
self.assertEqual(doc.embedded_field.string_field, "hello")
|
|
||||||
self.assertEqual(doc.embedded_field.int_field, 1)
|
|
||||||
self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
|
|
||||||
self.assertEqual(doc.embedded_field.list_field,
|
|
||||||
['1', 2, {'hello': 'world'}])
|
|
||||||
|
|
||||||
def test_complex_embedded_documents(self):
|
|
||||||
"""Test complex dynamic embedded documents setups"""
|
|
||||||
class Embedded(DynamicEmbeddedDocument):
|
|
||||||
pass
|
|
||||||
|
|
||||||
class Doc(DynamicDocument):
|
|
||||||
pass
|
|
||||||
|
|
||||||
Doc.drop_collection()
|
|
||||||
doc = Doc()
|
|
||||||
|
|
||||||
embedded_1 = Embedded()
|
|
||||||
embedded_1.string_field = 'hello'
|
|
||||||
embedded_1.int_field = 1
|
|
||||||
embedded_1.dict_field = {'hello': 'world'}
|
|
||||||
|
|
||||||
embedded_2 = Embedded()
|
|
||||||
embedded_2.string_field = 'hello'
|
|
||||||
embedded_2.int_field = 1
|
|
||||||
embedded_2.dict_field = {'hello': 'world'}
|
|
||||||
embedded_2.list_field = ['1', 2, {'hello': 'world'}]
|
|
||||||
|
|
||||||
embedded_1.list_field = ['1', 2, embedded_2]
|
|
||||||
doc.embedded_field = embedded_1
|
|
||||||
|
|
||||||
self.assertEqual(doc.to_mongo(), {
|
|
||||||
"embedded_field": {
|
|
||||||
"_cls": "Embedded",
|
|
||||||
"string_field": "hello",
|
|
||||||
"int_field": 1,
|
|
||||||
"dict_field": {"hello": "world"},
|
|
||||||
"list_field": ['1', 2,
|
|
||||||
{"_cls": "Embedded",
|
|
||||||
"string_field": "hello",
|
|
||||||
"int_field": 1,
|
|
||||||
"dict_field": {"hello": "world"},
|
|
||||||
"list_field": ['1', 2, {'hello': 'world'}]}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
})
|
|
||||||
doc.save()
|
|
||||||
doc = Doc.objects.first()
|
|
||||||
self.assertEqual(doc.embedded_field.__class__, Embedded)
|
|
||||||
self.assertEqual(doc.embedded_field.string_field, "hello")
|
|
||||||
self.assertEqual(doc.embedded_field.int_field, 1)
|
|
||||||
self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
|
|
||||||
self.assertEqual(doc.embedded_field.list_field[0], '1')
|
|
||||||
self.assertEqual(doc.embedded_field.list_field[1], 2)
|
|
||||||
|
|
||||||
embedded_field = doc.embedded_field.list_field[2]
|
|
||||||
|
|
||||||
self.assertEqual(embedded_field.__class__, Embedded)
|
|
||||||
self.assertEqual(embedded_field.string_field, "hello")
|
|
||||||
self.assertEqual(embedded_field.int_field, 1)
|
|
||||||
self.assertEqual(embedded_field.dict_field, {'hello': 'world'})
|
|
||||||
self.assertEqual(embedded_field.list_field, ['1', 2,
|
|
||||||
{'hello': 'world'}])
|
|
||||||
|
|
||||||
def test_dynamic_and_embedded(self):
|
|
||||||
"""Ensure embedded documents play nicely"""
|
|
||||||
|
|
||||||
class Address(EmbeddedDocument):
|
|
||||||
city = StringField()
|
|
||||||
|
|
||||||
class Person(DynamicDocument):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
Person.drop_collection()
|
|
||||||
|
|
||||||
Person(name="Ross", address=Address(city="London")).save()
|
|
||||||
|
|
||||||
person = Person.objects.first()
|
|
||||||
person.address.city = "Lundenne"
|
|
||||||
person.save()
|
|
||||||
|
|
||||||
self.assertEqual(Person.objects.first().address.city, "Lundenne")
|
|
||||||
|
|
||||||
person = Person.objects.first()
|
|
||||||
person.address = Address(city="Londinium")
|
|
||||||
person.save()
|
|
||||||
|
|
||||||
self.assertEqual(Person.objects.first().address.city, "Londinium")
|
|
||||||
|
|
||||||
person = Person.objects.first()
|
|
||||||
person.age = 35
|
|
||||||
person.save()
|
|
||||||
self.assertEqual(Person.objects.first().age, 35)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
unittest.main()
|
|
||||||
@@ -1,731 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
import unittest
|
|
||||||
import sys
|
|
||||||
sys.path[0:0] = [""]
|
|
||||||
|
|
||||||
import os
|
|
||||||
import pymongo
|
|
||||||
|
|
||||||
from nose.plugins.skip import SkipTest
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
from mongoengine import *
|
|
||||||
from mongoengine.connection import get_db, get_connection
|
|
||||||
|
|
||||||
__all__ = ("IndexesTest", )
|
|
||||||
|
|
||||||
|
|
||||||
class IndexesTest(unittest.TestCase):
|
|
||||||
|
|
||||||
def setUp(self):
|
|
||||||
connect(db='mongoenginetest')
|
|
||||||
self.db = get_db()
|
|
||||||
|
|
||||||
class Person(Document):
|
|
||||||
name = StringField()
|
|
||||||
age = IntField()
|
|
||||||
|
|
||||||
non_field = True
|
|
||||||
|
|
||||||
meta = {"allow_inheritance": True}
|
|
||||||
|
|
||||||
self.Person = Person
|
|
||||||
|
|
||||||
def tearDown(self):
|
|
||||||
for collection in self.db.collection_names():
|
|
||||||
if 'system.' in collection:
|
|
||||||
continue
|
|
||||||
self.db.drop_collection(collection)
|
|
||||||
|
|
||||||
def test_indexes_document(self):
|
|
||||||
"""Ensure that indexes are used when meta[indexes] is specified for
|
|
||||||
Documents
|
|
||||||
"""
|
|
||||||
self._index_test(Document)
|
|
||||||
|
|
||||||
def test_indexes_dynamic_document(self):
|
|
||||||
"""Ensure that indexes are used when meta[indexes] is specified for
|
|
||||||
Dynamic Documents
|
|
||||||
"""
|
|
||||||
self._index_test(DynamicDocument)
|
|
||||||
|
|
||||||
def _index_test(self, InheritFrom):
|
|
||||||
|
|
||||||
class BlogPost(InheritFrom):
|
|
||||||
date = DateTimeField(db_field='addDate', default=datetime.now)
|
|
||||||
category = StringField()
|
|
||||||
tags = ListField(StringField())
|
|
||||||
meta = {
|
|
||||||
'indexes': [
|
|
||||||
'-date',
|
|
||||||
'tags',
|
|
||||||
('category', '-date')
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
expected_specs = [{'fields': [('addDate', -1)]},
|
|
||||||
{'fields': [('tags', 1)]},
|
|
||||||
{'fields': [('category', 1), ('addDate', -1)]}]
|
|
||||||
self.assertEqual(expected_specs, BlogPost._meta['index_specs'])
|
|
||||||
|
|
||||||
BlogPost.ensure_indexes()
|
|
||||||
info = BlogPost.objects._collection.index_information()
|
|
||||||
# _id, '-date', 'tags', ('cat', 'date')
|
|
||||||
self.assertEqual(len(info), 4)
|
|
||||||
info = [value['key'] for key, value in info.iteritems()]
|
|
||||||
for expected in expected_specs:
|
|
||||||
self.assertTrue(expected['fields'] in info)
|
|
||||||
|
|
||||||
def _index_test_inheritance(self, InheritFrom):
|
|
||||||
|
|
||||||
class BlogPost(InheritFrom):
|
|
||||||
date = DateTimeField(db_field='addDate', default=datetime.now)
|
|
||||||
category = StringField()
|
|
||||||
tags = ListField(StringField())
|
|
||||||
meta = {
|
|
||||||
'indexes': [
|
|
||||||
'-date',
|
|
||||||
'tags',
|
|
||||||
('category', '-date')
|
|
||||||
],
|
|
||||||
'allow_inheritance': True
|
|
||||||
}
|
|
||||||
|
|
||||||
expected_specs = [{'fields': [('_cls', 1), ('addDate', -1)]},
|
|
||||||
{'fields': [('_cls', 1), ('tags', 1)]},
|
|
||||||
{'fields': [('_cls', 1), ('category', 1),
|
|
||||||
('addDate', -1)]}]
|
|
||||||
self.assertEqual(expected_specs, BlogPost._meta['index_specs'])
|
|
||||||
|
|
||||||
BlogPost.ensure_indexes()
|
|
||||||
info = BlogPost.objects._collection.index_information()
|
|
||||||
# _id, '-date', 'tags', ('cat', 'date')
|
|
||||||
# NB: there is no index on _cls by itself, since
|
|
||||||
# the indices on -date and tags will both contain
|
|
||||||
# _cls as first element in the key
|
|
||||||
self.assertEqual(len(info), 4)
|
|
||||||
info = [value['key'] for key, value in info.iteritems()]
|
|
||||||
for expected in expected_specs:
|
|
||||||
self.assertTrue(expected['fields'] in info)
|
|
||||||
|
|
||||||
class ExtendedBlogPost(BlogPost):
|
|
||||||
title = StringField()
|
|
||||||
meta = {'indexes': ['title']}
|
|
||||||
|
|
||||||
expected_specs.append({'fields': [('_cls', 1), ('title', 1)]})
|
|
||||||
self.assertEqual(expected_specs, ExtendedBlogPost._meta['index_specs'])
|
|
||||||
|
|
||||||
BlogPost.drop_collection()
|
|
||||||
|
|
||||||
ExtendedBlogPost.ensure_indexes()
|
|
||||||
info = ExtendedBlogPost.objects._collection.index_information()
|
|
||||||
info = [value['key'] for key, value in info.iteritems()]
|
|
||||||
for expected in expected_specs:
|
|
||||||
self.assertTrue(expected['fields'] in info)
|
|
||||||
|
|
||||||
def test_indexes_document_inheritance(self):
|
|
||||||
"""Ensure that indexes are used when meta[indexes] is specified for
|
|
||||||
Documents
|
|
||||||
"""
|
|
||||||
self._index_test_inheritance(Document)
|
|
||||||
|
|
||||||
def test_indexes_dynamic_document_inheritance(self):
|
|
||||||
"""Ensure that indexes are used when meta[indexes] is specified for
|
|
||||||
Dynamic Documents
|
|
||||||
"""
|
|
||||||
self._index_test_inheritance(DynamicDocument)
|
|
||||||
|
|
||||||
def test_inherited_index(self):
|
|
||||||
"""Ensure index specs are inhertited correctly"""
|
|
||||||
|
|
||||||
class A(Document):
|
|
||||||
title = StringField()
|
|
||||||
meta = {
|
|
||||||
'indexes': [
|
|
||||||
{
|
|
||||||
'fields': ('title',),
|
|
||||||
},
|
|
||||||
],
|
|
||||||
'allow_inheritance': True,
|
|
||||||
}
|
|
||||||
|
|
||||||
class B(A):
|
|
||||||
description = StringField()
|
|
||||||
|
|
||||||
self.assertEqual(A._meta['index_specs'], B._meta['index_specs'])
|
|
||||||
self.assertEqual([{'fields': [('_cls', 1), ('title', 1)]}],
|
|
||||||
A._meta['index_specs'])
|
|
||||||
|
|
||||||
def test_index_no_cls(self):
|
|
||||||
"""Ensure index specs are inhertited correctly"""
|
|
||||||
|
|
||||||
class A(Document):
|
|
||||||
title = StringField()
|
|
||||||
meta = {
|
|
||||||
'indexes': [
|
|
||||||
{'fields': ('title',), 'cls': False},
|
|
||||||
],
|
|
||||||
'allow_inheritance': True,
|
|
||||||
'index_cls': False
|
|
||||||
}
|
|
||||||
|
|
||||||
self.assertEqual([('title', 1)], A._meta['index_specs'][0]['fields'])
|
|
||||||
A._get_collection().drop_indexes()
|
|
||||||
A.ensure_indexes()
|
|
||||||
info = A._get_collection().index_information()
|
|
||||||
self.assertEqual(len(info.keys()), 2)
|
|
||||||
|
|
||||||
def test_build_index_spec_is_not_destructive(self):
|
|
||||||
|
|
||||||
class MyDoc(Document):
|
|
||||||
keywords = StringField()
|
|
||||||
|
|
||||||
meta = {
|
|
||||||
'indexes': ['keywords'],
|
|
||||||
'allow_inheritance': False
|
|
||||||
}
|
|
||||||
|
|
||||||
self.assertEqual(MyDoc._meta['index_specs'],
|
|
||||||
[{'fields': [('keywords', 1)]}])
|
|
||||||
|
|
||||||
# Force index creation
|
|
||||||
MyDoc.ensure_indexes()
|
|
||||||
|
|
||||||
self.assertEqual(MyDoc._meta['index_specs'],
|
|
||||||
[{'fields': [('keywords', 1)]}])
|
|
||||||
|
|
||||||
def test_embedded_document_index_meta(self):
|
|
||||||
"""Ensure that embedded document indexes are created explicitly
|
|
||||||
"""
|
|
||||||
class Rank(EmbeddedDocument):
|
|
||||||
title = StringField(required=True)
|
|
||||||
|
|
||||||
class Person(Document):
|
|
||||||
name = StringField(required=True)
|
|
||||||
rank = EmbeddedDocumentField(Rank, required=False)
|
|
||||||
|
|
||||||
meta = {
|
|
||||||
'indexes': [
|
|
||||||
'rank.title',
|
|
||||||
],
|
|
||||||
'allow_inheritance': False
|
|
||||||
}
|
|
||||||
|
|
||||||
self.assertEqual([{'fields': [('rank.title', 1)]}],
|
|
||||||
Person._meta['index_specs'])
|
|
||||||
|
|
||||||
Person.drop_collection()
|
|
||||||
|
|
||||||
# Indexes are lazy so use list() to perform query
|
|
||||||
list(Person.objects)
|
|
||||||
info = Person.objects._collection.index_information()
|
|
||||||
info = [value['key'] for key, value in info.iteritems()]
|
|
||||||
self.assertTrue([('rank.title', 1)] in info)
|
|
||||||
|
|
||||||
def test_explicit_geo2d_index(self):
|
|
||||||
"""Ensure that geo2d indexes work when created via meta[indexes]
|
|
||||||
"""
|
|
||||||
class Place(Document):
|
|
||||||
location = DictField()
|
|
||||||
meta = {
|
|
||||||
'allow_inheritance': True,
|
|
||||||
'indexes': [
|
|
||||||
'*location.point',
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
self.assertEqual([{'fields': [('location.point', '2d')]}],
|
|
||||||
Place._meta['index_specs'])
|
|
||||||
|
|
||||||
Place.ensure_indexes()
|
|
||||||
info = Place._get_collection().index_information()
|
|
||||||
info = [value['key'] for key, value in info.iteritems()]
|
|
||||||
self.assertTrue([('location.point', '2d')] in info)
|
|
||||||
|
|
||||||
def test_explicit_geo2d_index_embedded(self):
    """Ensure that geo2d indexes work when created via meta[indexes]
    """
    class EmbeddedLocation(EmbeddedDocument):
        location = DictField()

    class Place(Document):
        current = DictField(field=EmbeddedDocumentField('EmbeddedLocation'))
        meta = {
            'allow_inheritance': True,
            'indexes': [
                '*current.location.point',  # '*' prefix => '2d' geo index
            ]
        }

    # The nested dotted path through the DictField is kept intact.
    self.assertEqual([{'fields': [('current.location.point', '2d')]}],
                     Place._meta['index_specs'])

    Place.ensure_indexes()
    info = Place._get_collection().index_information()
    info = [value['key'] for key, value in info.iteritems()]
    self.assertTrue([('current.location.point', '2d')] in info)
|
|
||||||
|
|
||||||
def test_dictionary_indexes(self):
    """Ensure that indexes are used when meta[indexes] contains
    dictionaries instead of lists.
    """
    class BlogPost(Document):
        date = DateTimeField(db_field='addDate', default=datetime.now)
        category = StringField()
        tags = ListField(StringField())
        meta = {
            'indexes': [
                {'fields': ['-date'], 'unique': True, 'sparse': True},
            ],
        }

    # The spec keeps the extra options ('unique', 'sparse') and maps
    # 'date' to its db_field 'addDate' with a descending direction.
    self.assertEqual([{'fields': [('addDate', -1)], 'unique': True,
                      'sparse': True}],
                     BlogPost._meta['index_specs'])

    BlogPost.drop_collection()

    info = BlogPost.objects._collection.index_information()
    # _id, '-date'
    self.assertEqual(len(info), 2)

    # Indexes are lazy so use list() to perform query
    list(BlogPost.objects)
    info = BlogPost.objects._collection.index_information()
    info = [(value['key'],
             value.get('unique', False),
             value.get('sparse', False))
            for key, value in info.iteritems()]
    self.assertTrue(([('addDate', -1)], True, True) in info)

    BlogPost.drop_collection()
|
|
||||||
|
|
||||||
def test_abstract_index_inheritance(self):
    """Indexes declared on an abstract base class are created for the
    concrete subclass, alongside the subclass's own indexes.
    """
    class UserBase(Document):
        user_guid = StringField(required=True)
        meta = {
            'abstract': True,
            'indexes': ['user_guid'],
            'allow_inheritance': True
        }

    class Person(UserBase):
        name = StringField()

        meta = {
            'indexes': ['name'],
        }
    Person.drop_collection()

    Person(name="test", user_guid='123').save()

    self.assertEqual(1, Person.objects.count())
    info = Person.objects._collection.index_information()
    # Both indexes exist and are _cls-prefixed because inheritance
    # is allowed on the hierarchy.
    self.assertEqual(sorted(info.keys()),
                     ['_cls_1_name_1', '_cls_1_user_guid_1', '_id_'])
|
|
||||||
|
|
||||||
def test_disable_index_creation(self):
    """Tests setting auto_create_index to False on the connection will
    disable any index generation.
    """
    class User(Document):
        meta = {
            'allow_inheritance': True,
            'indexes': ['user_guid'],
            'auto_create_index': False
        }
        user_guid = StringField(required=True)

    class MongoUser(User):
        pass

    User.drop_collection()

    User(user_guid='123').save()
    MongoUser(user_guid='123').save()

    self.assertEqual(2, User.objects.count())
    info = User.objects._collection.index_information()
    # Saving documents must NOT have created the declared index.
    self.assertEqual(info.keys(), ['_id_'])

    # Explicit ensure_indexes() still creates it on demand.
    User.ensure_indexes()
    info = User.objects._collection.index_information()
    self.assertEqual(sorted(info.keys()), ['_cls_1_user_guid_1', '_id_'])
    User.drop_collection()
|
|
||||||
|
|
||||||
def test_embedded_document_index(self):
    """Tests settings an index on an embedded document
    """
    class Date(EmbeddedDocument):
        year = IntField(db_field='yr')

    class BlogPost(Document):
        title = StringField()
        date = EmbeddedDocumentField(Date)

        meta = {
            'indexes': [
                '-date.year'
            ],
        }

    BlogPost.drop_collection()

    info = BlogPost.objects._collection.index_information()
    # The embedded field's db_field ('yr') is used in the index name,
    # with the descending (-1) direction from the '-' prefix.
    self.assertEqual(sorted(info.keys()), ['_id_', 'date.yr_-1'])
    BlogPost.drop_collection()
|
|
||||||
|
|
||||||
def test_list_embedded_document_index(self):
    """Ensure list embedded documents can be indexed
    """
    class Tag(EmbeddedDocument):
        name = StringField(db_field='tag')

    class BlogPost(Document):
        title = StringField()
        tags = ListField(EmbeddedDocumentField(Tag))

        meta = {
            'indexes': [
                'tags.name'
            ]
        }

    BlogPost.drop_collection()

    info = BlogPost.objects._collection.index_information()
    # we don't use _cls in with list fields by default
    self.assertEqual(sorted(info.keys()), ['_id_', 'tags.tag_1'])

    # Saving a document with embedded list entries must work with
    # the multikey index in place.
    post1 = BlogPost(title="Embedded Indexes tests in place",
                     tags=[Tag(name="about"), Tag(name="time")])
    post1.save()
    BlogPost.drop_collection()
|
|
||||||
|
|
||||||
def test_recursive_embedded_objects_dont_break_indexes(self):
    """A self-referential EmbeddedDocumentField must not prevent
    index creation on the owning Document.
    """
    class RecursiveObject(EmbeddedDocument):
        obj = EmbeddedDocumentField('self')

    class RecursiveDocument(Document):
        recursive_obj = EmbeddedDocumentField(RecursiveObject)
        meta = {'allow_inheritance': True}

    # Index creation used to recurse infinitely on 'self' references;
    # it should simply produce the standard _cls index.
    RecursiveDocument.ensure_indexes()
    index_info = RecursiveDocument._get_collection().index_information()
    self.assertEqual(sorted(index_info.keys()), ['_cls_1', '_id_'])
|
|
||||||
|
|
||||||
def test_covered_index(self):
    """Ensure that covered indexes can be used
    """

    class Test(Document):
        a = IntField()

        meta = {
            'indexes': ['a'],
            'allow_inheritance': False
        }

    Test.drop_collection()

    obj = Test(a=1)
    obj.save()

    # Need to be explicit about covered indexes as mongoDB doesn't know if
    # the documents returned might have more keys in that here.
    query_plan = Test.objects(id=obj.id).exclude('a').explain()
    self.assertFalse(query_plan['indexOnly'])

    # Projecting only the indexed _id makes the query covered.
    query_plan = Test.objects(id=obj.id).only('id').explain()
    self.assertTrue(query_plan['indexOnly'])

    # Querying and projecting only 'a' is covered by the 'a' index.
    query_plan = Test.objects(a=1).only('a').exclude('id').explain()
    self.assertTrue(query_plan['indexOnly'])
|
|
||||||
|
|
||||||
def test_index_on_id(self):
    """'id' inside a compound index spec is translated to '_id'."""

    class BlogPost(Document):
        meta = {
            'indexes': [
                ['categories', 'id']
            ]
        }

        title = StringField(required=True)
        description = StringField(required=True)
        categories = ListField()

    BlogPost.drop_collection()

    index_info = BlogPost.objects._collection.index_information()
    # The compound index is named after both keys, with 'id' stored
    # under its real database name '_id'.
    self.assertEqual(index_info['categories_1__id_1']['key'],
                     [('categories', 1), ('_id', 1)])
|
|
||||||
|
|
||||||
def test_hint(self):
    """hint() accepts a list of (field, direction) pairs; other shapes
    raise TypeError.
    """
    class BlogPost(Document):
        tags = ListField(StringField())
        meta = {
            'indexes': [
                'tags',
            ],
        }

    BlogPost.drop_collection()

    for i in xrange(0, 10):
        tags = [("tag %i" % n) for n in xrange(0, i % 2)]
        BlogPost(tags=tags).save()

    self.assertEqual(BlogPost.objects.count(), 10)
    self.assertEqual(BlogPost.objects.hint().count(), 10)
    self.assertEqual(BlogPost.objects.hint([('tags', 1)]).count(), 10)

    # NOTE(review): hinting a non-existent index ('ZZ') is expected to
    # still return all documents here — behaviour of the MongoDB
    # version this suite targets; newer servers reject bad hints.
    self.assertEqual(BlogPost.objects.hint([('ZZ', 1)]).count(), 10)

    # A bare string is not a valid hint.
    def invalid_index():
        BlogPost.objects.hint('tags')
    self.assertRaises(TypeError, invalid_index)

    # A bare tuple (not wrapped in a list) is not a valid hint either.
    def invalid_index_2():
        return BlogPost.objects.hint(('tags', 1))
    self.assertRaises(TypeError, invalid_index_2)
|
|
||||||
|
|
||||||
def test_unique(self):
    """Ensure that uniqueness constraints are applied to fields."""

    class BlogPost(Document):
        title = StringField()
        slug = StringField(unique=True)

    BlogPost.drop_collection()

    first = BlogPost(title='test1', slug='test')
    first.save()

    # A second post reusing the slug must be rejected by the
    # unique index.
    duplicate = BlogPost(title='test2', slug='test')
    self.assertRaises(NotUniqueError, duplicate.save)

    # Ensure backwards compatibilty for errors: NotUniqueError is a
    # subclass of OperationError, so the old exception type still works.
    self.assertRaises(OperationError, duplicate.save)
|
|
||||||
|
|
||||||
def test_unique_with(self):
    """Ensure that unique_with constraints are applied to fields.
    """
    class Date(EmbeddedDocument):
        year = IntField(db_field='yr')

    class BlogPost(Document):
        title = StringField()
        date = EmbeddedDocumentField(Date)
        slug = StringField(unique_with='date.year')

    BlogPost.drop_collection()

    post1 = BlogPost(title='test1', date=Date(year=2009), slug='test')
    post1.save()

    # year is different so won't raise exception
    post2 = BlogPost(title='test2', date=Date(year=2010), slug='test')
    post2.save()

    # Now there will be two docs with the same slug and the same year: fail
    post3 = BlogPost(title='test3', date=Date(year=2010), slug='test')
    self.assertRaises(OperationError, post3.save)

    BlogPost.drop_collection()
|
|
||||||
|
|
||||||
def test_unique_embedded_document(self):
    """Ensure that uniqueness constraints are applied to fields on embedded documents.
    """
    class SubDocument(EmbeddedDocument):
        year = IntField(db_field='yr')
        slug = StringField(unique=True)

    class BlogPost(Document):
        title = StringField()
        sub = EmbeddedDocumentField(SubDocument)

    BlogPost.drop_collection()

    post1 = BlogPost(title='test1',
                     sub=SubDocument(year=2009, slug="test"))
    post1.save()

    # sub.slug is different so won't raise exception
    post2 = BlogPost(title='test2',
                     sub=SubDocument(year=2010, slug='another-slug'))
    post2.save()

    # Now there will be two docs with the same sub.slug
    post3 = BlogPost(title='test3',
                     sub=SubDocument(year=2010, slug='test'))
    self.assertRaises(NotUniqueError, post3.save)

    BlogPost.drop_collection()
|
|
||||||
|
|
||||||
def test_unique_with_embedded_document_and_embedded_unique(self):
    """Ensure that uniqueness constraints are applied to fields on
    embedded documents. And work with unique_with as well.
    """
    class SubDocument(EmbeddedDocument):
        year = IntField(db_field='yr')
        slug = StringField(unique=True)

    class BlogPost(Document):
        title = StringField(unique_with='sub.year')
        sub = EmbeddedDocumentField(SubDocument)

    BlogPost.drop_collection()

    post1 = BlogPost(title='test1',
                     sub=SubDocument(year=2009, slug="test"))
    post1.save()

    # sub.slug is different so won't raise exception
    post2 = BlogPost(title='test2',
                     sub=SubDocument(year=2010, slug='another-slug'))
    post2.save()

    # Now there will be two docs with the same sub.slug
    post3 = BlogPost(title='test3',
                     sub=SubDocument(year=2010, slug='test'))
    self.assertRaises(NotUniqueError, post3.save)

    # Now there will be two docs with the same title and year
    post3 = BlogPost(title='test1',
                     sub=SubDocument(year=2009, slug='test-1'))
    self.assertRaises(NotUniqueError, post3.save)

    BlogPost.drop_collection()
|
|
||||||
|
|
||||||
def test_ttl_indexes(self):
    """Ensure a TTL index (``expireAfterSeconds``) declared in
    meta['indexes'] is created on the collection.
    """

    class Log(Document):
        created = DateTimeField(default=datetime.now)
        meta = {
            'indexes': [
                {'fields': ['created'], 'expireAfterSeconds': 3600}
            ]
        }

    Log.drop_collection()

    # BUG FIX: the original guards combined the major/minor checks with
    # `and` (e.g. ``version_tuple[0] < 2 and version_tuple[1] < 3``),
    # which fails to skip on versions such as pymongo 1.9 (major < 2
    # but minor >= 3).  Lexicographic tuple comparison expresses
    # "older than X.Y" correctly.
    if tuple(pymongo.version_tuple[:2]) < (2, 3):
        raise SkipTest('pymongo needs to be 2.3 or higher for this test')

    connection = get_connection()
    version_array = connection.server_info()['versionArray']
    if tuple(version_array[:2]) < (2, 2):
        raise SkipTest('MongoDB needs to be 2.2 or higher for this test')

    # Indexes are lazy so use list() to perform query
    list(Log.objects)
    info = Log.objects._collection.index_information()
    self.assertEqual(3600,
                     info['created_1']['expireAfterSeconds'])
|
|
||||||
|
|
||||||
def test_unique_and_indexes(self):
    """Ensure that 'unique' constraints aren't overridden by
    meta.indexes.
    """
    class Customer(Document):
        cust_id = IntField(unique=True, required=True)
        meta = {
            'indexes': ['cust_id'],
            'allow_inheritance': False,
        }

    Customer.drop_collection()
    cust = Customer(cust_id=1)
    cust.save()

    # Listing 'cust_id' in meta['indexes'] must not downgrade the
    # field-level unique=True constraint.
    cust_dupe = Customer(cust_id=1)
    try:
        cust_dupe.save()
        raise AssertionError("We saved a dupe!")
    except NotUniqueError:
        pass
    Customer.drop_collection()
|
|
||||||
|
|
||||||
def test_unique_and_primary(self):
    """If you set a field as primary, then unexpected behaviour can occur.
    You won't create a duplicate but you will update an existing document.
    """

    class User(Document):
        name = StringField(primary_key=True, unique=True)
        password = StringField()

    User.drop_collection()

    user = User(name='huangz', password='secret')
    user.save()

    # Saving a second document with the same primary key upserts
    # rather than raising a uniqueness error.
    user = User(name='huangz', password='secret2')
    user.save()

    self.assertEqual(User.objects.count(), 1)
    self.assertEqual(User.objects.get().password, 'secret2')

    User.drop_collection()
|
|
||||||
|
|
||||||
def test_index_with_pk(self):
    """Ensure you can use `pk` as part of a query"""

    class Comment(EmbeddedDocument):
        comment_id = IntField(required=True)

    # Defining an index containing 'pk' used to raise
    # UnboundLocalError during class construction; guard against a
    # regression of that bug.
    try:
        class BlogPost(Document):
            comments = EmbeddedDocumentField(Comment)
            meta = {'indexes': [
                {'fields': ['pk', 'comments.comment_id'],
                 'unique': True}]}
    except UnboundLocalError:
        self.fail('Unbound local error at index + pk definition')

    info = BlogPost.objects._collection.index_information()
    info = [value['key'] for key, value in info.iteritems()]
    # 'pk' is translated to the real database key '_id'.
    index_item = [('_id', 1), ('comments.comment_id', 1)]
    self.assertTrue(index_item in info)
|
|
||||||
|
|
||||||
def test_compound_key_embedded(self):
    """An EmbeddedDocument primary key is stored as a compound _id and
    can be used to look the document up again via pk=<embedded doc>.
    """

    class CompoundKey(EmbeddedDocument):
        name = StringField(required=True)
        term = StringField(required=True)

    class Report(Document):
        key = EmbeddedDocumentField(CompoundKey, primary_key=True)
        text = StringField()

    Report.drop_collection()

    my_key = CompoundKey(name="n", term="ok")
    report = Report(text="OK", key=my_key).save()

    self.assertEqual({'text': 'OK', '_id': {'term': 'ok', 'name': 'n'}},
                     report.to_mongo())
    self.assertEqual(report, Report.objects.get(pk=my_key))
|
|
||||||
|
|
||||||
def test_compound_key_dictfield(self):
    """A DictField primary key is stored as a compound _id and can be
    used to look the document up again via pk=<dict>.
    """

    class Report(Document):
        key = DictField(primary_key=True)
        text = StringField()

    Report.drop_collection()

    compound_id = {"name": "n", "term": "ok"}
    saved = Report(text="OK", key=compound_id).save()

    expected = {'text': 'OK', '_id': {'term': 'ok', 'name': 'n'}}
    self.assertEqual(expected, saved.to_mongo())
    self.assertEqual(saved, Report.objects.get(pk=compound_id))
|
|
||||||
|
|
||||||
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
|
||||||
@@ -1,449 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
import sys
|
|
||||||
sys.path[0:0] = [""]
|
|
||||||
import unittest
|
|
||||||
import warnings
|
|
||||||
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
from tests.fixtures import Base
|
|
||||||
|
|
||||||
from mongoengine import Document, EmbeddedDocument, connect
|
|
||||||
from mongoengine.connection import get_db
|
|
||||||
from mongoengine.fields import (BooleanField, GenericReferenceField,
|
|
||||||
IntField, StringField)
|
|
||||||
|
|
||||||
__all__ = ('InheritanceTest', )
|
|
||||||
|
|
||||||
|
|
||||||
class InheritanceTest(unittest.TestCase):
|
|
||||||
|
|
||||||
def setUp(self):
|
|
||||||
connect(db='mongoenginetest')
|
|
||||||
self.db = get_db()
|
|
||||||
|
|
||||||
def tearDown(self):
|
|
||||||
for collection in self.db.collection_names():
|
|
||||||
if 'system.' in collection:
|
|
||||||
continue
|
|
||||||
self.db.drop_collection(collection)
|
|
||||||
|
|
||||||
def test_superclasses(self):
|
|
||||||
"""Ensure that the correct list of superclasses is assembled.
|
|
||||||
"""
|
|
||||||
class Animal(Document):
|
|
||||||
meta = {'allow_inheritance': True}
|
|
||||||
class Fish(Animal): pass
|
|
||||||
class Guppy(Fish): pass
|
|
||||||
class Mammal(Animal): pass
|
|
||||||
class Dog(Mammal): pass
|
|
||||||
class Human(Mammal): pass
|
|
||||||
|
|
||||||
self.assertEqual(Animal._superclasses, ())
|
|
||||||
self.assertEqual(Fish._superclasses, ('Animal',))
|
|
||||||
self.assertEqual(Guppy._superclasses, ('Animal', 'Animal.Fish'))
|
|
||||||
self.assertEqual(Mammal._superclasses, ('Animal',))
|
|
||||||
self.assertEqual(Dog._superclasses, ('Animal', 'Animal.Mammal'))
|
|
||||||
self.assertEqual(Human._superclasses, ('Animal', 'Animal.Mammal'))
|
|
||||||
|
|
||||||
def test_external_superclasses(self):
|
|
||||||
"""Ensure that the correct list of super classes is assembled when
|
|
||||||
importing part of the model.
|
|
||||||
"""
|
|
||||||
class Animal(Base): pass
|
|
||||||
class Fish(Animal): pass
|
|
||||||
class Guppy(Fish): pass
|
|
||||||
class Mammal(Animal): pass
|
|
||||||
class Dog(Mammal): pass
|
|
||||||
class Human(Mammal): pass
|
|
||||||
|
|
||||||
self.assertEqual(Animal._superclasses, ('Base', ))
|
|
||||||
self.assertEqual(Fish._superclasses, ('Base', 'Base.Animal',))
|
|
||||||
self.assertEqual(Guppy._superclasses, ('Base', 'Base.Animal',
|
|
||||||
'Base.Animal.Fish'))
|
|
||||||
self.assertEqual(Mammal._superclasses, ('Base', 'Base.Animal',))
|
|
||||||
self.assertEqual(Dog._superclasses, ('Base', 'Base.Animal',
|
|
||||||
'Base.Animal.Mammal'))
|
|
||||||
self.assertEqual(Human._superclasses, ('Base', 'Base.Animal',
|
|
||||||
'Base.Animal.Mammal'))
|
|
||||||
|
|
||||||
def test_subclasses(self):
|
|
||||||
"""Ensure that the correct list of _subclasses (subclasses) is
|
|
||||||
assembled.
|
|
||||||
"""
|
|
||||||
class Animal(Document):
|
|
||||||
meta = {'allow_inheritance': True}
|
|
||||||
class Fish(Animal): pass
|
|
||||||
class Guppy(Fish): pass
|
|
||||||
class Mammal(Animal): pass
|
|
||||||
class Dog(Mammal): pass
|
|
||||||
class Human(Mammal): pass
|
|
||||||
|
|
||||||
self.assertEqual(Animal._subclasses, ('Animal',
|
|
||||||
'Animal.Fish',
|
|
||||||
'Animal.Fish.Guppy',
|
|
||||||
'Animal.Mammal',
|
|
||||||
'Animal.Mammal.Dog',
|
|
||||||
'Animal.Mammal.Human'))
|
|
||||||
self.assertEqual(Fish._subclasses, ('Animal.Fish',
|
|
||||||
'Animal.Fish.Guppy',))
|
|
||||||
self.assertEqual(Guppy._subclasses, ('Animal.Fish.Guppy',))
|
|
||||||
self.assertEqual(Mammal._subclasses, ('Animal.Mammal',
|
|
||||||
'Animal.Mammal.Dog',
|
|
||||||
'Animal.Mammal.Human'))
|
|
||||||
self.assertEqual(Human._subclasses, ('Animal.Mammal.Human',))
|
|
||||||
|
|
||||||
def test_external_subclasses(self):
|
|
||||||
"""Ensure that the correct list of _subclasses (subclasses) is
|
|
||||||
assembled when importing part of the model.
|
|
||||||
"""
|
|
||||||
class Animal(Base): pass
|
|
||||||
class Fish(Animal): pass
|
|
||||||
class Guppy(Fish): pass
|
|
||||||
class Mammal(Animal): pass
|
|
||||||
class Dog(Mammal): pass
|
|
||||||
class Human(Mammal): pass
|
|
||||||
|
|
||||||
self.assertEqual(Animal._subclasses, ('Base.Animal',
|
|
||||||
'Base.Animal.Fish',
|
|
||||||
'Base.Animal.Fish.Guppy',
|
|
||||||
'Base.Animal.Mammal',
|
|
||||||
'Base.Animal.Mammal.Dog',
|
|
||||||
'Base.Animal.Mammal.Human'))
|
|
||||||
self.assertEqual(Fish._subclasses, ('Base.Animal.Fish',
|
|
||||||
'Base.Animal.Fish.Guppy',))
|
|
||||||
self.assertEqual(Guppy._subclasses, ('Base.Animal.Fish.Guppy',))
|
|
||||||
self.assertEqual(Mammal._subclasses, ('Base.Animal.Mammal',
|
|
||||||
'Base.Animal.Mammal.Dog',
|
|
||||||
'Base.Animal.Mammal.Human'))
|
|
||||||
self.assertEqual(Human._subclasses, ('Base.Animal.Mammal.Human',))
|
|
||||||
|
|
||||||
def test_dynamic_declarations(self):
|
|
||||||
"""Test that declaring an extra class updates meta data"""
|
|
||||||
|
|
||||||
class Animal(Document):
|
|
||||||
meta = {'allow_inheritance': True}
|
|
||||||
|
|
||||||
self.assertEqual(Animal._superclasses, ())
|
|
||||||
self.assertEqual(Animal._subclasses, ('Animal',))
|
|
||||||
|
|
||||||
# Test dynamically adding a class changes the meta data
|
|
||||||
class Fish(Animal):
|
|
||||||
pass
|
|
||||||
|
|
||||||
self.assertEqual(Animal._superclasses, ())
|
|
||||||
self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish'))
|
|
||||||
|
|
||||||
self.assertEqual(Fish._superclasses, ('Animal', ))
|
|
||||||
self.assertEqual(Fish._subclasses, ('Animal.Fish',))
|
|
||||||
|
|
||||||
# Test dynamically adding an inherited class changes the meta data
|
|
||||||
class Pike(Fish):
|
|
||||||
pass
|
|
||||||
|
|
||||||
self.assertEqual(Animal._superclasses, ())
|
|
||||||
self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish',
|
|
||||||
'Animal.Fish.Pike'))
|
|
||||||
|
|
||||||
self.assertEqual(Fish._superclasses, ('Animal', ))
|
|
||||||
self.assertEqual(Fish._subclasses, ('Animal.Fish', 'Animal.Fish.Pike'))
|
|
||||||
|
|
||||||
self.assertEqual(Pike._superclasses, ('Animal', 'Animal.Fish'))
|
|
||||||
self.assertEqual(Pike._subclasses, ('Animal.Fish.Pike',))
|
|
||||||
|
|
||||||
def test_inheritance_meta_data(self):
|
|
||||||
"""Ensure that document may inherit fields from a superclass document.
|
|
||||||
"""
|
|
||||||
class Person(Document):
|
|
||||||
name = StringField()
|
|
||||||
age = IntField()
|
|
||||||
|
|
||||||
meta = {'allow_inheritance': True}
|
|
||||||
|
|
||||||
class Employee(Person):
|
|
||||||
salary = IntField()
|
|
||||||
|
|
||||||
self.assertEqual(['age', 'id', 'name', 'salary'],
|
|
||||||
sorted(Employee._fields.keys()))
|
|
||||||
self.assertEqual(Employee._get_collection_name(),
|
|
||||||
Person._get_collection_name())
|
|
||||||
|
|
||||||
def test_inheritance_to_mongo_keys(self):
|
|
||||||
"""Ensure that document may inherit fields from a superclass document.
|
|
||||||
"""
|
|
||||||
class Person(Document):
|
|
||||||
name = StringField()
|
|
||||||
age = IntField()
|
|
||||||
|
|
||||||
meta = {'allow_inheritance': True}
|
|
||||||
|
|
||||||
class Employee(Person):
|
|
||||||
salary = IntField()
|
|
||||||
|
|
||||||
self.assertEqual(['age', 'id', 'name', 'salary'],
|
|
||||||
sorted(Employee._fields.keys()))
|
|
||||||
self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(),
|
|
||||||
['_cls', 'name', 'age'])
|
|
||||||
self.assertEqual(Employee(name="Bob", age=35, salary=0).to_mongo().keys(),
|
|
||||||
['_cls', 'name', 'age', 'salary'])
|
|
||||||
self.assertEqual(Employee._get_collection_name(),
|
|
||||||
Person._get_collection_name())
|
|
||||||
|
|
||||||
def test_indexes_and_multiple_inheritance(self):
|
|
||||||
""" Ensure that all of the indexes are created for a document with
|
|
||||||
multiple inheritance.
|
|
||||||
"""
|
|
||||||
|
|
||||||
class A(Document):
|
|
||||||
a = StringField()
|
|
||||||
|
|
||||||
meta = {
|
|
||||||
'allow_inheritance': True,
|
|
||||||
'indexes': ['a']
|
|
||||||
}
|
|
||||||
|
|
||||||
class B(Document):
|
|
||||||
b = StringField()
|
|
||||||
|
|
||||||
meta = {
|
|
||||||
'allow_inheritance': True,
|
|
||||||
'indexes': ['b']
|
|
||||||
}
|
|
||||||
|
|
||||||
class C(A, B):
|
|
||||||
pass
|
|
||||||
|
|
||||||
A.drop_collection()
|
|
||||||
B.drop_collection()
|
|
||||||
C.drop_collection()
|
|
||||||
|
|
||||||
C.ensure_indexes()
|
|
||||||
|
|
||||||
self.assertEqual(
|
|
||||||
sorted([idx['key'] for idx in C._get_collection().index_information().values()]),
|
|
||||||
sorted([[(u'_cls', 1), (u'b', 1)], [(u'_id', 1)], [(u'_cls', 1), (u'a', 1)]])
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_polymorphic_queries(self):
|
|
||||||
"""Ensure that the correct subclasses are returned from a query
|
|
||||||
"""
|
|
||||||
|
|
||||||
class Animal(Document):
|
|
||||||
meta = {'allow_inheritance': True}
|
|
||||||
class Fish(Animal): pass
|
|
||||||
class Mammal(Animal): pass
|
|
||||||
class Dog(Mammal): pass
|
|
||||||
class Human(Mammal): pass
|
|
||||||
|
|
||||||
Animal.drop_collection()
|
|
||||||
|
|
||||||
Animal().save()
|
|
||||||
Fish().save()
|
|
||||||
Mammal().save()
|
|
||||||
Dog().save()
|
|
||||||
Human().save()
|
|
||||||
|
|
||||||
classes = [obj.__class__ for obj in Animal.objects]
|
|
||||||
self.assertEqual(classes, [Animal, Fish, Mammal, Dog, Human])
|
|
||||||
|
|
||||||
classes = [obj.__class__ for obj in Mammal.objects]
|
|
||||||
self.assertEqual(classes, [Mammal, Dog, Human])
|
|
||||||
|
|
||||||
classes = [obj.__class__ for obj in Human.objects]
|
|
||||||
self.assertEqual(classes, [Human])
|
|
||||||
|
|
||||||
def test_allow_inheritance(self):
|
|
||||||
"""Ensure that inheritance may be disabled on simple classes and that
|
|
||||||
_cls and _subclasses will not be used.
|
|
||||||
"""
|
|
||||||
|
|
||||||
class Animal(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
def create_dog_class():
|
|
||||||
class Dog(Animal):
|
|
||||||
pass
|
|
||||||
|
|
||||||
self.assertRaises(ValueError, create_dog_class)
|
|
||||||
|
|
||||||
# Check that _cls etc aren't present on simple documents
|
|
||||||
dog = Animal(name='dog').save()
|
|
||||||
self.assertEqual(dog.to_mongo().keys(), ['_id', 'name'])
|
|
||||||
|
|
||||||
collection = self.db[Animal._get_collection_name()]
|
|
||||||
obj = collection.find_one()
|
|
||||||
self.assertFalse('_cls' in obj)
|
|
||||||
|
|
||||||
def test_cant_turn_off_inheritance_on_subclass(self):
|
|
||||||
"""Ensure if inheritance is on in a subclass you cant turn it off
|
|
||||||
"""
|
|
||||||
|
|
||||||
class Animal(Document):
|
|
||||||
name = StringField()
|
|
||||||
meta = {'allow_inheritance': True}
|
|
||||||
|
|
||||||
def create_mammal_class():
|
|
||||||
class Mammal(Animal):
|
|
||||||
meta = {'allow_inheritance': False}
|
|
||||||
self.assertRaises(ValueError, create_mammal_class)
|
|
||||||
|
|
||||||
def test_allow_inheritance_abstract_document(self):
|
|
||||||
"""Ensure that abstract documents can set inheritance rules and that
|
|
||||||
_cls will not be used.
|
|
||||||
"""
|
|
||||||
class FinalDocument(Document):
|
|
||||||
meta = {'abstract': True,
|
|
||||||
'allow_inheritance': False}
|
|
||||||
|
|
||||||
class Animal(FinalDocument):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
def create_mammal_class():
|
|
||||||
class Mammal(Animal):
|
|
||||||
pass
|
|
||||||
self.assertRaises(ValueError, create_mammal_class)
|
|
||||||
|
|
||||||
# Check that _cls isn't present in simple documents
|
|
||||||
doc = Animal(name='dog')
|
|
||||||
self.assertFalse('_cls' in doc.to_mongo())
|
|
||||||
|
|
||||||
def test_allow_inheritance_embedded_document(self):
|
|
||||||
"""Ensure embedded documents respect inheritance
|
|
||||||
"""
|
|
||||||
|
|
||||||
class Comment(EmbeddedDocument):
|
|
||||||
content = StringField()
|
|
||||||
|
|
||||||
def create_special_comment():
|
|
||||||
class SpecialComment(Comment):
|
|
||||||
pass
|
|
||||||
|
|
||||||
self.assertRaises(ValueError, create_special_comment)
|
|
||||||
|
|
||||||
doc = Comment(content='test')
|
|
||||||
self.assertFalse('_cls' in doc.to_mongo())
|
|
||||||
|
|
||||||
class Comment(EmbeddedDocument):
|
|
||||||
content = StringField()
|
|
||||||
meta = {'allow_inheritance': True}
|
|
||||||
|
|
||||||
doc = Comment(content='test')
|
|
||||||
self.assertTrue('_cls' in doc.to_mongo())
|
|
||||||
|
|
||||||
def test_document_inheritance(self):
|
|
||||||
"""Ensure mutliple inheritance of abstract documents
|
|
||||||
"""
|
|
||||||
class DateCreatedDocument(Document):
|
|
||||||
meta = {
|
|
||||||
'allow_inheritance': True,
|
|
||||||
'abstract': True,
|
|
||||||
}
|
|
||||||
|
|
||||||
class DateUpdatedDocument(Document):
|
|
||||||
meta = {
|
|
||||||
'allow_inheritance': True,
|
|
||||||
'abstract': True,
|
|
||||||
}
|
|
||||||
|
|
||||||
try:
|
|
||||||
class MyDocument(DateCreatedDocument, DateUpdatedDocument):
|
|
||||||
pass
|
|
||||||
except:
|
|
||||||
self.assertTrue(False, "Couldn't create MyDocument class")
|
|
||||||
|
|
||||||
def test_abstract_documents(self):
|
|
||||||
"""Ensure that a document superclass can be marked as abstract
|
|
||||||
thereby not using it as the name for the collection."""
|
|
||||||
|
|
||||||
defaults = {'index_background': True,
|
|
||||||
'index_drop_dups': True,
|
|
||||||
'index_opts': {'hello': 'world'},
|
|
||||||
'allow_inheritance': True,
|
|
||||||
'queryset_class': 'QuerySet',
|
|
||||||
'db_alias': 'myDB',
|
|
||||||
'shard_key': ('hello', 'world')}
|
|
||||||
|
|
||||||
meta_settings = {'abstract': True}
|
|
||||||
meta_settings.update(defaults)
|
|
||||||
|
|
||||||
class Animal(Document):
|
|
||||||
name = StringField()
|
|
||||||
meta = meta_settings
|
|
||||||
|
|
||||||
class Fish(Animal): pass
|
|
||||||
class Guppy(Fish): pass
|
|
||||||
|
|
||||||
class Mammal(Animal):
|
|
||||||
meta = {'abstract': True}
|
|
||||||
class Human(Mammal): pass
|
|
||||||
|
|
||||||
for k, v in defaults.iteritems():
|
|
||||||
for cls in [Animal, Fish, Guppy]:
|
|
||||||
self.assertEqual(cls._meta[k], v)
|
|
||||||
|
|
||||||
self.assertFalse('collection' in Animal._meta)
|
|
||||||
self.assertFalse('collection' in Mammal._meta)
|
|
||||||
|
|
||||||
self.assertEqual(Animal._get_collection_name(), None)
|
|
||||||
self.assertEqual(Mammal._get_collection_name(), None)
|
|
||||||
|
|
||||||
self.assertEqual(Fish._get_collection_name(), 'fish')
|
|
||||||
self.assertEqual(Guppy._get_collection_name(), 'fish')
|
|
||||||
self.assertEqual(Human._get_collection_name(), 'human')
|
|
||||||
|
|
||||||
def create_bad_abstract():
|
|
||||||
class EvilHuman(Human):
|
|
||||||
evil = BooleanField(default=True)
|
|
||||||
meta = {'abstract': True}
|
|
||||||
self.assertRaises(ValueError, create_bad_abstract)
|
|
||||||
|
|
||||||
    def test_inherited_collections(self):
        """Ensure that subclassed documents don't override parents'
        collections
        """

        class Drink(Document):
            name = StringField()
            meta = {'allow_inheritance': True}

        class Drinker(Document):
            drink = GenericReferenceField()

        # Overriding the parent's collection in a subclass should emit a
        # SyntaxWarning.  With the filter set to "error" the warning is
        # raised as an exception, which proves it was triggered; the
        # `else` branch fires only if no warning was raised.
        try:
            warnings.simplefilter("error")

            # Intentionally misspelled throwaway class — it only exists to
            # trigger the warning and is never used afterwards.
            class AcloholicDrink(Drink):
                meta = {'collection': 'booze'}

        except SyntaxWarning:
            # Warning confirmed: silence it and define the class for real.
            warnings.simplefilter("ignore")

            class AlcoholicDrink(Drink):
                meta = {'collection': 'booze'}

        else:
            raise AssertionError("SyntaxWarning should be triggered")

        # Restore the default warning filters for the rest of the suite.
        warnings.resetwarnings()

        Drink.drop_collection()
        AlcoholicDrink.drop_collection()
        Drinker.drop_collection()

        red_bull = Drink(name='Red Bull')
        red_bull.save()

        programmer = Drinker(drink=red_bull)
        programmer.save()

        beer = AlcoholicDrink(name='Beer')
        beer.save()
        real_person = Drinker(drink=beer)
        real_person.save()

        # Generic references resolve correctly even though the subclass
        # lives in its own 'booze' collection.
        self.assertEqual(Drinker.objects[0].drink.name, red_bull.name)
        self.assertEqual(Drinker.objects[1].drink.name, beer.name)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
unittest.main()
|
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -1,85 +0,0 @@
|
|||||||
import sys
|
|
||||||
sys.path[0:0] = [""]
|
|
||||||
|
|
||||||
import unittest
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
from nose.plugins.skip import SkipTest
|
|
||||||
from datetime import datetime
|
|
||||||
from bson import ObjectId
|
|
||||||
|
|
||||||
import pymongo
|
|
||||||
|
|
||||||
from mongoengine import *
|
|
||||||
|
|
||||||
__all__ = ("TestJson",)
|
|
||||||
|
|
||||||
|
|
||||||
class TestJson(unittest.TestCase):
    """Round-trip tests for Document.to_json / Document.from_json."""

    def setUp(self):
        connect(db='mongoenginetest')

    def test_json_simple(self):
        """A document with an embedded document serializes to the expected
        JSON and round-trips through from_json unchanged."""

        class Embedded(EmbeddedDocument):
            string = StringField()

        class Doc(Document):
            string = StringField()
            embedded_field = EmbeddedDocumentField(Embedded)

        doc = Doc(string="Hi", embedded_field=Embedded(string="Hi"))

        # sort_keys + compact separators make the output deterministic so
        # it can be compared against a literal.
        doc_json = doc.to_json(sort_keys=True, separators=(',', ':'))
        expected_json = """{"embedded_field":{"string":"Hi"},"string":"Hi"}"""
        self.assertEqual(doc_json, expected_json)

        self.assertEqual(doc, Doc.from_json(doc.to_json()))

    def test_json_complex(self):
        """Every field type with a default round-trips through JSON."""

        # Need pymongo >= 2.4 (DBRef JSON fix).  The original check
        # (`[0] <= 2 and [1] <= 3`) failed to skip on pymongo 1.x because
        # its minor version could exceed 3; a tuple comparison is correct
        # for all versions.
        if pymongo.version_tuple[:2] < (2, 4):
            raise SkipTest("Need pymongo 2.4 as has a fix for DBRefs")

        class EmbeddedDoc(EmbeddedDocument):
            pass

        class Simple(Document):
            pass

        class Doc(Document):
            string_field = StringField(default='1')
            int_field = IntField(default=1)
            float_field = FloatField(default=1.1)
            boolean_field = BooleanField(default=True)
            datetime_field = DateTimeField(default=datetime.now)
            embedded_document_field = EmbeddedDocumentField(
                EmbeddedDoc, default=lambda: EmbeddedDoc())
            list_field = ListField(default=lambda: [1, 2, 3])
            dict_field = DictField(default=lambda: {"hello": "world"})
            objectid_field = ObjectIdField(default=ObjectId)
            # Defaults that hit the database (save()) run lazily per doc.
            reference_field = ReferenceField(
                Simple, default=lambda: Simple().save())
            map_field = MapField(IntField(), default=lambda: {"simple": 1})
            decimal_field = DecimalField(default=1.0)
            complex_datetime_field = ComplexDateTimeField(default=datetime.now)
            url_field = URLField(default="http://mongoengine.org")
            dynamic_field = DynamicField(default=1)
            generic_reference_field = GenericReferenceField(
                default=lambda: Simple().save())
            sorted_list_field = SortedListField(
                IntField(), default=lambda: [1, 2, 3])
            email_field = EmailField(default="ross@example.com")
            geo_point_field = GeoPointField(default=lambda: [1, 2])
            sequence_field = SequenceField()
            uuid_field = UUIDField(default=uuid.uuid4)
            generic_embedded_document_field = GenericEmbeddedDocumentField(
                default=lambda: EmbeddedDoc())

        doc = Doc()
        self.assertEqual(doc, Doc.from_json(doc.to_json()))
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
unittest.main()
|
|
||||||
356
tests/document/test_class_methods.py
Normal file
356
tests/document/test_class_methods.py
Normal file
@@ -0,0 +1,356 @@
|
|||||||
|
import unittest
|
||||||
|
|
||||||
|
from mongoengine import *
|
||||||
|
from mongoengine.connection import get_db
|
||||||
|
from mongoengine.pymongo_support import list_collection_names
|
||||||
|
from mongoengine.queryset import NULLIFY, PULL
|
||||||
|
|
||||||
|
|
||||||
|
class TestClassMethods(unittest.TestCase):
    """Tests for Document class-level helpers: collection naming/lookup,
    index creation & comparison, and delete-rule registration."""

    def setUp(self):
        connect(db="mongoenginetest")
        self.db = get_db()

        class Person(Document):
            name = StringField()
            age = IntField()

            # Plain class attribute: must NOT be picked up as a field.
            non_field = True

            meta = {"allow_inheritance": True}

        self.Person = Person

    def tearDown(self):
        # Drop every collection so each test starts from an empty db.
        for collection in list_collection_names(self.db):
            self.db.drop_collection(collection)

    def test_definition(self):
        """Ensure that document may be defined using fields.
        """
        # 'id' and '_cls' are implicit (primary key + inheritance marker).
        assert ["_cls", "age", "id", "name"] == sorted(self.Person._fields.keys())
        assert ["IntField", "ObjectIdField", "StringField", "StringField"] == sorted(
            [x.__class__.__name__ for x in self.Person._fields.values()]
        )

    def test_get_db(self):
        """Ensure that get_db returns the expected db.
        """
        db = self.Person._get_db()
        assert self.db == db

    def test_get_collection_name(self):
        """Ensure that get_collection_name returns the expected collection
        name.
        """
        collection_name = "person"
        assert collection_name == self.Person._get_collection_name()

    def test_get_collection(self):
        """Ensure that get_collection returns the expected collection.
        """
        collection_name = "person"
        collection = self.Person._get_collection()
        assert self.db[collection_name] == collection

    def test_drop_collection(self):
        """Ensure that the collection may be dropped from the database.
        """
        collection_name = "person"
        # Saving creates the collection lazily.
        self.Person(name="Test").save()
        assert collection_name in list_collection_names(self.db)

        self.Person.drop_collection()
        assert collection_name not in list_collection_names(self.db)

    def test_register_delete_rule(self):
        """Ensure that register delete rule adds a delete rule to the document
        meta.
        """

        class Job(Document):
            employee = ReferenceField(self.Person)

        assert self.Person._meta.get("delete_rules") is None

        self.Person.register_delete_rule(Job, "employee", NULLIFY)
        assert self.Person._meta["delete_rules"] == {(Job, "employee"): NULLIFY}

    def test_compare_indexes(self):
        """ Ensure that the indexes are properly created and that
        compare_indexes identifies the missing/extra indexes
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()
            tags = StringField()

            meta = {"indexes": [("author", "title")]}

        BlogPost.drop_collection()

        BlogPost.ensure_indexes()
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

        # An index not declared in meta shows up as "extra" ...
        BlogPost.ensure_index(["author", "description"])
        assert BlogPost.compare_indexes() == {
            "missing": [],
            "extra": [[("author", 1), ("description", 1)]],
        }

        BlogPost._get_collection().drop_index("author_1_description_1")
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

        # ... and a declared index dropped server-side shows up as "missing".
        BlogPost._get_collection().drop_index("author_1_title_1")
        assert BlogPost.compare_indexes() == {
            "missing": [[("author", 1), ("title", 1)]],
            "extra": [],
        }

    def test_compare_indexes_inheritance(self):
        """ Ensure that the indexes are properly created and that
        compare_indexes identifies the missing/extra indexes for subclassed
        documents (_cls included)
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()

            meta = {"allow_inheritance": True}

        class BlogPostWithTags(BlogPost):
            tags = StringField()
            tag_list = ListField(StringField())

            meta = {"indexes": [("author", "tags")]}

        BlogPost.drop_collection()

        BlogPost.ensure_indexes()
        BlogPostWithTags.ensure_indexes()
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

        # Subclass indexes are automatically prefixed with _cls.
        BlogPostWithTags.ensure_index(["author", "tag_list"])
        assert BlogPost.compare_indexes() == {
            "missing": [],
            "extra": [[("_cls", 1), ("author", 1), ("tag_list", 1)]],
        }

        BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tag_list_1")
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

        BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tags_1")
        assert BlogPost.compare_indexes() == {
            "missing": [[("_cls", 1), ("author", 1), ("tags", 1)]],
            "extra": [],
        }

    def test_compare_indexes_multiple_subclasses(self):
        """ Ensure that compare_indexes behaves correctly if called from a
        class, which base class has multiple subclasses
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()

            meta = {"allow_inheritance": True}

        class BlogPostWithTags(BlogPost):
            tags = StringField()
            tag_list = ListField(StringField())

            meta = {"indexes": [("author", "tags")]}

        class BlogPostWithCustomField(BlogPost):
            custom = DictField()

            meta = {"indexes": [("author", "custom")]}

        BlogPost.ensure_indexes()
        BlogPostWithTags.ensure_indexes()
        BlogPostWithCustomField.ensure_indexes()

        # Sibling subclasses share a collection; none should report a diff.
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}
        assert BlogPostWithTags.compare_indexes() == {"missing": [], "extra": []}
        assert BlogPostWithCustomField.compare_indexes() == {"missing": [], "extra": []}

    def test_compare_indexes_for_text_indexes(self):
        """ Ensure that compare_indexes behaves correctly for text indexes """

        class Doc(Document):
            a = StringField()
            b = StringField()
            meta = {
                "indexes": [
                    {
                        # '$'-prefixed fields declare a text index.
                        "fields": ["$a", "$b"],
                        "default_language": "english",
                        "weights": {"a": 10, "b": 2},
                    }
                ]
            }

        Doc.drop_collection()
        Doc.ensure_indexes()
        actual = Doc.compare_indexes()
        expected = {"missing": [], "extra": []}
        assert actual == expected

    def test_list_indexes_inheritance(self):
        """ ensure that all of the indexes are listed regardless of the super-
        or sub-class that we call it from
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()

            meta = {"allow_inheritance": True}

        class BlogPostWithTags(BlogPost):
            tags = StringField()

            meta = {"indexes": [("author", "tags")]}

        class BlogPostWithTagsAndExtraText(BlogPostWithTags):
            extra_text = StringField()

            meta = {"indexes": [("author", "tags", "extra_text")]}

        BlogPost.drop_collection()

        BlogPost.ensure_indexes()
        BlogPostWithTags.ensure_indexes()
        BlogPostWithTagsAndExtraText.ensure_indexes()

        # list_indexes() covers the whole hierarchy whichever class calls it.
        assert BlogPost.list_indexes() == BlogPostWithTags.list_indexes()
        assert BlogPost.list_indexes() == BlogPostWithTagsAndExtraText.list_indexes()
        assert BlogPost.list_indexes() == [
            [("_cls", 1), ("author", 1), ("tags", 1)],
            [("_cls", 1), ("author", 1), ("tags", 1), ("extra_text", 1)],
            [(u"_id", 1)],
            [("_cls", 1)],
        ]

    def test_register_delete_rule_inherited(self):
        # A PULL rule declared on a base class must also be registered for
        # every subclass.
        class Vaccine(Document):
            name = StringField(required=True)

            meta = {"indexes": ["name"]}

        class Animal(Document):
            family = StringField(required=True)
            vaccine_made = ListField(
                ReferenceField("Vaccine", reverse_delete_rule=PULL)
            )

            meta = {"allow_inheritance": True, "indexes": ["family"]}

        class Cat(Animal):
            name = StringField(required=True)

        assert Vaccine._meta["delete_rules"][(Animal, "vaccine_made")] == PULL
        assert Vaccine._meta["delete_rules"][(Cat, "vaccine_made")] == PULL

    def test_collection_naming(self):
        """Ensure that a collection with a specified name may be used.
        """

        # Default: snake_cased class name.
        class DefaultNamingTest(Document):
            pass

        assert "default_naming_test" == DefaultNamingTest._get_collection_name()

        # Explicit string override.
        class CustomNamingTest(Document):
            meta = {"collection": "pimp_my_collection"}

        assert "pimp_my_collection" == CustomNamingTest._get_collection_name()

        # Callable override: called with the class itself.
        class DynamicNamingTest(Document):
            meta = {"collection": lambda c: "DYNAMO"}

        assert "DYNAMO" == DynamicNamingTest._get_collection_name()

        # Use Abstract class to handle backwards compatibility
        class BaseDocument(Document):
            meta = {"abstract": True, "collection": lambda c: c.__name__.lower()}

        class OldNamingConvention(BaseDocument):
            pass

        assert "oldnamingconvention" == OldNamingConvention._get_collection_name()

        class InheritedAbstractNamingTest(BaseDocument):
            meta = {"collection": "wibble"}

        assert "wibble" == InheritedAbstractNamingTest._get_collection_name()

        # Mixin tests
        class BaseMixin(object):
            meta = {"collection": lambda c: c.__name__.lower()}

        class OldMixinNamingConvention(Document, BaseMixin):
            pass

        assert (
            "oldmixinnamingconvention"
            == OldMixinNamingConvention._get_collection_name()
        )

        class BaseMixin(object):
            meta = {"collection": lambda c: c.__name__.lower()}

        class BaseDocument(Document, BaseMixin):
            meta = {"allow_inheritance": True}

        class MyDocument(BaseDocument):
            pass

        # The concrete base names the collection; subclasses inherit it.
        assert "basedocument" == MyDocument._get_collection_name()

    def test_custom_collection_name_operations(self):
        """Ensure that a collection with a specified name is used as expected.
        """
        collection_name = "personCollTest"

        class Person(Document):
            name = StringField()
            meta = {"collection": collection_name}

        Person(name="Test User").save()
        assert collection_name in list_collection_names(self.db)

        # Raw pymongo read from the named collection sees the saved doc.
        user_obj = self.db[collection_name].find_one()
        assert user_obj["name"] == "Test User"

        user_obj = Person.objects[0]
        assert user_obj.name == "Test User"

        Person.drop_collection()
        assert collection_name not in list_collection_names(self.db)

    def test_collection_name_and_primary(self):
        """Ensure that a collection with a specified name may be used.
        """

        class Person(Document):
            # Custom primary key combined with a custom collection name.
            name = StringField(primary_key=True)
            meta = {"collection": "app"}

        Person(name="Test User").save()

        user_obj = Person.objects.first()
        assert user_obj.name == "Test User"

        Person.drop_collection()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
unittest.main()
|
||||||
953
tests/document/test_delta.py
Normal file
953
tests/document/test_delta.py
Normal file
@@ -0,0 +1,953 @@
|
|||||||
|
import unittest
|
||||||
|
|
||||||
|
from bson import SON
|
||||||
|
from mongoengine import *
|
||||||
|
from mongoengine.pymongo_support import list_collection_names
|
||||||
|
from tests.utils import MongoDBTestCase
|
||||||
|
|
||||||
|
|
||||||
|
class TestDelta(MongoDBTestCase):
|
||||||
|
def setUp(self):
|
||||||
|
super(TestDelta, self).setUp()
|
||||||
|
|
||||||
|
class Person(Document):
|
||||||
|
name = StringField()
|
||||||
|
age = IntField()
|
||||||
|
|
||||||
|
non_field = True
|
||||||
|
|
||||||
|
meta = {"allow_inheritance": True}
|
||||||
|
|
||||||
|
self.Person = Person
|
||||||
|
|
||||||
|
def tearDown(self):
|
||||||
|
for collection in list_collection_names(self.db):
|
||||||
|
self.db.drop_collection(collection)
|
||||||
|
|
||||||
|
def test_delta(self):
|
||||||
|
self.delta(Document)
|
||||||
|
self.delta(DynamicDocument)
|
||||||
|
|
||||||
|
    @staticmethod
    def delta(DocClass):
        """Exercise change tracking on a flat document of class *DocClass*.

        `_delta()` returns a two-tuple; as the assertions below show, the
        first element holds the values to set and the second the fields to
        unset.
        """
        class Doc(DocClass):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        # Freshly loaded document: nothing changed yet.
        doc = Doc.objects.first()
        assert doc._get_changed_fields() == []
        assert doc._delta() == ({}, {})

        doc.string_field = "hello"
        assert doc._get_changed_fields() == ["string_field"]
        assert doc._delta() == ({"string_field": "hello"}, {})

        # Reset tracking manually between field types.
        doc._changed_fields = []
        doc.int_field = 1
        assert doc._get_changed_fields() == ["int_field"]
        assert doc._delta() == ({"int_field": 1}, {})

        doc._changed_fields = []
        dict_value = {"hello": "world", "ping": "pong"}
        doc.dict_field = dict_value
        assert doc._get_changed_fields() == ["dict_field"]
        assert doc._delta() == ({"dict_field": dict_value}, {})

        doc._changed_fields = []
        list_value = ["1", 2, {"hello": "world"}]
        doc.list_field = list_value
        assert doc._get_changed_fields() == ["list_field"]
        assert doc._delta() == ({"list_field": list_value}, {})

        # Test unsetting
        # Assigning an empty container produces an unset, not a set.
        doc._changed_fields = []
        doc.dict_field = {}
        assert doc._get_changed_fields() == ["dict_field"]
        assert doc._delta() == ({}, {"dict_field": 1})

        doc._changed_fields = []
        doc.list_field = []
        assert doc._get_changed_fields() == ["list_field"]
        assert doc._delta() == ({}, {"list_field": 1})
|
||||||
|
|
||||||
|
def test_delta_recursive(self):
|
||||||
|
self.delta_recursive(Document, EmbeddedDocument)
|
||||||
|
self.delta_recursive(DynamicDocument, EmbeddedDocument)
|
||||||
|
self.delta_recursive(Document, DynamicEmbeddedDocument)
|
||||||
|
self.delta_recursive(DynamicDocument, DynamicEmbeddedDocument)
|
||||||
|
|
||||||
|
    def delta_recursive(self, DocClass, EmbeddedClass):
        """Exercise change tracking through an embedded document, including
        nested lists/dicts, dotted change paths, and unsets."""
        class Embedded(EmbeddedClass):
            id = StringField()
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()

        class Doc(DocClass):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()
            embedded_field = EmbeddedDocumentField(Embedded)

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        assert doc._get_changed_fields() == []
        assert doc._delta() == ({}, {})

        # Assigning a whole embedded document marks the parent field changed.
        embedded_1 = Embedded()
        embedded_1.id = "010101"
        embedded_1.string_field = "hello"
        embedded_1.int_field = 1
        embedded_1.dict_field = {"hello": "world"}
        embedded_1.list_field = ["1", 2, {"hello": "world"}]
        doc.embedded_field = embedded_1

        assert doc._get_changed_fields() == ["embedded_field"]

        embedded_delta = {
            "id": "010101",
            "string_field": "hello",
            "int_field": 1,
            "dict_field": {"hello": "world"},
            "list_field": ["1", 2, {"hello": "world"}],
        }
        assert doc.embedded_field._delta() == (embedded_delta, {})
        assert doc._delta() == ({"embedded_field": embedded_delta}, {})

        doc.save()
        doc = doc.reload(10)

        # Emptying a nested container unsets it via a dotted path.
        doc.embedded_field.dict_field = {}
        assert doc._get_changed_fields() == ["embedded_field.dict_field"]
        assert doc.embedded_field._delta() == ({}, {"dict_field": 1})
        assert doc._delta() == ({}, {"embedded_field.dict_field": 1})
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.dict_field == {}

        doc.embedded_field.list_field = []
        assert doc._get_changed_fields() == ["embedded_field.list_field"]
        assert doc.embedded_field._delta() == ({}, {"list_field": 1})
        assert doc._delta() == ({}, {"embedded_field.list_field": 1})
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field == []

        # An embedded document inside a list is serialized with its _cls.
        embedded_2 = Embedded()
        embedded_2.string_field = "hello"
        embedded_2.int_field = 1
        embedded_2.dict_field = {"hello": "world"}
        embedded_2.list_field = ["1", 2, {"hello": "world"}]

        doc.embedded_field.list_field = ["1", 2, embedded_2]
        assert doc._get_changed_fields() == ["embedded_field.list_field"]

        assert doc.embedded_field._delta() == (
            {
                "list_field": [
                    "1",
                    2,
                    {
                        "_cls": "Embedded",
                        "string_field": "hello",
                        "dict_field": {"hello": "world"},
                        "int_field": 1,
                        "list_field": ["1", 2, {"hello": "world"}],
                    },
                ]
            },
            {},
        )

        assert doc._delta() == (
            {
                "embedded_field.list_field": [
                    "1",
                    2,
                    {
                        "_cls": "Embedded",
                        "string_field": "hello",
                        "dict_field": {"hello": "world"},
                        "int_field": 1,
                        "list_field": ["1", 2, {"hello": "world"}],
                    },
                ]
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)

        assert doc.embedded_field.list_field[0] == "1"
        assert doc.embedded_field.list_field[1] == 2
        for k in doc.embedded_field.list_field[2]._fields:
            assert doc.embedded_field.list_field[2][k] == embedded_2[k]

        # Changing one field of a list element yields an indexed dotted path.
        doc.embedded_field.list_field[2].string_field = "world"
        assert doc._get_changed_fields() == ["embedded_field.list_field.2.string_field"]
        assert doc.embedded_field._delta() == (
            {"list_field.2.string_field": "world"},
            {},
        )
        assert doc._delta() == (
            {"embedded_field.list_field.2.string_field": "world"},
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].string_field == "world"

        # Test multiple assignments
        # Re-assigning the element itself collapses the change to the slot.
        doc.embedded_field.list_field[2].string_field = "hello world"
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        assert doc._get_changed_fields() == ["embedded_field.list_field.2"]
        assert doc.embedded_field._delta() == (
            {
                "list_field.2": {
                    "_cls": "Embedded",
                    "string_field": "hello world",
                    "int_field": 1,
                    "list_field": ["1", 2, {"hello": "world"}],
                    "dict_field": {"hello": "world"},
                }
            },
            {},
        )
        assert doc._delta() == (
            {
                "embedded_field.list_field.2": {
                    "_cls": "Embedded",
                    "string_field": "hello world",
                    "int_field": 1,
                    "list_field": ["1", 2, {"hello": "world"}],
                    "dict_field": {"hello": "world"},
                }
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].string_field == "hello world"

        # Test list native methods
        # In-place list mutation rewrites the whole list in the delta.
        doc.embedded_field.list_field[2].list_field.pop(0)
        assert doc._delta() == (
            {"embedded_field.list_field.2.list_field": [2, {"hello": "world"}]},
            {},
        )
        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        assert doc._delta() == (
            {"embedded_field.list_field.2.list_field": [2, {"hello": "world"}, 1]},
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].list_field == [2, {"hello": "world"}, 1]

        # key=str gives a deterministic order for the mixed-type list.
        doc.embedded_field.list_field[2].list_field.sort(key=str)
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].list_field == [1, 2, {"hello": "world"}]

        # Deleting a nested dict key unsets the full dotted path.
        del doc.embedded_field.list_field[2].list_field[2]["hello"]
        assert doc._delta() == (
            {},
            {"embedded_field.list_field.2.list_field.2.hello": 1},
        )
        doc.save()
        doc = doc.reload(10)

        del doc.embedded_field.list_field[2].list_field
        assert doc._delta() == ({}, {"embedded_field.list_field.2.list_field": 1})

        doc.save()
        doc = doc.reload(10)

        # Embedded documents stored inside a DictField are tracked too.
        doc.dict_field["Embedded"] = embedded_1
        doc.save()
        doc = doc.reload(10)

        doc.dict_field["Embedded"].string_field = "Hello World"
        assert doc._get_changed_fields() == ["dict_field.Embedded.string_field"]
        assert doc._delta() == ({"dict_field.Embedded.string_field": "Hello World"}, {})
|
||||||
|
|
||||||
|
def test_circular_reference_deltas(self):
|
||||||
|
self.circular_reference_deltas(Document, Document)
|
||||||
|
self.circular_reference_deltas(Document, DynamicDocument)
|
||||||
|
self.circular_reference_deltas(DynamicDocument, Document)
|
||||||
|
self.circular_reference_deltas(DynamicDocument, DynamicDocument)
|
||||||
|
|
||||||
|
    def circular_reference_deltas(self, DocClass1, DocClass2):
        """Verify that two documents referencing each other save and resolve
        correctly for the given document classes."""
        class Person(DocClass1):
            name = StringField()
            owns = ListField(ReferenceField("Organization"))

        class Organization(DocClass2):
            name = StringField()
            owner = ReferenceField("Person")

        Person.drop_collection()
        Organization.drop_collection()

        # Both must exist before the circular links can be saved.
        person = Person(name="owner").save()
        organization = Organization(name="company").save()

        person.owns.append(organization)
        organization.owner = person

        person.save()
        organization.save()

        p = Person.objects[0].select_related()
        o = Organization.objects.first()
        assert p.owns[0] == o
        assert o.owner == p
|
||||||
|
|
||||||
|
def test_circular_reference_deltas_2(self):
|
||||||
|
self.circular_reference_deltas_2(Document, Document)
|
||||||
|
self.circular_reference_deltas_2(Document, DynamicDocument)
|
||||||
|
self.circular_reference_deltas_2(DynamicDocument, Document)
|
||||||
|
self.circular_reference_deltas_2(DynamicDocument, DynamicDocument)
|
||||||
|
|
||||||
|
    def circular_reference_deltas_2(self, DocClass1, DocClass2, dbref=True):
        """Verify a three-way reference cycle (owner / company / employee)
        saves and resolves; *dbref* selects DBRef vs plain-id storage.

        Returns the saved (person, organization, employee) for callers that
        want to make further assertions.
        """
        class Person(DocClass1):
            name = StringField()
            owns = ListField(ReferenceField("Organization", dbref=dbref))
            employer = ReferenceField("Organization", dbref=dbref)

        class Organization(DocClass2):
            name = StringField()
            owner = ReferenceField("Person", dbref=dbref)
            employees = ListField(ReferenceField("Person", dbref=dbref))

        Person.drop_collection()
        Organization.drop_collection()

        # All three must exist before the circular links can be saved.
        person = Person(name="owner").save()
        employee = Person(name="employee").save()
        organization = Organization(name="company").save()

        person.owns.append(organization)
        organization.owner = person

        organization.employees.append(employee)
        employee.employer = organization

        person.save()
        organization.save()
        employee.save()

        p = Person.objects.get(name="owner")
        e = Person.objects.get(name="employee")
        o = Organization.objects.first()

        assert p.owns[0] == o
        assert o.owner == p
        assert e.employer == o

        return person, organization, employee
|
||||||
|
|
||||||
|
def test_delta_db_field(self):
|
||||||
|
self.delta_db_field(Document)
|
||||||
|
self.delta_db_field(DynamicDocument)
|
||||||
|
|
||||||
|
def delta_db_field(self, DocClass):
    """Verify that _delta()/_get_changed_fields() report custom db_field
    names rather than the python attribute names."""

    class Doc(DocClass):
        string_field = StringField(db_field="db_string_field")
        int_field = IntField(db_field="db_int_field")
        dict_field = DictField(db_field="db_dict_field")
        list_field = ListField(db_field="db_list_field")

    Doc.drop_collection()
    Doc().save()

    doc = Doc.objects.first()
    assert doc._get_changed_fields() == []
    assert doc._delta() == ({}, {})

    # Each field type surfaces its db_field name in the delta.
    doc.string_field = "hello"
    assert doc._get_changed_fields() == ["db_string_field"]
    assert doc._delta() == ({"db_string_field": "hello"}, {})

    doc._changed_fields = []
    doc.int_field = 1
    assert doc._get_changed_fields() == ["db_int_field"]
    assert doc._delta() == ({"db_int_field": 1}, {})

    doc._changed_fields = []
    dict_value = {"hello": "world", "ping": "pong"}
    doc.dict_field = dict_value
    assert doc._get_changed_fields() == ["db_dict_field"]
    assert doc._delta() == ({"db_dict_field": dict_value}, {})

    doc._changed_fields = []
    list_value = ["1", 2, {"hello": "world"}]
    doc.list_field = list_value
    assert doc._get_changed_fields() == ["db_list_field"]
    assert doc._delta() == ({"db_list_field": list_value}, {})

    # Assigning empty containers should translate into removals ($unset).
    doc._changed_fields = []
    doc.dict_field = {}
    assert doc._get_changed_fields() == ["db_dict_field"]
    assert doc._delta() == ({}, {"db_dict_field": 1})

    doc._changed_fields = []
    doc.list_field = []
    assert doc._get_changed_fields() == ["db_list_field"]
    assert doc._delta() == ({}, {"db_list_field": 1})

    # Finally make sure the data actually persists through save/reload.
    doc = Doc()
    doc.save()

    doc.string_field = "hello"
    doc.int_field = 1
    doc.dict_field = {"hello": "world"}
    doc.list_field = ["1", 2, {"hello": "world"}]
    doc.save()
    doc = doc.reload(10)

    assert doc.string_field == "hello"
    assert doc.int_field == 1
    assert doc.dict_field == {"hello": "world"}
    assert doc.list_field == ["1", 2, {"hello": "world"}]
|
||||||
|
|
||||||
|
def test_delta_recursive_db_field_on_doc_and_embeddeddoc(self):
    """Document + EmbeddedDocument flavour of the recursive scenario."""
    self.delta_recursive_db_field(Document, EmbeddedDocument)
|
||||||
|
|
||||||
|
def test_delta_recursive_db_field_on_doc_and_dynamicembeddeddoc(self):
    """Document + DynamicEmbeddedDocument flavour of the recursive scenario."""
    self.delta_recursive_db_field(Document, DynamicEmbeddedDocument)
|
||||||
|
|
||||||
|
def test_delta_recursive_db_field_on_dynamicdoc_and_embeddeddoc(self):
    """DynamicDocument + EmbeddedDocument flavour of the recursive scenario."""
    self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument)
|
||||||
|
|
||||||
|
def test_delta_recursive_db_field_on_dynamicdoc_and_dynamicembeddeddoc(self):
    """DynamicDocument + DynamicEmbeddedDocument flavour of the recursive scenario."""
    self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument)
|
||||||
|
|
||||||
|
@staticmethod
def delta_recursive_db_field(DocClass, EmbeddedClass):
    """Exercise delta tracking through an embedded document where every
    field (and the embedding field itself) uses a custom db_field name:
    all reported change paths must be dotted db_field paths."""

    class Embedded(EmbeddedClass):
        string_field = StringField(db_field="db_string_field")
        int_field = IntField(db_field="db_int_field")
        dict_field = DictField(db_field="db_dict_field")
        list_field = ListField(db_field="db_list_field")

    class Doc(DocClass):
        string_field = StringField(db_field="db_string_field")
        int_field = IntField(db_field="db_int_field")
        dict_field = DictField(db_field="db_dict_field")
        list_field = ListField(db_field="db_list_field")
        embedded_field = EmbeddedDocumentField(
            Embedded, db_field="db_embedded_field"
        )

    Doc.drop_collection()
    doc = Doc()
    doc.save()

    doc = Doc.objects.first()
    assert doc._get_changed_fields() == []
    assert doc._delta() == ({}, {})

    # Assigning a whole embedded document marks only the embedding field.
    embedded_1 = Embedded()
    embedded_1.string_field = "hello"
    embedded_1.int_field = 1
    embedded_1.dict_field = {"hello": "world"}
    embedded_1.list_field = ["1", 2, {"hello": "world"}]
    doc.embedded_field = embedded_1

    assert doc._get_changed_fields() == ["db_embedded_field"]

    embedded_delta = {
        "db_string_field": "hello",
        "db_int_field": 1,
        "db_dict_field": {"hello": "world"},
        "db_list_field": ["1", 2, {"hello": "world"}],
    }
    assert doc.embedded_field._delta() == (embedded_delta, {})
    assert doc._delta() == ({"db_embedded_field": embedded_delta}, {})

    doc.save()
    doc = doc.reload(10)

    # Emptying nested containers produces dotted $unset paths.
    doc.embedded_field.dict_field = {}
    assert doc._get_changed_fields() == ["db_embedded_field.db_dict_field"]
    assert doc.embedded_field._delta() == ({}, {"db_dict_field": 1})
    assert doc._delta() == ({}, {"db_embedded_field.db_dict_field": 1})
    doc.save()
    doc = doc.reload(10)
    assert doc.embedded_field.dict_field == {}

    assert doc._get_changed_fields() == []
    doc.embedded_field.list_field = []
    assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"]
    assert doc.embedded_field._delta() == ({}, {"db_list_field": 1})
    assert doc._delta() == ({}, {"db_embedded_field.db_list_field": 1})
    doc.save()
    doc = doc.reload(10)
    assert doc.embedded_field.list_field == []

    # Embedded documents inside a list serialize with their _cls marker.
    embedded_2 = Embedded()
    embedded_2.string_field = "hello"
    embedded_2.int_field = 1
    embedded_2.dict_field = {"hello": "world"}
    embedded_2.list_field = ["1", 2, {"hello": "world"}]

    doc.embedded_field.list_field = ["1", 2, embedded_2]
    assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"]

    embedded_2_son = {
        "_cls": "Embedded",
        "db_string_field": "hello",
        "db_dict_field": {"hello": "world"},
        "db_int_field": 1,
        "db_list_field": ["1", 2, {"hello": "world"}],
    }
    assert doc.embedded_field._delta() == (
        {"db_list_field": ["1", 2, embedded_2_son]},
        {},
    )
    assert doc._delta() == (
        {"db_embedded_field.db_list_field": ["1", 2, embedded_2_son]},
        {},
    )
    doc.save()
    assert doc._get_changed_fields() == []
    doc = doc.reload(10)

    assert doc.embedded_field.list_field[0] == "1"
    assert doc.embedded_field.list_field[1] == 2
    # Field-by-field comparison of the stored embedded document.
    for k in doc.embedded_field.list_field[2]._fields:
        assert doc.embedded_field.list_field[2][k] == embedded_2[k]

    # Changing a scalar inside a listed embedded doc yields a deep path.
    doc.embedded_field.list_field[2].string_field = "world"
    assert doc._get_changed_fields() == [
        "db_embedded_field.db_list_field.2.db_string_field"
    ]
    assert doc.embedded_field._delta() == (
        {"db_list_field.2.db_string_field": "world"},
        {},
    )
    assert doc._delta() == (
        {"db_embedded_field.db_list_field.2.db_string_field": "world"},
        {},
    )
    doc.save()
    doc = doc.reload(10)
    assert doc.embedded_field.list_field[2].string_field == "world"

    # Re-assigning a list slot marks the whole slot as changed.
    doc.embedded_field.list_field[2].string_field = "hello world"
    doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
    assert doc._get_changed_fields() == ["db_embedded_field.db_list_field.2"]
    updated_slot = {
        "_cls": "Embedded",
        "db_string_field": "hello world",
        "db_int_field": 1,
        "db_list_field": ["1", 2, {"hello": "world"}],
        "db_dict_field": {"hello": "world"},
    }
    assert doc.embedded_field._delta() == (
        {"db_list_field.2": updated_slot},
        {},
    )
    assert doc._delta() == (
        {"db_embedded_field.db_list_field.2": updated_slot},
        {},
    )
    doc.save()
    doc = doc.reload(10)
    assert doc.embedded_field.list_field[2].string_field == "hello world"

    # Native list mutations are tracked as full-list rewrites.
    doc.embedded_field.list_field[2].list_field.pop(0)
    assert doc._delta() == (
        {
            "db_embedded_field.db_list_field.2.db_list_field": [
                2,
                {"hello": "world"},
            ]
        },
        {},
    )
    doc.save()
    doc = doc.reload(10)

    doc.embedded_field.list_field[2].list_field.append(1)
    assert doc._delta() == (
        {
            "db_embedded_field.db_list_field.2.db_list_field": [
                2,
                {"hello": "world"},
                1,
            ]
        },
        {},
    )
    doc.save()
    doc = doc.reload(10)
    assert doc.embedded_field.list_field[2].list_field == [2, {"hello": "world"}, 1]

    doc.embedded_field.list_field[2].list_field.sort(key=str)
    doc.save()
    doc = doc.reload(10)
    assert doc.embedded_field.list_field[2].list_field == [1, 2, {"hello": "world"}]

    # Deleting a dict key / a whole field produces $unset entries.
    del doc.embedded_field.list_field[2].list_field[2]["hello"]
    assert doc._delta() == (
        {},
        {"db_embedded_field.db_list_field.2.db_list_field.2.hello": 1},
    )
    doc.save()
    doc = doc.reload(10)

    assert doc._delta() == ({}, {})
    del doc.embedded_field.list_field[2].list_field
    assert doc._delta() == (
        {},
        {"db_embedded_field.db_list_field.2.db_list_field": 1},
    )
|
||||||
|
|
||||||
|
def test_delta_for_dynamic_documents(self):
    """Dynamic attributes participate in delta tracking like declared ones."""

    class Person(DynamicDocument):
        name = StringField()
        meta = {"allow_inheritance": True}

    Person.drop_collection()

    # An unsaved document reports every field, dynamic ones included.
    p = Person(name="James", age=34)
    assert p._delta() == (
        SON([("_cls", "Person"), ("name", "James"), ("age", 34)]),
        {},
    )

    # Setting then deleting a dynamic attr leaves the delta unchanged.
    p.doc = 123
    del p.doc
    assert p._delta() == (
        SON([("_cls", "Person"), ("name", "James"), ("age", 34)]),
        {},
    )

    p = Person()
    p.name = "Dean"
    p.age = 22
    p.save()

    p.age = 24
    assert p.age == 24
    assert p._get_changed_fields() == ["age"]
    assert p._delta() == ({"age": 24}, {})

    # The same holds for a freshly queried document.
    p = Person.objects(age=22).get()
    p.age = 24
    assert p.age == 24
    assert p._get_changed_fields() == ["age"]
    assert p._delta() == ({"age": 24}, {})

    p.save()
    assert 1 == Person.objects(age=24).count()
|
||||||
|
|
||||||
|
def test_dynamic_delta(self):
    """Deltas on a bare DynamicDocument track attributes by name."""

    class Doc(DynamicDocument):
        pass

    Doc.drop_collection()
    Doc().save()

    doc = Doc.objects.first()
    assert doc._get_changed_fields() == []
    assert doc._delta() == ({}, {})

    # Dynamic fields are tracked under their attribute names.
    doc.string_field = "hello"
    assert doc._get_changed_fields() == ["string_field"]
    assert doc._delta() == ({"string_field": "hello"}, {})

    doc._changed_fields = []
    doc.int_field = 1
    assert doc._get_changed_fields() == ["int_field"]
    assert doc._delta() == ({"int_field": 1}, {})

    doc._changed_fields = []
    dict_value = {"hello": "world", "ping": "pong"}
    doc.dict_field = dict_value
    assert doc._get_changed_fields() == ["dict_field"]
    assert doc._delta() == ({"dict_field": dict_value}, {})

    doc._changed_fields = []
    list_value = ["1", 2, {"hello": "world"}]
    doc.list_field = list_value
    assert doc._get_changed_fields() == ["list_field"]
    assert doc._delta() == ({"list_field": list_value}, {})

    # Emptying containers turns the change into a removal ($unset).
    doc._changed_fields = []
    doc.dict_field = {}
    assert doc._get_changed_fields() == ["dict_field"]
    assert doc._delta() == ({}, {"dict_field": 1})

    doc._changed_fields = []
    doc.list_field = []
    assert doc._get_changed_fields() == ["list_field"]
    assert doc._delta() == ({}, {"list_field": 1})
|
||||||
|
|
||||||
|
def test_delta_with_dbref_true(self):
    """Referenced-document edits must not dirty the referencing side (dbref=True)."""
    person, organization, employee = self.circular_reference_deltas_2(
        Document, Document, True
    )
    employee.name = "test"

    assert organization._get_changed_fields() == []

    updates, removals = organization._delta()
    assert removals == {}
    assert updates == {}

    # Mutating the organization's own list does produce an update.
    organization.employees.append(person)
    updates, removals = organization._delta()
    assert removals == {}
    assert "employees" in updates
|
||||||
|
|
||||||
|
def test_delta_with_dbref_false(self):
    """Referenced-document edits must not dirty the referencing side (dbref=False)."""
    person, organization, employee = self.circular_reference_deltas_2(
        Document, Document, False
    )
    employee.name = "test"

    assert organization._get_changed_fields() == []

    updates, removals = organization._delta()
    assert removals == {}
    assert updates == {}

    # Mutating the organization's own list does produce an update.
    organization.employees.append(person)
    updates, removals = organization._delta()
    assert removals == {}
    assert "employees" in updates
|
||||||
|
|
||||||
|
def test_nested_nested_fields_mark_as_changed(self):
    """A change two MapField levels deep propagates up as a dotted path."""

    class EmbeddedDoc(EmbeddedDocument):
        name = StringField()

    class MyDoc(Document):
        subs = MapField(MapField(EmbeddedDocumentField(EmbeddedDoc)))
        name = StringField()

    MyDoc.drop_collection()

    MyDoc(name="testcase1", subs={"a": {"b": EmbeddedDoc(name="foo")}}).save()

    mydoc = MyDoc.objects.first()
    subdoc = mydoc.subs["a"]["b"]
    subdoc.name = "bar"

    # Locally the field name; on the parent the fully-qualified path.
    assert subdoc._get_changed_fields() == ["name"]
    assert mydoc._get_changed_fields() == ["subs.a.b.name"]

    mydoc._clear_changed_fields()
    assert mydoc._get_changed_fields() == []
|
||||||
|
|
||||||
|
def test_nested_nested_fields_db_field_set__gets_mark_as_changed_and_cleaned(self):
    """Changed-field paths use db_field names for embedded documents."""

    class EmbeddedDoc(EmbeddedDocument):
        name = StringField(db_field="db_name")

    class MyDoc(Document):
        embed = EmbeddedDocumentField(EmbeddedDoc, db_field="db_embed")
        name = StringField(db_field="db_name")

    MyDoc.drop_collection()

    MyDoc(name="testcase1", embed=EmbeddedDoc(name="foo")).save()

    # Mutating a nested attribute reports the dotted db_field path.
    mydoc = MyDoc.objects.first()
    mydoc.embed.name = "foo1"

    assert mydoc.embed._get_changed_fields() == ["db_name"]
    assert mydoc._get_changed_fields() == ["db_embed.db_name"]

    # Replacing the embedded doc wholesale marks the embedding field only.
    mydoc = MyDoc.objects.first()
    embed = EmbeddedDoc(name="foo2")
    embed.name = "bar"
    mydoc.embed = embed

    assert embed._get_changed_fields() == ["db_name"]
    assert mydoc._get_changed_fields() == ["db_embed"]

    mydoc._clear_changed_fields()
    assert mydoc._get_changed_fields() == []
|
||||||
|
|
||||||
|
def test_lower_level_mark_as_changed(self):
    """Once a map entry is marked changed, deeper edits don't add paths."""

    class EmbeddedDoc(EmbeddedDocument):
        name = StringField()

    class MyDoc(Document):
        subs = MapField(EmbeddedDocumentField(EmbeddedDoc))

    MyDoc.drop_collection()

    MyDoc().save()

    mydoc = MyDoc.objects.first()
    mydoc.subs["a"] = EmbeddedDoc()
    assert mydoc._get_changed_fields() == ["subs.a"]

    # The whole entry is already dirty, so the parent path stays "subs.a".
    subdoc = mydoc.subs["a"]
    subdoc.name = "bar"

    assert subdoc._get_changed_fields() == ["name"]
    assert mydoc._get_changed_fields() == ["subs.a"]
    mydoc.save()

    mydoc._clear_changed_fields()
    assert mydoc._get_changed_fields() == []
|
||||||
|
|
||||||
|
def test_upper_level_mark_as_changed(self):
    """Replacing a map entry collapses any deeper changed paths into it."""

    class EmbeddedDoc(EmbeddedDocument):
        name = StringField()

    class MyDoc(Document):
        subs = MapField(EmbeddedDocumentField(EmbeddedDoc))

    MyDoc.drop_collection()

    MyDoc(subs={"a": EmbeddedDoc(name="foo")}).save()

    mydoc = MyDoc.objects.first()
    subdoc = mydoc.subs["a"]
    subdoc.name = "bar"

    assert subdoc._get_changed_fields() == ["name"]
    assert mydoc._get_changed_fields() == ["subs.a.name"]

    # Overwriting the entry supersedes the nested "subs.a.name" change.
    mydoc.subs["a"] = EmbeddedDoc()
    assert mydoc._get_changed_fields() == ["subs.a"]
    mydoc.save()

    mydoc._clear_changed_fields()
    assert mydoc._get_changed_fields() == []
|
||||||
|
|
||||||
|
def test_referenced_object_changed_attributes(self):
|
||||||
|
"""Ensures that when you save a new reference to a field, the referenced object isn't altered"""
|
||||||
|
|
||||||
|
class Organization(Document):
|
||||||
|
name = StringField()
|
||||||
|
|
||||||
|
class User(Document):
|
||||||
|
name = StringField()
|
||||||
|
org = ReferenceField("Organization", required=True)
|
||||||
|
|
||||||
|
Organization.drop_collection()
|
||||||
|
User.drop_collection()
|
||||||
|
|
||||||
|
org1 = Organization(name="Org 1")
|
||||||
|
org1.save()
|
||||||
|
|
||||||
|
org2 = Organization(name="Org 2")
|
||||||
|
org2.save()
|
||||||
|
|
||||||
|
user = User(name="Fred", org=org1)
|
||||||
|
user.save()
|
||||||
|
|
||||||
|
org1.reload()
|
||||||
|
org2.reload()
|
||||||
|
user.reload()
|
||||||
|
assert org1.name == "Org 1"
|
||||||
|
assert org2.name == "Org 2"
|
||||||
|
assert user.name == "Fred"
|
||||||
|
|
||||||
|
user.name = "Harold"
|
||||||
|
user.org = org2
|
||||||
|
|
||||||
|
org2.name = "New Org 2"
|
||||||
|
assert org2.name == "New Org 2"
|
||||||
|
|
||||||
|
user.save()
|
||||||
|
org2.save()
|
||||||
|
|
||||||
|
assert org2.name == "New Org 2"
|
||||||
|
org2.reload()
|
||||||
|
assert org2.name == "New Org 2"
|
||||||
|
|
||||||
|
def test_delta_for_nested_map_fields(self):
    """Nested MapField/ListField/Reference changes appear under dotted map paths."""

    class UInfoDocument(Document):
        phone = StringField()

    class EmbeddedRole(EmbeddedDocument):
        type = StringField()

    class EmbeddedUser(EmbeddedDocument):
        name = StringField()
        roles = MapField(field=EmbeddedDocumentField(EmbeddedRole))
        rolist = ListField(field=EmbeddedDocumentField(EmbeddedRole))
        info = ReferenceField(UInfoDocument)

    class Doc(Document):
        users = MapField(field=EmbeddedDocumentField(EmbeddedUser))
        num = IntField(default=-1)

    Doc.drop_collection()

    doc = Doc(num=1)
    doc.users["007"] = EmbeddedUser(name="Agent007")
    doc.save()

    uinfo = UInfoDocument(phone="79089269066")
    uinfo.save()

    # Mutate one map entry three different ways: nested map, list, reference.
    d = Doc.objects(num=1).first()
    d.users["007"]["roles"]["666"] = EmbeddedRole(type="superadmin")
    d.users["007"]["rolist"].append(EmbeddedRole(type="oops"))
    d.users["007"]["info"] = uinfo

    updates, _ = d._delta()
    assert "users.007.roles.666" in updates
    assert "users.007.rolist" in updates
    assert "users.007.info" in updates
    assert updates["users.007.roles.666"]["type"] == "superadmin"
    assert updates["users.007.rolist"][0]["type"] == "oops"
    assert updates["users.007.info"] == uinfo.id
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":  # manual entry point for running this module directly
    unittest.main()
|
||||||
439
tests/document/test_dynamic.py
Normal file
439
tests/document/test_dynamic.py
Normal file
@@ -0,0 +1,439 @@
|
|||||||
|
import unittest
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from mongoengine import *
|
||||||
|
from tests.utils import MongoDBTestCase
|
||||||
|
|
||||||
|
__all__ = ("TestDynamicDocument",)
|
||||||
|
|
||||||
|
|
||||||
|
class TestDynamicDocument(MongoDBTestCase):
|
||||||
|
def setUp(self):
    """Define a fresh Person dynamic-document class (and an empty
    collection) for every test."""
    super(TestDynamicDocument, self).setUp()

    class Person(DynamicDocument):
        name = StringField()
        meta = {"allow_inheritance": True}

    Person.drop_collection()

    # Expose the per-test class to the individual test methods.
    self.Person = Person
|
||||||
|
|
||||||
|
def test_simple_dynamic_document(self):
|
||||||
|
"""Ensures simple dynamic documents are saved correctly"""
|
||||||
|
|
||||||
|
p = self.Person()
|
||||||
|
p.name = "James"
|
||||||
|
p.age = 34
|
||||||
|
|
||||||
|
assert p.to_mongo() == {"_cls": "Person", "name": "James", "age": 34}
|
||||||
|
assert p.to_mongo().keys() == ["_cls", "name", "age"]
|
||||||
|
p.save()
|
||||||
|
assert p.to_mongo().keys() == ["_id", "_cls", "name", "age"]
|
||||||
|
|
||||||
|
assert self.Person.objects.first().age == 34
|
||||||
|
|
||||||
|
# Confirm no changes to self.Person
|
||||||
|
assert not hasattr(self.Person, "age")
|
||||||
|
|
||||||
|
def test_dynamic_document_parse_values_in_constructor_like_document_do(self):
    """Dynamic documents coerce constructor values just like Documents."""

    class ProductDynamicDocument(DynamicDocument):
        title = StringField()
        price = FloatField()

    class ProductDocument(Document):
        title = StringField()
        price = FloatField()

    # Both flavours should coerce the string price to a float.
    product = ProductDocument(title="Blabla", price="12.5")
    dyn_product = ProductDynamicDocument(title="Blabla", price="12.5")
    assert product.price == dyn_product.price == 12.5
|
||||||
|
|
||||||
|
def test_change_scope_of_variable(self):
|
||||||
|
"""Test changing the scope of a dynamic field has no adverse effects"""
|
||||||
|
p = self.Person()
|
||||||
|
p.name = "Dean"
|
||||||
|
p.misc = 22
|
||||||
|
p.save()
|
||||||
|
|
||||||
|
p = self.Person.objects.get()
|
||||||
|
p.misc = {"hello": "world"}
|
||||||
|
p.save()
|
||||||
|
|
||||||
|
p = self.Person.objects.get()
|
||||||
|
assert p.misc == {"hello": "world"}
|
||||||
|
|
||||||
|
def test_delete_dynamic_field(self):
|
||||||
|
"""Test deleting a dynamic field works"""
|
||||||
|
self.Person.drop_collection()
|
||||||
|
p = self.Person()
|
||||||
|
p.name = "Dean"
|
||||||
|
p.misc = 22
|
||||||
|
p.save()
|
||||||
|
|
||||||
|
p = self.Person.objects.get()
|
||||||
|
p.misc = {"hello": "world"}
|
||||||
|
p.save()
|
||||||
|
|
||||||
|
p = self.Person.objects.get()
|
||||||
|
assert p.misc == {"hello": "world"}
|
||||||
|
collection = self.db[self.Person._get_collection_name()]
|
||||||
|
obj = collection.find_one()
|
||||||
|
assert sorted(obj.keys()) == ["_cls", "_id", "misc", "name"]
|
||||||
|
|
||||||
|
del p.misc
|
||||||
|
p.save()
|
||||||
|
|
||||||
|
p = self.Person.objects.get()
|
||||||
|
assert not hasattr(p, "misc")
|
||||||
|
|
||||||
|
obj = collection.find_one()
|
||||||
|
assert sorted(obj.keys()) == ["_cls", "_id", "name"]
|
||||||
|
|
||||||
|
def test_reload_after_unsetting(self):
    """Reloading after an unset update on a dynamic field must not raise."""
    p = self.Person()
    p.misc = 22
    p.save()
    p.update(unset__misc=1)
    p.reload()
|
||||||
|
|
||||||
|
def test_reload_dynamic_field(self):
    """reload() picks up dynamic fields added via an atomic update."""
    self.Person.objects.delete()
    p = self.Person.objects.create()
    p.update(age=1)

    # The atomic update is not reflected locally until reload().
    assert len(p._data) == 3
    assert sorted(p._data.keys()) == ["_cls", "id", "name"]

    p.reload()
    assert len(p._data) == 4
    assert sorted(p._data.keys()) == ["_cls", "age", "id", "name"]
|
||||||
|
|
||||||
|
def test_fields_without_underscore(self):
|
||||||
|
"""Ensure we can query dynamic fields"""
|
||||||
|
Person = self.Person
|
||||||
|
|
||||||
|
p = self.Person(name="Dean")
|
||||||
|
p.save()
|
||||||
|
|
||||||
|
raw_p = Person.objects.as_pymongo().get(id=p.id)
|
||||||
|
assert raw_p == {"_cls": u"Person", "_id": p.id, "name": u"Dean"}
|
||||||
|
|
||||||
|
p.name = "OldDean"
|
||||||
|
p.newattr = "garbage"
|
||||||
|
p.save()
|
||||||
|
raw_p = Person.objects.as_pymongo().get(id=p.id)
|
||||||
|
assert raw_p == {
|
||||||
|
"_cls": u"Person",
|
||||||
|
"_id": p.id,
|
||||||
|
"name": "OldDean",
|
||||||
|
"newattr": u"garbage",
|
||||||
|
}
|
||||||
|
|
||||||
|
def test_fields_containing_underscore(self):
|
||||||
|
"""Ensure we can query dynamic fields"""
|
||||||
|
|
||||||
|
class WeirdPerson(DynamicDocument):
|
||||||
|
name = StringField()
|
||||||
|
_name = StringField()
|
||||||
|
|
||||||
|
WeirdPerson.drop_collection()
|
||||||
|
|
||||||
|
p = WeirdPerson(name="Dean", _name="Dean")
|
||||||
|
p.save()
|
||||||
|
|
||||||
|
raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id)
|
||||||
|
assert raw_p == {"_id": p.id, "_name": u"Dean", "name": u"Dean"}
|
||||||
|
|
||||||
|
p.name = "OldDean"
|
||||||
|
p._name = "NewDean"
|
||||||
|
p._newattr1 = "garbage" # Unknown fields won't be added
|
||||||
|
p.save()
|
||||||
|
raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id)
|
||||||
|
assert raw_p == {"_id": p.id, "_name": u"NewDean", "name": u"OldDean"}
|
||||||
|
|
||||||
|
def test_dynamic_document_queries(self):
|
||||||
|
"""Ensure we can query dynamic fields"""
|
||||||
|
p = self.Person()
|
||||||
|
p.name = "Dean"
|
||||||
|
p.age = 22
|
||||||
|
p.save()
|
||||||
|
|
||||||
|
assert 1 == self.Person.objects(age=22).count()
|
||||||
|
p = self.Person.objects(age=22)
|
||||||
|
p = p.get()
|
||||||
|
assert 22 == p.age
|
||||||
|
|
||||||
|
def test_complex_dynamic_document_queries(self):
    """Operators work across mixed-type values of one dynamic field."""

    class Person(DynamicDocument):
        name = StringField()

    Person.drop_collection()

    # The dynamic 'age' attribute holds two strings and one int.
    p = Person(name="test")
    p.age = "ten"
    p.save()

    p1 = Person(name="test1")
    p1.age = "less then ten and a half"
    p1.save()

    p2 = Person(name="test2")
    p2.age = 10
    p2.save()

    assert Person.objects(age__icontains="ten").count() == 2
    assert Person.objects(age__gte=10).count() == 1
|
||||||
|
|
||||||
|
def test_complex_data_lookups(self):
|
||||||
|
"""Ensure you can query dynamic document dynamic fields"""
|
||||||
|
p = self.Person()
|
||||||
|
p.misc = {"hello": "world"}
|
||||||
|
p.save()
|
||||||
|
|
||||||
|
assert 1 == self.Person.objects(misc__hello="world").count()
|
||||||
|
|
||||||
|
def test_three_level_complex_data_lookups(self):
|
||||||
|
"""Ensure you can query three level document dynamic fields"""
|
||||||
|
self.Person.objects.create(misc={"hello": {"hello2": "world"}})
|
||||||
|
assert 1 == self.Person.objects(misc__hello__hello2="world").count()
|
||||||
|
|
||||||
|
def test_complex_embedded_document_validation(self):
|
||||||
|
"""Ensure embedded dynamic documents may be validated"""
|
||||||
|
|
||||||
|
class Embedded(DynamicEmbeddedDocument):
|
||||||
|
content = URLField()
|
||||||
|
|
||||||
|
class Doc(DynamicDocument):
|
||||||
|
pass
|
||||||
|
|
||||||
|
Doc.drop_collection()
|
||||||
|
doc = Doc()
|
||||||
|
|
||||||
|
embedded_doc_1 = Embedded(content="http://mongoengine.org")
|
||||||
|
embedded_doc_1.validate()
|
||||||
|
|
||||||
|
embedded_doc_2 = Embedded(content="this is not a url")
|
||||||
|
with pytest.raises(ValidationError):
|
||||||
|
embedded_doc_2.validate()
|
||||||
|
|
||||||
|
doc.embedded_field_1 = embedded_doc_1
|
||||||
|
doc.embedded_field_2 = embedded_doc_2
|
||||||
|
with pytest.raises(ValidationError):
|
||||||
|
doc.validate()
|
||||||
|
|
||||||
|
def test_inheritance(self):
|
||||||
|
"""Ensure that dynamic document plays nice with inheritance"""
|
||||||
|
|
||||||
|
class Employee(self.Person):
|
||||||
|
salary = IntField()
|
||||||
|
|
||||||
|
Employee.drop_collection()
|
||||||
|
|
||||||
|
assert "name" in Employee._fields
|
||||||
|
assert "salary" in Employee._fields
|
||||||
|
assert Employee._get_collection_name() == self.Person._get_collection_name()
|
||||||
|
|
||||||
|
joe_bloggs = Employee()
|
||||||
|
joe_bloggs.name = "Joe Bloggs"
|
||||||
|
joe_bloggs.salary = 10
|
||||||
|
joe_bloggs.age = 20
|
||||||
|
joe_bloggs.save()
|
||||||
|
|
||||||
|
assert 1 == self.Person.objects(age=20).count()
|
||||||
|
assert 1 == Employee.objects(age=20).count()
|
||||||
|
|
||||||
|
joe_bloggs = self.Person.objects.first()
|
||||||
|
assert isinstance(joe_bloggs, Employee)
|
||||||
|
|
||||||
|
def test_embedded_dynamic_document(self):
|
||||||
|
"""Test dynamic embedded documents"""
|
||||||
|
|
||||||
|
class Embedded(DynamicEmbeddedDocument):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class Doc(DynamicDocument):
|
||||||
|
pass
|
||||||
|
|
||||||
|
Doc.drop_collection()
|
||||||
|
doc = Doc()
|
||||||
|
|
||||||
|
embedded_1 = Embedded()
|
||||||
|
embedded_1.string_field = "hello"
|
||||||
|
embedded_1.int_field = 1
|
||||||
|
embedded_1.dict_field = {"hello": "world"}
|
||||||
|
embedded_1.list_field = ["1", 2, {"hello": "world"}]
|
||||||
|
doc.embedded_field = embedded_1
|
||||||
|
|
||||||
|
assert doc.to_mongo() == {
|
||||||
|
"embedded_field": {
|
||||||
|
"_cls": "Embedded",
|
||||||
|
"string_field": "hello",
|
||||||
|
"int_field": 1,
|
||||||
|
"dict_field": {"hello": "world"},
|
||||||
|
"list_field": ["1", 2, {"hello": "world"}],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
doc.save()
|
||||||
|
|
||||||
|
doc = Doc.objects.first()
|
||||||
|
assert doc.embedded_field.__class__ == Embedded
|
||||||
|
assert doc.embedded_field.string_field == "hello"
|
||||||
|
assert doc.embedded_field.int_field == 1
|
||||||
|
assert doc.embedded_field.dict_field == {"hello": "world"}
|
||||||
|
assert doc.embedded_field.list_field == ["1", 2, {"hello": "world"}]
|
||||||
|
|
||||||
|
    def test_complex_embedded_documents(self):
        """Test complex dynamic embedded documents setups"""

        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_1 = Embedded()
        embedded_1.string_field = "hello"
        embedded_1.int_field = 1
        embedded_1.dict_field = {"hello": "world"}

        embedded_2 = Embedded()
        embedded_2.string_field = "hello"
        embedded_2.int_field = 1
        embedded_2.dict_field = {"hello": "world"}
        embedded_2.list_field = ["1", 2, {"hello": "world"}]

        # Nest one dynamic embedded document inside another via a list field.
        embedded_1.list_field = ["1", 2, embedded_2]
        doc.embedded_field = embedded_1

        # Both nesting levels serialise with their _cls markers intact.
        assert doc.to_mongo() == {
            "embedded_field": {
                "_cls": "Embedded",
                "string_field": "hello",
                "int_field": 1,
                "dict_field": {"hello": "world"},
                "list_field": [
                    "1",
                    2,
                    {
                        "_cls": "Embedded",
                        "string_field": "hello",
                        "int_field": 1,
                        "dict_field": {"hello": "world"},
                        "list_field": ["1", 2, {"hello": "world"}],
                    },
                ],
            }
        }
        doc.save()
        doc = Doc.objects.first()
        assert doc.embedded_field.__class__ == Embedded
        assert doc.embedded_field.string_field == "hello"
        assert doc.embedded_field.int_field == 1
        assert doc.embedded_field.dict_field == {"hello": "world"}
        assert doc.embedded_field.list_field[0] == "1"
        assert doc.embedded_field.list_field[1] == 2

        # The nested element is rehydrated to an Embedded instance, not a dict.
        embedded_field = doc.embedded_field.list_field[2]

        assert embedded_field.__class__ == Embedded
        assert embedded_field.string_field == "hello"
        assert embedded_field.int_field == 1
        assert embedded_field.dict_field == {"hello": "world"}
        assert embedded_field.list_field == ["1", 2, {"hello": "world"}]
|
def test_dynamic_and_embedded(self):
|
||||||
|
"""Ensure embedded documents play nicely"""
|
||||||
|
|
||||||
|
class Address(EmbeddedDocument):
|
||||||
|
city = StringField()
|
||||||
|
|
||||||
|
class Person(DynamicDocument):
|
||||||
|
name = StringField()
|
||||||
|
|
||||||
|
Person.drop_collection()
|
||||||
|
|
||||||
|
Person(name="Ross", address=Address(city="London")).save()
|
||||||
|
|
||||||
|
person = Person.objects.first()
|
||||||
|
person.address.city = "Lundenne"
|
||||||
|
person.save()
|
||||||
|
|
||||||
|
assert Person.objects.first().address.city == "Lundenne"
|
||||||
|
|
||||||
|
person = Person.objects.first()
|
||||||
|
person.address = Address(city="Londinium")
|
||||||
|
person.save()
|
||||||
|
|
||||||
|
assert Person.objects.first().address.city == "Londinium"
|
||||||
|
|
||||||
|
person = Person.objects.first()
|
||||||
|
person.age = 35
|
||||||
|
person.save()
|
||||||
|
assert Person.objects.first().age == 35
|
||||||
|
|
||||||
|
def test_dynamic_embedded_works_with_only(self):
|
||||||
|
"""Ensure custom fieldnames on a dynamic embedded document are found by qs.only()"""
|
||||||
|
|
||||||
|
class Address(DynamicEmbeddedDocument):
|
||||||
|
city = StringField()
|
||||||
|
|
||||||
|
class Person(DynamicDocument):
|
||||||
|
address = EmbeddedDocumentField(Address)
|
||||||
|
|
||||||
|
Person.drop_collection()
|
||||||
|
|
||||||
|
Person(
|
||||||
|
name="Eric", address=Address(city="San Francisco", street_number="1337")
|
||||||
|
).save()
|
||||||
|
|
||||||
|
assert Person.objects.first().address.street_number == "1337"
|
||||||
|
assert (
|
||||||
|
Person.objects.only("address__street_number").first().address.street_number
|
||||||
|
== "1337"
|
||||||
|
)
|
||||||
|
|
||||||
|
    def test_dynamic_and_embedded_dict_access(self):
        """Ensure embedded dynamic documents work with dict[] style access"""

        class Address(EmbeddedDocument):
            city = StringField()

        class Person(DynamicDocument):
            name = StringField()

        Person.drop_collection()

        Person(name="Ross", address=Address(city="London")).save()

        # Dynamic attributes may be set via attribute assignment...
        person = Person.objects.first()
        person.attrval = "This works"

        person["phone"] = "555-1212"  # but this should too

        # Same thing two levels deep
        person["address"]["city"] = "Lundenne"
        person.save()

        assert Person.objects.first().address.city == "Lundenne"

        assert Person.objects.first().phone == "555-1212"

        person = Person.objects.first()
        person.address = Address(city="Londinium")
        person.save()

        assert Person.objects.first().address.city == "Londinium"

        # Item-style assignment of a new dynamic attribute persists as well.
        person = Person.objects.first()
        person["age"] = 35
        person.save()
        assert Person.objects.first().age == 35
|
|
||||||
|
if __name__ == "__main__":
    # Allow running this test module directly (outside a pytest run).
    unittest.main()
1081
tests/document/test_indexes.py
Normal file
1081
tests/document/test_indexes.py
Normal file
File diff suppressed because it is too large
Load Diff
630
tests/document/test_inheritance.py
Normal file
630
tests/document/test_inheritance.py
Normal file
@@ -0,0 +1,630 @@
|
|||||||
|
import unittest
|
||||||
|
import warnings
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from mongoengine import (
|
||||||
|
BooleanField,
|
||||||
|
Document,
|
||||||
|
EmbeddedDocument,
|
||||||
|
EmbeddedDocumentField,
|
||||||
|
GenericReferenceField,
|
||||||
|
IntField,
|
||||||
|
ReferenceField,
|
||||||
|
StringField,
|
||||||
|
)
|
||||||
|
from mongoengine.pymongo_support import list_collection_names
|
||||||
|
from tests.fixtures import Base
|
||||||
|
from tests.utils import MongoDBTestCase
|
||||||
|
|
||||||
|
|
||||||
|
class TestInheritance(MongoDBTestCase):
|
||||||
|
def tearDown(self):
|
||||||
|
for collection in list_collection_names(self.db):
|
||||||
|
self.db.drop_collection(collection)
|
||||||
|
|
||||||
|
    def test_constructor_cls(self):
        # Ensures _cls is properly set during construction
        # and when object gets reloaded (prevent regression of #1950)
        class EmbedData(EmbeddedDocument):
            data = StringField()
            meta = {"allow_inheritance": True}

        class DataDoc(Document):
            name = StringField()
            embed = EmbeddedDocumentField(EmbedData)
            meta = {"allow_inheritance": True}

        test_doc = DataDoc(name="test", embed=EmbedData(data="data"))
        assert test_doc._cls == "DataDoc"
        assert test_doc.embed._cls == "EmbedData"
        test_doc.save()
        saved_doc = DataDoc.objects.with_id(test_doc.id)
        # The reloaded document (and its embedded doc) must carry the same
        # _cls markers as the in-memory original.
        assert test_doc._cls == saved_doc._cls
        assert test_doc.embed._cls == saved_doc.embed._cls
        test_doc.delete()
||||||
|
    def test_superclasses(self):
        """Ensure that the correct list of superclasses is assembled.
        """

        class Animal(Document):
            meta = {"allow_inheritance": True}

        class Fish(Animal):
            pass

        class Guppy(Fish):
            pass

        class Mammal(Animal):
            pass

        class Dog(Mammal):
            pass

        class Human(Mammal):
            pass

        # _superclasses lists every ancestor's dotted _cls path, root first;
        # the root class itself has none.
        assert Animal._superclasses == ()
        assert Fish._superclasses == ("Animal",)
        assert Guppy._superclasses == ("Animal", "Animal.Fish")
        assert Mammal._superclasses == ("Animal",)
        assert Dog._superclasses == ("Animal", "Animal.Mammal")
        assert Human._superclasses == ("Animal", "Animal.Mammal")
|
    def test_external_superclasses(self):
        """Ensure that the correct list of super classes is assembled when
        importing part of the model.
        """

        # Base comes from tests.fixtures, i.e. it was defined in another module.
        class Animal(Base):
            pass

        class Fish(Animal):
            pass

        class Guppy(Fish):
            pass

        class Mammal(Animal):
            pass

        class Dog(Mammal):
            pass

        class Human(Mammal):
            pass

        # The externally-defined root appears at the front of every path.
        assert Animal._superclasses == ("Base",)
        assert Fish._superclasses == ("Base", "Base.Animal")
        assert Guppy._superclasses == ("Base", "Base.Animal", "Base.Animal.Fish")
        assert Mammal._superclasses == ("Base", "Base.Animal")
        assert Dog._superclasses == ("Base", "Base.Animal", "Base.Animal.Mammal")
        assert Human._superclasses == ("Base", "Base.Animal", "Base.Animal.Mammal")
|
    def test_subclasses(self):
        """Ensure that the correct list of _subclasses (subclasses) is
        assembled.
        """

        class Animal(Document):
            meta = {"allow_inheritance": True}

        class Fish(Animal):
            pass

        class Guppy(Fish):
            pass

        class Mammal(Animal):
            pass

        class Dog(Mammal):
            pass

        class Human(Mammal):
            pass

        # _subclasses always includes the class's own _cls path first, then
        # every descendant's dotted path.
        assert Animal._subclasses == (
            "Animal",
            "Animal.Fish",
            "Animal.Fish.Guppy",
            "Animal.Mammal",
            "Animal.Mammal.Dog",
            "Animal.Mammal.Human",
        )
        assert Fish._subclasses == ("Animal.Fish", "Animal.Fish.Guppy")
        assert Guppy._subclasses == ("Animal.Fish.Guppy",)
        assert Mammal._subclasses == (
            "Animal.Mammal",
            "Animal.Mammal.Dog",
            "Animal.Mammal.Human",
        )
        assert Human._subclasses == ("Animal.Mammal.Human",)
|
    def test_external_subclasses(self):
        """Ensure that the correct list of _subclasses (subclasses) is
        assembled when importing part of the model.
        """

        # Base comes from tests.fixtures, i.e. it was defined in another module.
        class Animal(Base):
            pass

        class Fish(Animal):
            pass

        class Guppy(Fish):
            pass

        class Mammal(Animal):
            pass

        class Dog(Mammal):
            pass

        class Human(Mammal):
            pass

        # Paths are rooted at the external Base class.
        assert Animal._subclasses == (
            "Base.Animal",
            "Base.Animal.Fish",
            "Base.Animal.Fish.Guppy",
            "Base.Animal.Mammal",
            "Base.Animal.Mammal.Dog",
            "Base.Animal.Mammal.Human",
        )
        assert Fish._subclasses == ("Base.Animal.Fish", "Base.Animal.Fish.Guppy")
        assert Guppy._subclasses == ("Base.Animal.Fish.Guppy",)
        assert Mammal._subclasses == (
            "Base.Animal.Mammal",
            "Base.Animal.Mammal.Dog",
            "Base.Animal.Mammal.Human",
        )
        assert Human._subclasses == ("Base.Animal.Mammal.Human",)
|
    def test_dynamic_declarations(self):
        """Test that declaring an extra class updates meta data"""

        class Animal(Document):
            meta = {"allow_inheritance": True}

        assert Animal._superclasses == ()
        assert Animal._subclasses == ("Animal",)

        # Test dynamically adding a class changes the meta data
        class Fish(Animal):
            pass

        assert Animal._superclasses == ()
        assert Animal._subclasses == ("Animal", "Animal.Fish")

        assert Fish._superclasses == ("Animal",)
        assert Fish._subclasses == ("Animal.Fish",)

        # Test dynamically adding an inherited class changes the meta data
        class Pike(Fish):
            pass

        # The new grandchild is visible from every ancestor, not just its
        # direct parent.
        assert Animal._superclasses == ()
        assert Animal._subclasses == ("Animal", "Animal.Fish", "Animal.Fish.Pike")

        assert Fish._superclasses == ("Animal",)
        assert Fish._subclasses == ("Animal.Fish", "Animal.Fish.Pike")

        assert Pike._superclasses == ("Animal", "Animal.Fish")
        assert Pike._subclasses == ("Animal.Fish.Pike",)
|
    def test_inheritance_meta_data(self):
        """Ensure that document may inherit fields from a superclass document.
        """

        class Person(Document):
            name = StringField()
            age = IntField()

            meta = {"allow_inheritance": True}

        class Employee(Person):
            salary = IntField()

        # The subclass sees its own field plus the inherited ones, along with
        # the implicit _cls and id fields.
        assert ["_cls", "age", "id", "name", "salary"] == sorted(
            Employee._fields.keys()
        )
        # Subclasses share the parent's collection.
        assert Employee._get_collection_name() == Person._get_collection_name()
|
    def test_inheritance_to_mongo_keys(self):
        """Ensure that document may inherit fields from a superclass document.
        """

        class Person(Document):
            name = StringField()
            age = IntField()

            meta = {"allow_inheritance": True}

        class Employee(Person):
            salary = IntField()

        assert ["_cls", "age", "id", "name", "salary"] == sorted(
            Employee._fields.keys()
        )
        # Serialised key order: _cls first, then fields in declaration order
        # (parent fields before the subclass's own).
        assert Person(name="Bob", age=35).to_mongo().keys() == ["_cls", "name", "age"]
        assert Employee(name="Bob", age=35, salary=0).to_mongo().keys() == [
            "_cls",
            "name",
            "age",
            "salary",
        ]
        assert Employee._get_collection_name() == Person._get_collection_name()
|
def test_indexes_and_multiple_inheritance(self):
|
||||||
|
""" Ensure that all of the indexes are created for a document with
|
||||||
|
multiple inheritance.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class A(Document):
|
||||||
|
a = StringField()
|
||||||
|
|
||||||
|
meta = {"allow_inheritance": True, "indexes": ["a"]}
|
||||||
|
|
||||||
|
class B(Document):
|
||||||
|
b = StringField()
|
||||||
|
|
||||||
|
meta = {"allow_inheritance": True, "indexes": ["b"]}
|
||||||
|
|
||||||
|
class C(A, B):
|
||||||
|
pass
|
||||||
|
|
||||||
|
A.drop_collection()
|
||||||
|
B.drop_collection()
|
||||||
|
C.drop_collection()
|
||||||
|
|
||||||
|
C.ensure_indexes()
|
||||||
|
|
||||||
|
assert sorted(
|
||||||
|
[idx["key"] for idx in C._get_collection().index_information().values()]
|
||||||
|
) == sorted(
|
||||||
|
[[(u"_cls", 1), (u"b", 1)], [(u"_id", 1)], [(u"_cls", 1), (u"a", 1)]]
|
||||||
|
)
|
||||||
|
|
||||||
|
    def test_polymorphic_queries(self):
        """Ensure that the correct subclasses are returned from a query
        """

        class Animal(Document):
            meta = {"allow_inheritance": True}

        class Fish(Animal):
            pass

        class Mammal(Animal):
            pass

        class Dog(Mammal):
            pass

        class Human(Mammal):
            pass

        Animal.drop_collection()

        # One document of each class, all stored in the shared collection.
        Animal().save()
        Fish().save()
        Mammal().save()
        Dog().save()
        Human().save()

        # Querying the root returns everything, rehydrated to concrete types.
        classes = [obj.__class__ for obj in Animal.objects]
        assert classes == [Animal, Fish, Mammal, Dog, Human]

        # Querying a mid-level class returns it and its descendants only.
        classes = [obj.__class__ for obj in Mammal.objects]
        assert classes == [Mammal, Dog, Human]

        # Querying a leaf class returns only that class.
        classes = [obj.__class__ for obj in Human.objects]
        assert classes == [Human]
|
    def test_allow_inheritance(self):
        """Ensure that inheritance is disabled by default on simple
        classes and that _cls will not be used.
        """

        class Animal(Document):
            name = StringField()

        # can't inherit because Animal didn't explicitly allow inheritance
        with pytest.raises(ValueError, match="Document Animal may not be subclassed"):

            class Dog(Animal):
                pass

        # Check that _cls etc aren't present on simple documents
        dog = Animal(name="dog").save()
        assert dog.to_mongo().keys() == ["_id", "name"]

        # Verify against the raw stored document as well.
        collection = self.db[Animal._get_collection_name()]
        obj = collection.find_one()
        assert "_cls" not in obj
|
    def test_cant_turn_off_inheritance_on_subclass(self):
        """Ensure if inheritance is on in a subclass you cant turn it off.
        """

        class Animal(Document):
            name = StringField()
            meta = {"allow_inheritance": True}

        # Disabling inheritance on a non-direct subclass of Document must fail
        # at class-definition time.
        with pytest.raises(ValueError) as exc_info:

            class Mammal(Animal):
                meta = {"allow_inheritance": False}

        assert (
            str(exc_info.value)
            == 'Only direct subclasses of Document may set "allow_inheritance" to False'
        )
|
    def test_allow_inheritance_abstract_document(self):
        """Ensure that abstract documents can set inheritance rules and that
        _cls will not be used.
        """

        # An abstract base may forbid inheritance beyond its first level.
        class FinalDocument(Document):
            meta = {"abstract": True, "allow_inheritance": False}

        class Animal(FinalDocument):
            name = StringField()

        with pytest.raises(ValueError):

            class Mammal(Animal):
                pass

        # Check that _cls isn't present in simple documents
        doc = Animal(name="dog")
        assert "_cls" not in doc.to_mongo()
|
    def test_using_abstract_class_in_reference_field(self):
        # Ensures no regression of #1920
        class AbstractHuman(Document):
            meta = {"abstract": True}

        class Dad(AbstractHuman):
            name = StringField()

        class Home(Document):
            dad = ReferenceField(AbstractHuman)  # Referencing the abstract class
            address = StringField()

        dad = Dad(name="5").save()
        Home(dad=dad, address="street").save()

        home = Home.objects.first()
        home.address = "garbage"
        home.save()  # Was failing with ValidationError
|
    def test_abstract_class_referencing_self(self):
        # Ensures no regression of #1920
        class Human(Document):
            meta = {"abstract": True}
            # "self" on an abstract class must resolve to the concrete subclass
            creator = ReferenceField("self", dbref=True)

        class User(Human):
            name = StringField()

        user = User(name="John").save()
        user2 = User(name="Foo", creator=user).save()

        user2 = User.objects.with_id(user2.id)
        user2.name = "Bar"
        user2.save()  # Was failing with ValidationError
|
    def test_abstract_handle_ids_in_metaclass_properly(self):
        class City(Document):
            continent = StringField()
            meta = {"abstract": True, "allow_inheritance": False}

        class EuropeanCity(City):
            name = StringField()

        berlin = EuropeanCity(name="Berlin", continent="Europe")
        # Field maps must stay in sync with the ordered field list.
        assert len(berlin._db_field_map) == len(berlin._fields_ordered)
        assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered)
        # Three fields: the auto-created "id" plus continent and name,
        # with the primary key first.
        assert len(berlin._fields_ordered) == 3
        assert berlin._fields_ordered[0] == "id"
|
    def test_auto_id_not_set_if_specific_in_parent_class(self):
        class City(Document):
            continent = StringField()
            # Explicit primary key on the abstract parent suppresses the
            # auto-generated "id" field in subclasses.
            city_id = IntField(primary_key=True)
            meta = {"abstract": True, "allow_inheritance": False}

        class EuropeanCity(City):
            name = StringField()

        berlin = EuropeanCity(name="Berlin", continent="Europe")
        assert len(berlin._db_field_map) == len(berlin._fields_ordered)
        assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered)
        # Three fields: city_id (the pk, ordered first), continent, name.
        assert len(berlin._fields_ordered) == 3
        assert berlin._fields_ordered[0] == "city_id"
|
    def test_auto_id_vs_non_pk_id_field(self):
        class City(Document):
            continent = StringField()
            # "id" declared WITHOUT primary_key=True: it is an ordinary field,
            # so an auto pk with a generated name must still be created.
            id = IntField()
            meta = {"abstract": True, "allow_inheritance": False}

        class EuropeanCity(City):
            name = StringField()

        berlin = EuropeanCity(name="Berlin", continent="Europe")
        assert len(berlin._db_field_map) == len(berlin._fields_ordered)
        assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered)
        # Four fields: auto_id_0 (generated pk, ordered first), continent,
        # id (plain field), name.
        assert len(berlin._fields_ordered) == 4
        assert berlin._fields_ordered[0] == "auto_id_0"
        berlin.save()
        assert berlin.pk == berlin.auto_id_0
|
    def test_abstract_document_creation_does_not_fail(self):
        class City(Document):
            continent = StringField()
            meta = {"abstract": True, "allow_inheritance": False}

        # Instantiating an abstract document is allowed; it just has no pk.
        city = City(continent="asia")
        assert city.pk is None
        # TODO: expected error? Shouldn't we create a new error type?
        with pytest.raises(KeyError):
            setattr(city, "pk", 1)
|
    def test_allow_inheritance_embedded_document(self):
        """Ensure embedded documents respect inheritance."""

        class Comment(EmbeddedDocument):
            content = StringField()

        # Inheritance is off by default for embedded documents too.
        with pytest.raises(ValueError):

            class SpecialComment(Comment):
                pass

        doc = Comment(content="test")
        assert "_cls" not in doc.to_mongo()

        # Re-declaring with allow_inheritance turns the _cls marker on.
        class Comment(EmbeddedDocument):
            content = StringField()
            meta = {"allow_inheritance": True}

        doc = Comment(content="test")
        assert "_cls" in doc.to_mongo()
|
def test_document_inheritance(self):
|
||||||
|
"""Ensure mutliple inheritance of abstract documents
|
||||||
|
"""
|
||||||
|
|
||||||
|
class DateCreatedDocument(Document):
|
||||||
|
meta = {"allow_inheritance": True, "abstract": True}
|
||||||
|
|
||||||
|
class DateUpdatedDocument(Document):
|
||||||
|
meta = {"allow_inheritance": True, "abstract": True}
|
||||||
|
|
||||||
|
try:
|
||||||
|
|
||||||
|
class MyDocument(DateCreatedDocument, DateUpdatedDocument):
|
||||||
|
pass
|
||||||
|
|
||||||
|
except Exception:
|
||||||
|
assert False, "Couldn't create MyDocument class"
|
||||||
|
|
||||||
|
    def test_abstract_documents(self):
        """Ensure that a document superclass can be marked as abstract
        thereby not using it as the name for the collection."""

        # Meta options that concrete subclasses should inherit verbatim.
        defaults = {
            "index_background": True,
            "index_opts": {"hello": "world"},
            "allow_inheritance": True,
            "queryset_class": "QuerySet",
            "db_alias": "myDB",
            "shard_key": ("hello", "world"),
        }

        meta_settings = {"abstract": True}
        meta_settings.update(defaults)

        class Animal(Document):
            name = StringField()
            meta = meta_settings

        class Fish(Animal):
            pass

        class Guppy(Fish):
            pass

        class Mammal(Animal):
            meta = {"abstract": True}

        class Human(Mammal):
            pass

        # Every non-abstract descendant inherits each default meta option.
        for k, v in defaults.items():
            for cls in [Animal, Fish, Guppy]:
                assert cls._meta[k] == v

        # Abstract classes get no collection of their own...
        assert "collection" not in Animal._meta
        assert "collection" not in Mammal._meta

        assert Animal._get_collection_name() is None
        assert Mammal._get_collection_name() is None

        # ...while the first concrete class in each branch names the
        # collection, shared by its own subclasses.
        assert Fish._get_collection_name() == "fish"
        assert Guppy._get_collection_name() == "fish"
        assert Human._get_collection_name() == "human"

        # ensure that a subclass of a non-abstract class can't be abstract
        with pytest.raises(ValueError):

            class EvilHuman(Human):
                evil = BooleanField(default=True)
                meta = {"abstract": True}
|
    def test_abstract_embedded_documents(self):
        # 789: EmbeddedDocument shouldn't inherit abstract
        class A(EmbeddedDocument):
            meta = {"abstract": True}

        class B(A):
            pass

        # The "abstract" flag must not propagate to the subclass.
        assert not B._meta["abstract"]
|
def test_inherited_collections(self):
|
||||||
|
"""Ensure that subclassed documents don't override parents'
|
||||||
|
collections
|
||||||
|
"""
|
||||||
|
|
||||||
|
class Drink(Document):
|
||||||
|
name = StringField()
|
||||||
|
meta = {"allow_inheritance": True}
|
||||||
|
|
||||||
|
class Drinker(Document):
|
||||||
|
drink = GenericReferenceField()
|
||||||
|
|
||||||
|
try:
|
||||||
|
warnings.simplefilter("error")
|
||||||
|
|
||||||
|
class AcloholicDrink(Drink):
|
||||||
|
meta = {"collection": "booze"}
|
||||||
|
|
||||||
|
except SyntaxWarning:
|
||||||
|
warnings.simplefilter("ignore")
|
||||||
|
|
||||||
|
class AlcoholicDrink(Drink):
|
||||||
|
meta = {"collection": "booze"}
|
||||||
|
|
||||||
|
else:
|
||||||
|
raise AssertionError("SyntaxWarning should be triggered")
|
||||||
|
|
||||||
|
warnings.resetwarnings()
|
||||||
|
|
||||||
|
Drink.drop_collection()
|
||||||
|
AlcoholicDrink.drop_collection()
|
||||||
|
Drinker.drop_collection()
|
||||||
|
|
||||||
|
red_bull = Drink(name="Red Bull")
|
||||||
|
red_bull.save()
|
||||||
|
|
||||||
|
programmer = Drinker(drink=red_bull)
|
||||||
|
programmer.save()
|
||||||
|
|
||||||
|
beer = AlcoholicDrink(name="Beer")
|
||||||
|
beer.save()
|
||||||
|
real_person = Drinker(drink=beer)
|
||||||
|
real_person.save()
|
||||||
|
|
||||||
|
assert Drinker.objects[0].drink.name == red_bull.name
|
||||||
|
assert Drinker.objects[1].drink.name == beer.name
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Allow running this test module directly (outside a pytest run).
    unittest.main()
|
||||||
3916
tests/document/test_instance.py
Normal file
3916
tests/document/test_instance.py
Normal file
File diff suppressed because it is too large
Load Diff
106
tests/document/test_json_serialisation.py
Normal file
106
tests/document/test_json_serialisation.py
Normal file
@@ -0,0 +1,106 @@
|
|||||||
|
import unittest
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from bson import ObjectId
|
||||||
|
|
||||||
|
from mongoengine import *
|
||||||
|
from tests.utils import MongoDBTestCase
|
||||||
|
|
||||||
|
|
||||||
|
class TestJson(MongoDBTestCase):
|
||||||
|
    def test_json_names(self):
        """
        Going to test reported issue:
        https://github.com/MongoEngine/mongoengine/issues/654
        where the reporter asks for the availability to perform
        a to_json with the original class names and not the abreviated
        mongodb document keys
        """

        class Embedded(EmbeddedDocument):
            string = StringField(db_field="s")

        class Doc(Document):
            string = StringField(db_field="s")
            embedded = EmbeddedDocumentField(Embedded, db_field="e")

        doc = Doc(string="Hello", embedded=Embedded(string="Inner Hello"))
        # use_db_field=False serialises with the Python attribute names
        # ("string", "embedded") instead of the short db_field keys ("s", "e").
        doc_json = doc.to_json(
            sort_keys=True, use_db_field=False, separators=(",", ":")
        )

        expected_json = """{"embedded":{"string":"Inner Hello"},"string":"Hello"}"""

        assert doc_json == expected_json
|
    def test_json_simple(self):
        class Embedded(EmbeddedDocument):
            string = StringField()

        class Doc(Document):
            string = StringField()
            embedded_field = EmbeddedDocumentField(Embedded)

            # Field-wise equality so the from_json round-trip can be compared.
            def __eq__(self, other):
                return (
                    self.string == other.string
                    and self.embedded_field == other.embedded_field
                )

        doc = Doc(string="Hi", embedded_field=Embedded(string="Hi"))

        doc_json = doc.to_json(sort_keys=True, separators=(",", ":"))
        expected_json = """{"embedded_field":{"string":"Hi"},"string":"Hi"}"""
        assert doc_json == expected_json

        # Round-trip: serialise then parse back into an equal document.
        assert doc == Doc.from_json(doc.to_json())
|
    def test_json_complex(self):
        class EmbeddedDoc(EmbeddedDocument):
            pass

        class Simple(Document):
            pass

        # One field of every (serialisable) field type, each with a default,
        # to exercise the full to_json/from_json round-trip.
        class Doc(Document):
            string_field = StringField(default="1")
            int_field = IntField(default=1)
            float_field = FloatField(default=1.1)
            boolean_field = BooleanField(default=True)
            datetime_field = DateTimeField(default=datetime.now)
            embedded_document_field = EmbeddedDocumentField(
                EmbeddedDoc, default=lambda: EmbeddedDoc()
            )
            list_field = ListField(default=lambda: [1, 2, 3])
            dict_field = DictField(default=lambda: {"hello": "world"})
            objectid_field = ObjectIdField(default=ObjectId)
            reference_field = ReferenceField(Simple, default=lambda: Simple().save())
            map_field = MapField(IntField(), default=lambda: {"simple": 1})
            decimal_field = DecimalField(default=1.0)
            complex_datetime_field = ComplexDateTimeField(default=datetime.now)
            url_field = URLField(default="http://mongoengine.org")
            dynamic_field = DynamicField(default=1)
            generic_reference_field = GenericReferenceField(
                default=lambda: Simple().save()
            )
            sorted_list_field = SortedListField(IntField(), default=lambda: [1, 2, 3])
            email_field = EmailField(default="ross@example.com")
            geo_point_field = GeoPointField(default=lambda: [1, 2])
            sequence_field = SequenceField()
            uuid_field = UUIDField(default=uuid.uuid4)
            generic_embedded_document_field = GenericEmbeddedDocumentField(
                default=lambda: EmbeddedDoc()
            )

            # Compare via the JSON representation itself: two docs are equal
            # iff they serialise to the same data.
            def __eq__(self, other):
                import json

                return json.loads(self.to_json()) == json.loads(other.to_json())

        doc = Doc()
        assert doc == Doc.from_json(doc.to_json())
|
|
||||||
|
if __name__ == "__main__":
    # Allow running this test module directly (outside a pytest run).
    unittest.main()
|
||||||
222
tests/document/test_validation.py
Normal file
222
tests/document/test_validation.py
Normal file
@@ -0,0 +1,222 @@
|
|||||||
|
import unittest
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from mongoengine import *
|
||||||
|
from tests.utils import MongoDBTestCase
|
||||||
|
|
||||||
|
|
||||||
|
class TestValidatorError(MongoDBTestCase):
|
||||||
|
def test_to_dict(self):
|
||||||
|
"""Ensure a ValidationError handles error to_dict correctly.
|
||||||
|
"""
|
||||||
|
error = ValidationError("root")
|
||||||
|
assert error.to_dict() == {}
|
||||||
|
|
||||||
|
# 1st level error schema
|
||||||
|
error.errors = {"1st": ValidationError("bad 1st")}
|
||||||
|
assert "1st" in error.to_dict()
|
||||||
|
assert error.to_dict()["1st"] == "bad 1st"
|
||||||
|
|
||||||
|
# 2nd level error schema
|
||||||
|
error.errors = {
|
||||||
|
"1st": ValidationError(
|
||||||
|
"bad 1st", errors={"2nd": ValidationError("bad 2nd")}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
assert "1st" in error.to_dict()
|
||||||
|
assert isinstance(error.to_dict()["1st"], dict)
|
||||||
|
assert "2nd" in error.to_dict()["1st"]
|
||||||
|
assert error.to_dict()["1st"]["2nd"] == "bad 2nd"
|
||||||
|
|
||||||
|
# moar levels
|
||||||
|
error.errors = {
|
||||||
|
"1st": ValidationError(
|
||||||
|
"bad 1st",
|
||||||
|
errors={
|
||||||
|
"2nd": ValidationError(
|
||||||
|
"bad 2nd",
|
||||||
|
errors={
|
||||||
|
"3rd": ValidationError(
|
||||||
|
"bad 3rd", errors={"4th": ValidationError("Inception")}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
)
|
||||||
|
},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
assert "1st" in error.to_dict()
|
||||||
|
assert "2nd" in error.to_dict()["1st"]
|
||||||
|
assert "3rd" in error.to_dict()["1st"]["2nd"]
|
||||||
|
assert "4th" in error.to_dict()["1st"]["2nd"]["3rd"]
|
||||||
|
assert error.to_dict()["1st"]["2nd"]["3rd"]["4th"] == "Inception"
|
||||||
|
|
||||||
|
assert error.message == "root(2nd.3rd.4th.Inception: ['1st'])"
|
||||||
|
|
||||||
|
def test_model_validation(self):
|
||||||
|
class User(Document):
|
||||||
|
username = StringField(primary_key=True)
|
||||||
|
name = StringField(required=True)
|
||||||
|
|
||||||
|
try:
|
||||||
|
User().validate()
|
||||||
|
except ValidationError as e:
|
||||||
|
assert "User:None" in e.message
|
||||||
|
assert e.to_dict() == {
|
||||||
|
"username": "Field is required",
|
||||||
|
"name": "Field is required",
|
||||||
|
}
|
||||||
|
|
||||||
|
user = User(username="RossC0", name="Ross").save()
|
||||||
|
user.name = None
|
||||||
|
try:
|
||||||
|
user.save()
|
||||||
|
except ValidationError as e:
|
||||||
|
assert "User:RossC0" in e.message
|
||||||
|
assert e.to_dict() == {"name": "Field is required"}
|
||||||
|
|
||||||
|
def test_fields_rewrite(self):
|
||||||
|
class BasePerson(Document):
|
||||||
|
name = StringField()
|
||||||
|
age = IntField()
|
||||||
|
meta = {"abstract": True}
|
||||||
|
|
||||||
|
class Person(BasePerson):
|
||||||
|
name = StringField(required=True)
|
||||||
|
|
||||||
|
p = Person(age=15)
|
||||||
|
with pytest.raises(ValidationError):
|
||||||
|
p.validate()
|
||||||
|
|
||||||
|
def test_embedded_document_validation(self):
|
||||||
|
"""Ensure that embedded documents may be validated.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class Comment(EmbeddedDocument):
|
||||||
|
date = DateTimeField()
|
||||||
|
content = StringField(required=True)
|
||||||
|
|
||||||
|
comment = Comment()
|
||||||
|
with pytest.raises(ValidationError):
|
||||||
|
comment.validate()
|
||||||
|
|
||||||
|
comment.content = "test"
|
||||||
|
comment.validate()
|
||||||
|
|
||||||
|
comment.date = 4
|
||||||
|
with pytest.raises(ValidationError):
|
||||||
|
comment.validate()
|
||||||
|
|
||||||
|
comment.date = datetime.now()
|
||||||
|
comment.validate()
|
||||||
|
assert comment._instance is None
|
||||||
|
|
||||||
|
def test_embedded_db_field_validate(self):
|
||||||
|
class SubDoc(EmbeddedDocument):
|
||||||
|
val = IntField(required=True)
|
||||||
|
|
||||||
|
class Doc(Document):
|
||||||
|
id = StringField(primary_key=True)
|
||||||
|
e = EmbeddedDocumentField(SubDoc, db_field="eb")
|
||||||
|
|
||||||
|
try:
|
||||||
|
Doc(id="bad").validate()
|
||||||
|
except ValidationError as e:
|
||||||
|
assert "SubDoc:None" in e.message
|
||||||
|
assert e.to_dict() == {"e": {"val": "OK could not be converted to int"}}
|
||||||
|
|
||||||
|
Doc.drop_collection()
|
||||||
|
|
||||||
|
Doc(id="test", e=SubDoc(val=15)).save()
|
||||||
|
|
||||||
|
doc = Doc.objects.first()
|
||||||
|
keys = doc._data.keys()
|
||||||
|
assert 2 == len(keys)
|
||||||
|
assert "e" in keys
|
||||||
|
assert "id" in keys
|
||||||
|
|
||||||
|
doc.e.val = "OK"
|
||||||
|
try:
|
||||||
|
doc.save()
|
||||||
|
except ValidationError as e:
|
||||||
|
assert "Doc:test" in e.message
|
||||||
|
assert e.to_dict() == {"e": {"val": "OK could not be converted to int"}}
|
||||||
|
|
||||||
|
def test_embedded_weakref(self):
|
||||||
|
class SubDoc(EmbeddedDocument):
|
||||||
|
val = IntField(required=True)
|
||||||
|
|
||||||
|
class Doc(Document):
|
||||||
|
e = EmbeddedDocumentField(SubDoc, db_field="eb")
|
||||||
|
|
||||||
|
Doc.drop_collection()
|
||||||
|
|
||||||
|
d1 = Doc()
|
||||||
|
d2 = Doc()
|
||||||
|
|
||||||
|
s = SubDoc()
|
||||||
|
|
||||||
|
with pytest.raises(ValidationError):
|
||||||
|
s.validate()
|
||||||
|
|
||||||
|
d1.e = s
|
||||||
|
d2.e = s
|
||||||
|
|
||||||
|
del d1
|
||||||
|
|
||||||
|
with pytest.raises(ValidationError):
|
||||||
|
d2.validate()
|
||||||
|
|
||||||
|
def test_parent_reference_in_child_document(self):
|
||||||
|
"""
|
||||||
|
Test to ensure a ReferenceField can store a reference to a parent
|
||||||
|
class when inherited. Issue #954.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class Parent(Document):
|
||||||
|
meta = {"allow_inheritance": True}
|
||||||
|
reference = ReferenceField("self")
|
||||||
|
|
||||||
|
class Child(Parent):
|
||||||
|
pass
|
||||||
|
|
||||||
|
parent = Parent()
|
||||||
|
parent.save()
|
||||||
|
|
||||||
|
child = Child(reference=parent)
|
||||||
|
|
||||||
|
# Saving child should not raise a ValidationError
|
||||||
|
try:
|
||||||
|
child.save()
|
||||||
|
except ValidationError as e:
|
||||||
|
self.fail("ValidationError raised: %s" % e.message)
|
||||||
|
|
||||||
|
def test_parent_reference_set_as_attribute_in_child_document(self):
|
||||||
|
"""
|
||||||
|
Test to ensure a ReferenceField can store a reference to a parent
|
||||||
|
class when inherited and when set via attribute. Issue #954.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class Parent(Document):
|
||||||
|
meta = {"allow_inheritance": True}
|
||||||
|
reference = ReferenceField("self")
|
||||||
|
|
||||||
|
class Child(Parent):
|
||||||
|
pass
|
||||||
|
|
||||||
|
parent = Parent()
|
||||||
|
parent.save()
|
||||||
|
|
||||||
|
child = Child()
|
||||||
|
child.reference = parent
|
||||||
|
|
||||||
|
# Saving the child should not raise a ValidationError
|
||||||
|
try:
|
||||||
|
child.save()
|
||||||
|
except ValidationError as e:
|
||||||
|
self.fail("ValidationError raised: %s" % e.message)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
unittest.main()
|
||||||
@@ -1,146 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
import sys
|
|
||||||
sys.path[0:0] = [""]
|
|
||||||
|
|
||||||
import unittest
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
from mongoengine import *
|
|
||||||
|
|
||||||
__all__ = ("ValidatorErrorTest",)
|
|
||||||
|
|
||||||
|
|
||||||
class ValidatorErrorTest(unittest.TestCase):
|
|
||||||
|
|
||||||
def setUp(self):
|
|
||||||
connect(db='mongoenginetest')
|
|
||||||
|
|
||||||
def test_to_dict(self):
|
|
||||||
"""Ensure a ValidationError handles error to_dict correctly.
|
|
||||||
"""
|
|
||||||
error = ValidationError('root')
|
|
||||||
self.assertEqual(error.to_dict(), {})
|
|
||||||
|
|
||||||
# 1st level error schema
|
|
||||||
error.errors = {'1st': ValidationError('bad 1st'), }
|
|
||||||
self.assertTrue('1st' in error.to_dict())
|
|
||||||
self.assertEqual(error.to_dict()['1st'], 'bad 1st')
|
|
||||||
|
|
||||||
# 2nd level error schema
|
|
||||||
error.errors = {'1st': ValidationError('bad 1st', errors={
|
|
||||||
'2nd': ValidationError('bad 2nd'),
|
|
||||||
})}
|
|
||||||
self.assertTrue('1st' in error.to_dict())
|
|
||||||
self.assertTrue(isinstance(error.to_dict()['1st'], dict))
|
|
||||||
self.assertTrue('2nd' in error.to_dict()['1st'])
|
|
||||||
self.assertEqual(error.to_dict()['1st']['2nd'], 'bad 2nd')
|
|
||||||
|
|
||||||
# moar levels
|
|
||||||
error.errors = {'1st': ValidationError('bad 1st', errors={
|
|
||||||
'2nd': ValidationError('bad 2nd', errors={
|
|
||||||
'3rd': ValidationError('bad 3rd', errors={
|
|
||||||
'4th': ValidationError('Inception'),
|
|
||||||
}),
|
|
||||||
}),
|
|
||||||
})}
|
|
||||||
self.assertTrue('1st' in error.to_dict())
|
|
||||||
self.assertTrue('2nd' in error.to_dict()['1st'])
|
|
||||||
self.assertTrue('3rd' in error.to_dict()['1st']['2nd'])
|
|
||||||
self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd'])
|
|
||||||
self.assertEqual(error.to_dict()['1st']['2nd']['3rd']['4th'],
|
|
||||||
'Inception')
|
|
||||||
|
|
||||||
self.assertEqual(error.message, "root(2nd.3rd.4th.Inception: ['1st'])")
|
|
||||||
|
|
||||||
def test_model_validation(self):
|
|
||||||
|
|
||||||
class User(Document):
|
|
||||||
username = StringField(primary_key=True)
|
|
||||||
name = StringField(required=True)
|
|
||||||
|
|
||||||
try:
|
|
||||||
User().validate()
|
|
||||||
except ValidationError, e:
|
|
||||||
self.assertTrue("User:None" in e.message)
|
|
||||||
self.assertEqual(e.to_dict(), {
|
|
||||||
'username': 'Field is required',
|
|
||||||
'name': 'Field is required'})
|
|
||||||
|
|
||||||
user = User(username="RossC0", name="Ross").save()
|
|
||||||
user.name = None
|
|
||||||
try:
|
|
||||||
user.save()
|
|
||||||
except ValidationError, e:
|
|
||||||
self.assertTrue("User:RossC0" in e.message)
|
|
||||||
self.assertEqual(e.to_dict(), {
|
|
||||||
'name': 'Field is required'})
|
|
||||||
|
|
||||||
def test_fields_rewrite(self):
|
|
||||||
class BasePerson(Document):
|
|
||||||
name = StringField()
|
|
||||||
age = IntField()
|
|
||||||
meta = {'abstract': True}
|
|
||||||
|
|
||||||
class Person(BasePerson):
|
|
||||||
name = StringField(required=True)
|
|
||||||
|
|
||||||
p = Person(age=15)
|
|
||||||
self.assertRaises(ValidationError, p.validate)
|
|
||||||
|
|
||||||
def test_embedded_document_validation(self):
|
|
||||||
"""Ensure that embedded documents may be validated.
|
|
||||||
"""
|
|
||||||
class Comment(EmbeddedDocument):
|
|
||||||
date = DateTimeField()
|
|
||||||
content = StringField(required=True)
|
|
||||||
|
|
||||||
comment = Comment()
|
|
||||||
self.assertRaises(ValidationError, comment.validate)
|
|
||||||
|
|
||||||
comment.content = 'test'
|
|
||||||
comment.validate()
|
|
||||||
|
|
||||||
comment.date = 4
|
|
||||||
self.assertRaises(ValidationError, comment.validate)
|
|
||||||
|
|
||||||
comment.date = datetime.now()
|
|
||||||
comment.validate()
|
|
||||||
self.assertEqual(comment._instance, None)
|
|
||||||
|
|
||||||
def test_embedded_db_field_validate(self):
|
|
||||||
|
|
||||||
class SubDoc(EmbeddedDocument):
|
|
||||||
val = IntField(required=True)
|
|
||||||
|
|
||||||
class Doc(Document):
|
|
||||||
id = StringField(primary_key=True)
|
|
||||||
e = EmbeddedDocumentField(SubDoc, db_field='eb')
|
|
||||||
|
|
||||||
try:
|
|
||||||
Doc(id="bad").validate()
|
|
||||||
except ValidationError, e:
|
|
||||||
self.assertTrue("SubDoc:None" in e.message)
|
|
||||||
self.assertEqual(e.to_dict(), {
|
|
||||||
"e": {'val': 'OK could not be converted to int'}})
|
|
||||||
|
|
||||||
Doc.drop_collection()
|
|
||||||
|
|
||||||
Doc(id="test", e=SubDoc(val=15)).save()
|
|
||||||
|
|
||||||
doc = Doc.objects.first()
|
|
||||||
keys = doc._data.keys()
|
|
||||||
self.assertEqual(2, len(keys))
|
|
||||||
self.assertTrue('e' in keys)
|
|
||||||
self.assertTrue('id' in keys)
|
|
||||||
|
|
||||||
doc.e.val = "OK"
|
|
||||||
try:
|
|
||||||
doc.save()
|
|
||||||
except ValidationError, e:
|
|
||||||
self.assertTrue("Doc:test" in e.message)
|
|
||||||
self.assertEqual(e.to_dict(), {
|
|
||||||
"e": {'val': 'OK could not be converted to int'}})
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
unittest.main()
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
from fields import *
|
|
||||||
from file_tests import *
|
|
||||||
from geo import *
|
|
||||||
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user