Compare commits
1543 Commits
.gitignore (vendored; 17 changed lines)

@@ -1,4 +1,17 @@
-*.pyc
-.*.swp
+.*
+!.gitignore
+*~
+*.py[co]
+.*.sw[po]
+*.egg
 docs/.build
 docs/_build
+build/
+dist/
+mongoengine.egg-info/
+env/
+.settings
+.project
+.pydevproject
+tests/test_bugfix.py
+htmlcov/
.travis.yml (new file, 27 lines)

# http://travis-ci.org/#!/MongoEngine/mongoengine
language: python
services: mongodb
python:
  - "2.6"
  - "2.7"
  - "3.2"
  - "3.3"
env:
  - PYMONGO=dev DJANGO=1.5.1
  - PYMONGO=dev DJANGO=1.4.2
  - PYMONGO=2.5 DJANGO=1.5.1
  - PYMONGO=2.5 DJANGO=1.4.2
install:
  - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi
  - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi
  - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install django==$DJANGO --use-mirrors ; true; fi
  - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi
  - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi
  - python setup.py install
script:
  - python setup.py test
notifications:
  irc: "irc.freenode.org#mongoengine"
branches:
  only:
    - master
AUTHORS (new file, 163 lines)

The PRIMARY AUTHORS are (and/or have been):

Ross Lawley <ross.lawley@gmail.com>
Harry Marr <harry@hmarr.com>
Matt Dennewitz <mattdennewitz@gmail.com>
Deepak Thukral <iapain@yahoo.com>
Florian Schlachter <flori@n-schlachter.de>
Steve Challis <steve@stevechallis.com>
Wilson Júnior <wilsonpjunior@gmail.com>
Dan Crosta https://github.com/dcrosta
Laine Herron https://github.com/LaineHerron

CONTRIBUTORS

Derived from the git logs, inevitably incomplete but all of whom and others
have submitted patches, reported bugs and generally helped make MongoEngine
that much better:

* Harry Marr
* Ross Lawley
* blackbrrr
* Florian Schlachter
* Vincent Driessen
* Steve Challis
* flosch
* Deepak Thukral
* Colin Howe
* Wilson Júnior (https://github.com/wpjunior)
* Alistair Roche
* Dan Crosta
* Viktor Kerkez
* Stephan Jaekel
* Rached Ben Mustapha
* Greg Turner
* Daniel Hasselrot
* Mircea Pasoi
* Matt Chisholm
* James Punteney
* TimothéePeignier
* Stuart Rackham
* Serge Matveenko
* Matt Dennewitz
* Don Spaulding
* Ales Zoulek
* sshwsfc
* sib
* Samuel Clay
* Nick Vlku
* martin
* Flavio Amieiro
* Анхбаяр Лхагвадорж
* Zak Johnson
* Victor Farazdagi
* vandersonmota
* Theo Julienne
* sp
* Slavi Pantaleev
* Richard Henry
* Nicolas Perriault
* Nick Vlku Jr
* Michael Henson
* Leo Honkanen
* kuno
* Josh Ourisman
* Jaime
* Igor Ivanov
* Gregg Lind
* Gareth Lloyd
* Albert Choi
* John Arnfield
* grubberr
* Paul Aliagas
* Paul Cunnane
* Julien Rebetez
* Marc Tamlyn
* Karim Allah
* Adam Parrish
* jpfarias
* jonrscott
* Alice Zoë Bevan-McGregor
* Stephen Young
* tkloc
* aid
* yamaneko1212
* dave mankoff
* Alexander G. Morano
* jwilder
* Joe Shaw
* Adam Flynn
* Ankhbayar
* Jan Schrewe
* David Koblas
* Crittercism
* Alvin Liang
* andrewmlevy
* Chris Faulkner
* Ashwin Purohit
* Shalabh Aggarwal
* Chris Williams
* Robert Kajic
* Jacob Peddicord
* Nils Hasenbanck
* mostlystatic
* Greg Banks
* swashbuckler
* Adam Reeve
* Anthony Nemitz
* deignacio
* Shaun Duncan
* Meir Kriheli
* Andrey Fedoseev
* aparajita
* Tristan Escalada
* Alexander Koshelev
* Jaime Irurzun
* Alexandre González
* Thomas Steinacher
* Tommi Komulainen
* Peter Landry
* biszkoptwielki
* Anton Kolechkin
* Sergey Nikitin
* psychogenic
* Stefan Wójcik
* dimonb
* Garry Polley
* James Slagle
* Adrian Scott
* Peter Teichman
* Jakub Kot
* Jorge Bastida
* Aleksandr Sorokoumov
* Yohan Graterol
* bool-dev
* Russ Weeks
* Paul Swartz
* Sundar Raman
* Benoit Louy
* lraucy
* hellysmile
* Jaepil Jeong
* Daniil Sharou
* Stefan Wójcik
* Pete Campton
* Martyn Smith
* Marcelo Anton
* Aleksey Porfirov
* Nicolas Trippar
* Manuel Hermann
* Gustavo Gawryszewski
* Max Countryman
* caitifbrito
* lcya86 刘春洋
* Martin Alderete (https://github.com/malderete)
* Nick Joyce
* Jared Forsyth
* Kenneth Falck
* Lukasz Balcerzak
* Nicolas Cortot
* Alex (https://github.com/kelsta)
* Jin Zhang
* Daniel Axtens
* Leo-Naeka
CONTRIBUTING.rst (new file, 61 lines)

Contributing to MongoEngine
===========================

MongoEngine has a large `community
<https://raw.github.com/MongoEngine/mongoengine/master/AUTHORS>`_ and
contributions are always encouraged. Contributions can be as simple as
minor tweaks to the documentation. Please read these guidelines before
sending a pull request.

Bugfixes and New Features
-------------------------

Before starting to write code, look for existing `tickets
<https://github.com/MongoEngine/mongoengine/issues?state=open>`_ or `create one
<https://github.com/MongoEngine/mongoengine/issues>`_ for your specific
issue or feature request. That way you avoid working on something
that might not be of interest or that has already been addressed. If in doubt
post to the `user group <http://groups.google.com/group/mongoengine-users>`

Supported Interpreters
----------------------

MongoEngine supports CPython 2.6 and newer. Language
features not supported by all interpreters can not be used.
Please also ensure that your code is properly converted by
`2to3 <http://docs.python.org/library/2to3.html>`_ for Python 3 support.

Style Guide
-----------

MongoEngine aims to follow `PEP8 <http://www.python.org/dev/peps/pep-0008/>`_
including 4 space indents and 79 character line limits.

Testing
-------

All tests are run on `Travis <http://travis-ci.org/MongoEngine/mongoengine>`_
and any pull requests are automatically tested by Travis. Any pull requests
without tests will take longer to be integrated and might be refused.

General Guidelines
------------------

- Avoid backward breaking changes if at all possible.
- Write inline documentation for new classes and methods.
- Write tests and make sure they pass (make sure you have a mongod
  running on the default port, then execute ``python setup.py test``
  from the cmd line to run the test suite).
- Add yourself to AUTHORS :)

Documentation
-------------

To contribute to the `API documentation
<http://docs.mongoengine.org/en/latest/apireference.html>`_
just make your changes to the inline documentation of the appropriate
`source code <https://github.com/MongoEngine/mongoengine>`_ or `rst file
<https://github.com/MongoEngine/mongoengine/tree/master/docs>`_ in a
branch and submit a `pull request <https://help.github.com/articles/using-pull-requests>`_.
You might also use the github `Edit <https://github.com/blog/844-forking-with-the-edit-button>`_
button.
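The testing guideline above amounts to: start a mongod on the default port, define documents against a scratch database, and run the suite. Below is a minimal sketch of what such a test module could look like, assuming mongoengine is installed and a mongod is listening on localhost:27017; the `Person` model and database name are illustrative, not taken from the diff.

```python
# Hypothetical minimal test module in the style the guide describes.
# Assumptions: mongoengine is installed and a mongod is listening on
# localhost:27017; model and database names are illustrative.
import unittest

from mongoengine import Document, StringField, connect


class Person(Document):
    name = StringField(required=True)


class PersonTest(unittest.TestCase):
    def setUp(self):
        connect('mongoenginetest')  # default host/port
        Person.drop_collection()    # start each test from a clean collection

    def test_save_and_query(self):
        Person(name='Ross').save()
        self.assertEqual(Person.objects(name='Ross').count(), 1)


if __name__ == '__main__':
    unittest.main()
```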
LICENSE (8 changed lines)

@@ -1,5 +1,5 @@
-Copyright (c) 2009-2010 Harry Marr
+Copyright (c) 2009 See AUTHORS

 Permission is hereby granted, free of charge, to any person
 obtaining a copy of this software and associated documentation
 files (the "Software"), to deal in the Software without
@@ -8,10 +8,10 @@ copy, modify, merge, publish, distribute, sublicense, and/or sell
 copies of the Software, and to permit persons to whom the
 Software is furnished to do so, subject to the following
 conditions:

 The above copyright notice and this permission notice shall be
 included in all copies or substantial portions of the Software.

 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
 OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
MANIFEST.in

@@ -1,6 +1,6 @@
+include MANIFEST.in
 include README.rst
 include LICENSE
+include AUTHORS
 recursive-include docs *
-prune docs/_build/*
-recursive-include tests *
-recursive-exclude * *.pyc *.swp
+prune docs/_build
README.rst (40 changed lines)

@@ -2,26 +2,31 @@
 MongoEngine
 ===========
 :Info: MongoEngine is an ORM-like layer on top of PyMongo.
+:Repository: https://github.com/MongoEngine/mongoengine
 :Author: Harry Marr (http://github.com/hmarr)
+:Maintainer: Ross Lawley (http://github.com/rozza)
+
+.. image:: https://secure.travis-ci.org/MongoEngine/mongoengine.png?branch=master
+    :target: http://travis-ci.org/MongoEngine/mongoengine
+
 About
 =====
 MongoEngine is a Python Object-Document Mapper for working with MongoDB.
-Documentation available at http://hmarr.com/mongoengine/ - there is currently
-a `tutorial <http://hmarr.com/mongoengine/tutorial.html>`_, a `user guide
-<http://hmarr.com/mongoengine/userguide.html>`_ and an `API reference
-<http://hmarr.com/mongoengine/apireference.html>`_.
+Documentation available at http://mongoengine-odm.rtfd.org - there is currently
+a `tutorial <http://readthedocs.org/docs/mongoengine-odm/en/latest/tutorial.html>`_, a `user guide
+<https://mongoengine-odm.readthedocs.org/en/latest/guide/index.html>`_ and an `API reference
+<http://readthedocs.org/docs/mongoengine-odm/en/latest/apireference.html>`_.

 Installation
 ============
 If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
-you can use ``easy_install mongoengine``. Otherwise, you can download the
-source from `GitHub <http://github.com/hmarr/mongoengine>`_ and run ``python
+you can use ``easy_install -U mongoengine``. Otherwise, you can download the
+source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python
 setup.py install``.

 Dependencies
 ============
-- pymongo 1.1+
+- pymongo 2.5+
 - sphinx (optional - for documentation generation)

 Examples
@@ -58,11 +63,6 @@ Some simple examples of what MongoEngine code looks like::
     ...     print 'Link:', post.url
     ...     print
     ...
-    === Using MongoEngine ===
-    See the tutorial
-
-    === MongoEngine Docs ===
-    Link: hmarr.com/mongoengine

     >>> len(BlogPost.objects)
     2
@@ -80,10 +80,16 @@ Some simple examples of what MongoEngine code looks like::
 Tests
 =====
 To run the test suite, ensure you are running a local instance of MongoDB on
-the standard port, and run ``python setup.py test``.
+the standard port, and run: ``python setup.py test``.

+Community
+=========
+- `MongoEngine Users mailing list
+  <http://groups.google.com/group/mongoengine-users>`_
+- `MongoEngine Developers mailing list
+  <http://groups.google.com/group/mongoengine-dev>`_
+- `#mongoengine IRC channel <http://webchat.freenode.net/?channels=mongoengine>`_

 Contributing
 ============
-The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_ - to
-contribute to the project, fork it on GitHub and send a pull request, all
-contributions and suggestions are welcome!
+We welcome contributions! see the`Contribution guidelines <https://github.com/MongoEngine/mongoengine/blob/master/CONTRIBUTING.rst>`_
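The README's ``Examples`` section, whose doctest output is trimmed in the hunk above, is the quickest way to see the Document/QuerySet API in action. Here is a short runnable sketch of the same idea, assuming mongoengine is installed and a local mongod is on the default port; the model, fields, and database name are illustrative rather than copied from the README.

```python
# Runnable sketch of the Document/QuerySet API the README demonstrates.
# Assumptions: mongoengine installed, mongod on localhost:27017;
# 'tumblelog' and the BlogPost fields are illustrative names.
from mongoengine import Document, StringField, URLField, connect

connect('tumblelog')  # default alias, localhost:27017


class BlogPost(Document):
    title = StringField(required=True)
    url = URLField()


BlogPost(title='Using MongoEngine', url='http://example.com/').save()

print(len(BlogPost.objects))   # QuerySets support len(); prints 1
for post in BlogPost.objects:  # ...and iteration
    print('Link:', post.url)
```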
282
benchmark.py
Normal file
282
benchmark.py
Normal file
@@ -0,0 +1,282 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
import timeit
|
||||||
|
|
||||||
|
|
||||||
|
def cprofile_main():
|
||||||
|
from pymongo import Connection
|
||||||
|
connection = Connection()
|
||||||
|
connection.drop_database('timeit_test')
|
||||||
|
connection.disconnect()
|
||||||
|
|
||||||
|
from mongoengine import Document, DictField, connect
|
||||||
|
connect("timeit_test")
|
||||||
|
|
||||||
|
class Noddy(Document):
|
||||||
|
fields = DictField()
|
||||||
|
|
||||||
|
for i in xrange(1):
|
||||||
|
noddy = Noddy()
|
||||||
|
for j in range(20):
|
||||||
|
noddy.fields["key" + str(j)] = "value " + str(j)
|
||||||
|
noddy.save()
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
"""
|
||||||
|
0.4 Performance Figures ...
|
||||||
|
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries - Pymongo
|
||||||
|
3.86744189262
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries - MongoEngine
|
||||||
|
6.23374891281
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
|
||||||
|
5.33027005196
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
|
||||||
|
pass - No Cascade
|
||||||
|
|
||||||
|
0.5.X
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries - Pymongo
|
||||||
|
3.89597702026
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries - MongoEngine
|
||||||
|
21.7735359669
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
|
||||||
|
19.8670389652
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
|
||||||
|
pass - No Cascade
|
||||||
|
|
||||||
|
0.6.X
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries - Pymongo
|
||||||
|
3.81559205055
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries - MongoEngine
|
||||||
|
10.0446798801
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
|
||||||
|
9.51354718208
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
|
||||||
|
9.02567505836
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries - MongoEngine, force=True
|
||||||
|
8.44933390617
|
||||||
|
|
||||||
|
0.7.X
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries - Pymongo
|
||||||
|
3.78801012039
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries - MongoEngine
|
||||||
|
9.73050498962
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
|
||||||
|
8.33456707001
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
|
||||||
|
8.37778115273
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries - MongoEngine, force=True
|
||||||
|
8.36906409264
|
||||||
|
0.8.X
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries - Pymongo
|
||||||
|
3.69964408875
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries - Pymongo write_concern={"w": 0}
|
||||||
|
3.5526599884
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries - MongoEngine
|
||||||
|
7.00959801674
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries without continual assign - MongoEngine
|
||||||
|
5.60943293571
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade=True
|
||||||
|
6.715102911
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True
|
||||||
|
5.50644683838
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False
|
||||||
|
4.69851183891
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False
|
||||||
|
4.68946313858
|
||||||
|
----------------------------------------------------------------------------------------------------
|
||||||
|
"""
|
||||||
|
|
||||||
|
setup = """
|
||||||
|
from pymongo import MongoClient
|
||||||
|
connection = MongoClient()
|
||||||
|
connection.drop_database('timeit_test')
"""

stmt = """
from pymongo import MongoClient
connection = MongoClient()

db = connection.timeit_test
noddy = db.noddy

for i in xrange(10000):
    example = {'fields': {}}
    for j in range(20):
        example['fields']["key"+str(j)] = "value "+str(j)

    noddy.save(example)

myNoddys = noddy.find()
[n for n in myNoddys]  # iterate
"""

print "-" * 100
print """Creating 10000 dictionaries - Pymongo"""
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)

stmt = """
from pymongo import MongoClient
connection = MongoClient()

db = connection.timeit_test
noddy = db.noddy

for i in xrange(10000):
    example = {'fields': {}}
    for j in range(20):
        example['fields']["key"+str(j)] = "value "+str(j)

    noddy.save(example, write_concern={"w": 0})

myNoddys = noddy.find()
[n for n in myNoddys]  # iterate
"""

print "-" * 100
print """Creating 10000 dictionaries - Pymongo write_concern={"w": 0}"""
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)

setup = """
from pymongo import MongoClient
connection = MongoClient()
connection.drop_database('timeit_test')
connection.disconnect()

from mongoengine import Document, DictField, connect
connect("timeit_test")

class Noddy(Document):
    fields = DictField()
"""

stmt = """
for i in xrange(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save()

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

print "-" * 100
print """Creating 10000 dictionaries - MongoEngine"""
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)

stmt = """
for i in xrange(10000):
    noddy = Noddy()
    fields = {}
    for j in range(20):
        fields["key"+str(j)] = "value "+str(j)
    noddy.fields = fields
    noddy.save()

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

print "-" * 100
print """Creating 10000 dictionaries without continual assign - MongoEngine"""
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)

stmt = """
for i in xrange(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(write_concern={"w": 0}, cascade=True)

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

print "-" * 100
print """Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade=True"""
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)

stmt = """
for i in xrange(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(write_concern={"w": 0}, validate=False, cascade=True)

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

print "-" * 100
print """Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True"""
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)

stmt = """
for i in xrange(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(validate=False, write_concern={"w": 0})

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

print "-" * 100
print """Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False"""
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)

stmt = """
for i in xrange(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(force_insert=True, write_concern={"w": 0}, validate=False)

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

print "-" * 100
print """Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False"""
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)


if __name__ == "__main__":
    main()
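The script above targets the PyMongo 2.x API: ``Collection.save`` was deprecated in PyMongo 3.0 and removed in 4.0, and the per-call write-concern keywords went with it. A rough sketch of the equivalent unacknowledged-write loop on current PyMongo versions - not part of the original file, names simply mirror the script::

    from pymongo import MongoClient
    from pymongo.write_concern import WriteConcern

    connection = MongoClient()
    db = connection.timeit_test
    # with_options attaches a collection-level write concern; w=0 requests
    # unacknowledged writes, mirroring write_concern={"w": 0} above.
    noddy = db.noddy.with_options(write_concern=WriteConcern(w=0))

    for i in range(10000):
        example = {'fields': {}}
        for j in range(20):
            example['fields']["key" + str(j)] = "value " + str(j)
        noddy.insert_one(example)  # insert_one replaces the removed save()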
@@ -6,6 +6,7 @@ Connecting
 ==========
 
 .. autofunction:: mongoengine.connect
+.. autofunction:: mongoengine.register_connection
 
 Documents
 =========
@@ -15,35 +16,74 @@ Documents
 
 .. attribute:: objects
 
    A :class:`~mongoengine.queryset.QuerySet` object that is created lazily
    on access.
 
 .. autoclass:: mongoengine.EmbeddedDocument
    :members:
 
+.. autoclass:: mongoengine.DynamicDocument
+   :members:
+
+.. autoclass:: mongoengine.DynamicEmbeddedDocument
+   :members:
+
+.. autoclass:: mongoengine.document.MapReduceDocument
+   :members:
+
+.. autoclass:: mongoengine.ValidationError
+   :members:
+
+Context Managers
+================
+
+.. autoclass:: mongoengine.context_managers.switch_db
+.. autoclass:: mongoengine.context_managers.no_dereference
+.. autoclass:: mongoengine.context_managers.query_counter
+
 Querying
 ========
 
 .. autoclass:: mongoengine.queryset.QuerySet
    :members:
 
+   .. automethod:: mongoengine.queryset.QuerySet.__call__
+
 .. autofunction:: mongoengine.queryset.queryset_manager
 
 Fields
 ======
 
-.. autoclass:: mongoengine.StringField
-.. autoclass:: mongoengine.IntField
-.. autoclass:: mongoengine.FloatField
-.. autoclass:: mongoengine.DateTimeField
-.. autoclass:: mongoengine.EmbeddedDocumentField
-.. autoclass:: mongoengine.ListField
-.. autoclass:: mongoengine.ObjectIdField
-.. autoclass:: mongoengine.ReferenceField
+.. autoclass:: mongoengine.fields.StringField
+.. autoclass:: mongoengine.fields.URLField
+.. autoclass:: mongoengine.fields.EmailField
+.. autoclass:: mongoengine.fields.IntField
+.. autoclass:: mongoengine.fields.LongField
+.. autoclass:: mongoengine.fields.FloatField
+.. autoclass:: mongoengine.fields.DecimalField
+.. autoclass:: mongoengine.fields.BooleanField
+.. autoclass:: mongoengine.fields.DateTimeField
+.. autoclass:: mongoengine.fields.ComplexDateTimeField
+.. autoclass:: mongoengine.fields.EmbeddedDocumentField
+.. autoclass:: mongoengine.fields.GenericEmbeddedDocumentField
+.. autoclass:: mongoengine.fields.DynamicField
+.. autoclass:: mongoengine.fields.ListField
+.. autoclass:: mongoengine.fields.SortedListField
+.. autoclass:: mongoengine.fields.DictField
+.. autoclass:: mongoengine.fields.MapField
+.. autoclass:: mongoengine.fields.ReferenceField
+.. autoclass:: mongoengine.fields.GenericReferenceField
+.. autoclass:: mongoengine.fields.BinaryField
+.. autoclass:: mongoengine.fields.FileField
+.. autoclass:: mongoengine.fields.ImageField
+.. autoclass:: mongoengine.fields.SequenceField
+.. autoclass:: mongoengine.fields.ObjectIdField
+.. autoclass:: mongoengine.fields.UUIDField
+.. autoclass:: mongoengine.fields.GeoPointField
+.. autoclass:: mongoengine.fields.PointField
+.. autoclass:: mongoengine.fields.LineStringField
+.. autoclass:: mongoengine.fields.PolygonField
+.. autoclass:: mongoengine.fields.GridFSError
+.. autoclass:: mongoengine.fields.GridFSProxy
+.. autoclass:: mongoengine.fields.ImageGridFsProxy
+.. autoclass:: mongoengine.fields.ImproperlyConfigured
@@ -2,6 +2,552 @@
Changelog
=========

Changes in 0.8.1
================
- Fixed Python 2.6 django auth importlib issue (#326)
- Fixed pickle unsaved document regression (#327)

Changes in 0.8.0
================
- Fixed querying ReferenceField custom_id (#317)
- Fixed pickle issues with collections (#316)
- Added `get_next_value` preview for SequenceFields (#319)
- Added no_sub_classes context manager and queryset helper (#312)
- Querysets now utilise a local cache
- Changed __len__ behaviour in the queryset (#247, #311)
- Fixed querying string versions of ObjectIds issue with ReferenceField (#307)
- Added $setOnInsert support for upserts (#308)
- Upserts now possible with just query parameters (#309)
- Upserting is the only way to ensure docs are saved correctly (#306)
- Fixed register_delete_rule inheritance issue
- Fix cloning of sliced querysets (#303)
- Fixed update_one write concern (#302)
- Updated minimum requirement for pymongo to 2.5
- Add support for new geojson fields, indexes and queries (#299)
- If values can't be compared, mark as changed (#287)
- Ensure as_pymongo() and to_json honour only() and exclude() (#293)
- Document serialization uses field order to ensure a strict order is set (#296)
- DecimalField now stores as float not string (#289)
- UUIDField now stores as binary by default (#292)
- Added Custom User Model for Django 1.5 (#285)
- Cascading saves now default to off (#291)
- ReferenceField now stores ObjectIds by default rather than DBRefs (#290)
- Added ImageField support for inline replacements (#86)
- Added SequenceField.set_next_value(value) helper (#159)
- Updated .only() behaviour - like exclude() it is now chainable (#202)
- Added with_limit_and_skip support to count() (#235)
- Objects queryset manager now inherited (#256)
- Updated connection to use MongoClient (#262, #274)
- Fixed db_alias and inherited Documents (#143)
- Documentation update for document errors (#124)
- Deprecated `get_or_create` (#35)
- Updated inheritable objects created by upsert now contain _cls (#118)
- Added support for creating documents with embedded documents in a single operation (#6)
- Added to_json and from_json to Document (#1)
- Added to_json and from_json to QuerySet (#131)
- Updated index creation now tied to Document class (#102)
- Added none() to queryset (#127)
- Updated SequenceFields to allow post processing of the calculated counter value (#141)
- Added clean method to documents for pre-validation data cleaning (#60)
- Added support for setting read preference at a query level (#157)
- Added _instance to EmbeddedDocuments pointing to the parent (#139)
- Inheritance is off by default (#122)
- Removed _types and just use _cls for inheritance (#148)
- Only allow QNode instances to be passed as query objects (#199)
- Dynamic fields are now validated on save (#153) (#154)
- Added support for multiple slices and made slicing chainable (#170) (#190) (#191)
- Fixed GridFSProxy __getattr__ behaviour (#196)
- Fix Django timezone support (#151)
- Simplified Q objects, removed QueryTreeTransformerVisitor (#98) (#171)
- FileFields now copyable (#198)
- Querysets now return clones and are no longer edited in place (#56)
- Added support for $maxDistance (#179)
- Uses getlasterror to test created on updated saves (#163)
- Fixed inheritance and unique index creation (#140)
- Fixed reverse delete rule with inheritance (#197)
- Fixed validation for GenericReferences which haven't been dereferenced
- Added switch_db context manager (#106)
- Added switch_db method to document instances (#106)
- Added no_dereference context manager (#82) (#61)
- Added switch_collection context manager (#220)
- Added switch_collection method to document instances (#220)
- Added support for compound primary keys (#149) (#121)
- Fixed overriding objects with custom manager (#58)
- Added no_dereference method for querysets (#82) (#61)
- Undefined data should not override instance methods (#49)
- Added Django Group and Permission (#142)
- Added Doc class and pk to validation messages (#69)
- Fixed Documents deleted via a queryset not calling any signals (#105)
- Added the "get_decoded" method to the MongoSession class (#216)
- Fixed invalid choices error bubbling (#214)
- Updated Save so it calls $set and $unset in a single operation (#211)
- Fixed inner queryset looping (#204)
Changes in 0.7.10
=================
- Fix UnicodeEncodeError for dbref (#278)
- Allow construction using positional parameters (#268)
- Updated EmailField length to support long domains (#243)
- Added 64-bit integer support (#251)
- Added Django sessions TTL support (#224)
- Fixed issue with numerical keys in MapField(EmbeddedDocumentField()) (#240)
- Fixed clearing _changed_fields for complex nested embedded documents (#237, #239, #242)
- Added "id" back to _data dictionary (#255)
- Only mark a field as changed if the value has changed (#258)
- Explicitly check for Document instances when dereferencing (#261)
- Fixed order_by chaining issue (#265)
- Added dereference support for tuples (#250)
- Resolve field name to db field name when using distinct (#260, #264, #269)
- Added kwargs to doc.save to help interop with django (#223, #270)
- Fixed cloning querysets in PY3
- Int fields no longer unset in save when changed to 0 (#272)
- Fixed ReferenceField query chaining bug (#254)

Changes in 0.7.9
================
- Better fix handling for old style _types
- Embedded SequenceFields follow collection naming convention

Changes in 0.7.8
================
- Fix sequence fields in embedded documents (#166)
- Fix query chaining with .order_by() (#176)
- Added optional encoding and collection config for Django sessions (#180, #181, #183)
- Fixed EmailField so extra validation can be added (#173, #174, #187)
- Fixed bulk inserts so they can now handle custom pks (#192)
- Added as_pymongo method to return raw or cast results from pymongo (#193)

Changes in 0.7.7
================
- Fix handling for old style _types

Changes in 0.7.6
================
- Unicode fix for repr (#133)
- Allow updates with match operators (#144)
- Updated URLField - the regex can now be overridden (#136)
- Allow Django AuthenticationBackends to work with Django user (hmarr/mongoengine#573)
- Fixed reload issue with ReferenceField where dbref=False (#138)

Changes in 0.7.5
================
- ReferenceFields with dbref=False use ObjectId instead of strings (#134)
  See ticket for upgrade notes (#134)

Changes in 0.7.4
================
- Fixed index inheritance issues - firmed up testcases (#123) (#125)

Changes in 0.7.3
================
- Reverted EmbeddedDocuments meta handling - inheritance can now be turned off (#119)

Changes in 0.7.2
================
- Update index spec generation so it's not destructive (#113)

Changes in 0.7.1
================
- Fixed index spec inheritance (#111)

Changes in 0.7.0
================
- Updated queryset.delete so you can use it with skip / limit (#107)
- Updated index creation allows kwargs to be passed through refs (#104)
- Fixed Q object merge edge case (#109)
- Fixed reloading on sharded documents (hmarr/mongoengine#569)
- Added NotUniqueError for duplicate keys (#62)
- Added custom collection / sequence naming for SequenceFields (#92)
- Fixed UnboundLocalError in composite index with pk field (#88)
- Updated ReferenceFields to optionally store ObjectId strings;
  this will become the default in 0.8 (#89)
- Added FutureWarning - save will default to `cascade=False` in 0.8
- Added example of indexing embedded document fields (#75)
- Fixed ImageField resizing when forcing size (#80)
- Add flexibility for fields handling bad data (#78)
- Embedded Documents no longer handle meta definitions
- Use weakref proxies in base lists / dicts (#74)
- Improved queryset filtering (hmarr/mongoengine#554)
- Fixed Dynamic Documents and Embedded Documents (hmarr/mongoengine#561)
- Fixed abstract classes and shard keys (#64)
- Fixed Python 2.5 support
- Added Python 3 support (thanks to Laine Heron)
Changes in 0.6.20
=================
- Added support for distinct and db_alias (#59)
- Improved support for chained querysets when constraining the same fields (hmarr/mongoengine#554)
- Fixed BinaryField lookup re (#48)

Changes in 0.6.19
=================
- Added Binary support to UUID (#47)
- Fixed MapField lookup for fields without declared lookups (#46)
- Fixed BinaryField python value issue (#48)
- Fixed SequenceField non-numeric value lookup (#41)
- Fixed queryset manager issue (#52)
- Fixed FileField comparison (hmarr/mongoengine#547)

Changes in 0.6.18
=================
- Fixed recursion loading bug in _get_changed_fields

Changes in 0.6.17
=================
- Fixed issue with custom queryset manager expecting explicit variable names

Changes in 0.6.16
=================
- Fixed issue where db_alias wasn't inherited

Changes in 0.6.15
=================
- Updated validation error messages
- Added support for null / zero / false values in item_frequencies
- Fixed cascade save edge case
- Fixed geo index creation through reference fields
- Added support for args / kwargs when using @queryset_manager
- Deref list custom id fix

Changes in 0.6.14
=================
- Fixed error dict with nested validation
- Fixed Int/Float fields and not equals None
- Exclude tests from installation
- Allow tuples for index meta
- Fixed use of str in instance checks
- Fixed unicode support in transform update
- Added support for add_to_set and each

Changes in 0.6.13
=================
- Fixed EmbeddedDocument db_field validation issue
- Fixed StringField unicode issue
- Fixes __repr__ modifying the cursor

Changes in 0.6.12
=================
- Fixes scalar lookups for primary_key
- Fixes error with _delta handling DBRefs

Changes in 0.6.11
=================
- Fixed inconsistency handling None values field attrs
- Fixed map_field embedded db_field issue
- Fixed .save() _delta issue with DBRefs
- Fixed Django TestCase
- Added cmp to Embedded Document
- Added PULL reverse_delete_rule
- Fixed CASCADE delete bug
- Fixed db_field data load error
- Fixed recursive save with FileField

Changes in 0.6.10
=================
- Fixed basedict / baselist to return super(..)
- Promoted BaseDynamicField to DynamicField

Changes in 0.6.9
================
- Fixed sparse indexes on inherited docs
- Removed FileField auto deletion; needs more work, maybe for 0.7

Changes in 0.6.8
================
- Fixed FileField losing reference when no default set
- Removed possible race condition from FileField (grid_file)
- Added assignment to save, can now do: `b = MyDoc(**kwargs).save()`
- Added support for pull operations on nested EmbeddedDocuments
- Added support for choices with GenericReferenceFields
- Added support for choices with GenericEmbeddedDocumentFields
- Fixed Django 1.4 sessions first save data loss
- FileField now automatically deletes files on .delete()
- Fix for GenericReference to_mongo method
- Fixed connection regression
- Updated Django User document, now allows inheritance
Changes in 0.6.7
================
- Fixed indexing on '_id' or 'pk' or 'id'
- Invalid data from the DB now raises an InvalidDocumentError
- Cleaned up the Validation Error - docs and code
- Added meta `auto_create_index` so you can disable index creation
- Added write concern options to inserts
- Fixed typo in meta for index options
- Bug fix: read preference now passed correctly
- Added support for file-like objects for GridFS
- Fix for #473 - Dereferencing abstracts

Changes in 0.6.6
================
- Django 1.4 fixed (finally)
- Added tests for Django

Changes in 0.6.5
================
- More Django updates

Changes in 0.6.4
================
- Refactored connection / fixed replicasetconnection
- Bug fix for unknown connection alias error message
- Sessions support Django 1.3 and Django 1.4
- Minor fix for ReferenceField

Changes in 0.6.3
================
- Updated sessions for Django 1.4
- Bug fix for updates where listfields contain embedded documents
- Bug fix for collection naming and mixins

Changes in 0.6.2
================
- Updated documentation for ReplicaSet connections
- Hack around _types issue with SERVER-5247 - querying other arrays may also cause problems.

Changes in 0.6.1
================
- Fix for replicaSet connections

Changes in 0.6
==============
- Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7
- Added support for covered indexes when inheritance is off
- No longer always upsert on save for items with a '_id'
- Error raised if update doesn't have an operation
- DeReferencing is now thread safe
- Errors raised if trying to perform a join in a query
- Updates can now take __raw__ queries
- Added custom 2D index declarations
- Added replicaSet connection support
- Updated deprecated imports from pymongo (safe for pymongo 2.2)
- Added uri support for connections
- Added scalar for efficiently returning partial data values (aliased to values_list)
- Fixed limit skip bug
- Improved Inheritance / Mixin
- Added sharding support
- Added pymongo 2.1 support
- Fixed Abstract documents can now declare indexes
- Added db_alias support to individual documents
- Fixed GridFS documents can now be pickled
- Now raises an InvalidDocumentError when declaring multiple fields with the same db_field
- Added InvalidQueryError when calling with_id with a filter
- Added support for DBRefs in distinct()
- Fixed issue saving False booleans
- Fixed issue with dynamic document deltas
- Added Reverse Delete Rule support to ListFields - MapFields aren't supported
- Added customisable cascade kwarg options
- Fixed handling of None values for non-required fields
- Removed Document._get_subclasses() - no longer required
- Fixed bug requiring subclasses when not actually needed
- Fixed deletion of dynamic data
- Added support for the $elemMatch operator
- Added reverse option to SortedListFields
- Fixed dereferencing - multi-directional list dereferencing
- Fixed issue creating indexes with recursive embedded documents
- Fixed recursive lookup in _unique_with_indexes
- Fixed passing ComplexField defaults to constructor for ReferenceFields
- Fixed validation of DictField Int keys
- Added optional cascade saving
- Fixed dereferencing - max_depth now taken into account
- Fixed document mutation saving issue
- Fixed positional operator when replacing embedded documents
- Added non-Django style choices back (you can have either)
- Fixed __repr__ of a sliced queryset
- Added recursive validation error of documents / complex fields
- Fixed breaking during queryset iteration
- Added pre and post bulk-insert signals
- Added ImageField - requires PIL
- Fixed Reference Fields can be None in get_or_create / queries
- Fixed accessing pk on an embedded document
- Fixed calling a queryset after drop_collection now recreates the collection
- Add field name to validation exception messages
- Added UUID field
- Improved efficiency of .get()
- Updated ComplexFields so if required they won't accept empty lists / dicts
- Added spec file for rpm-based distributions
- Fixed ListField so it doesn't accept strings
- Added DynamicDocument and EmbeddedDynamicDocument classes for expando schemas
Changes in v0.5.2
=================

- A robust circular reference bugfix

Changes in v0.5.1
=================

- Fixed simple circular reference bug

Changes in v0.5
===============

- Added InvalidDocumentError - so Document core methods can't be overwritten
- Added GenericEmbeddedDocument - so you can embed any type of embeddable document
- Added within_polygon support - for those with mongodb 1.9
- Updated sum / average to use map_reduce as db.eval doesn't work in sharded environments
- Added where() - filter to allow users to specify query expressions as Javascript
- Added SequenceField - for creating sequential counters
- Added update() convenience method to a document
- Added cascading saves - so changes to Referenced documents are saved on .save()
- Added select_related() support
- Added support for the positional operator
- Updated geo index checking to be recursive and check in embedded documents
- Updated default collection naming convention
- Added Document Mixin support
- Fixed queryset __repr__ mid iteration
- Added hint() support, so you can tell Mongo the proper index to use for the query
- Fixed issue with inconsistent setting of _cls breaking inherited referencing
- Added help_text and verbose_name to fields to help with some form libs
- Updated item_frequencies to handle embedded document lookups
- Added delta tracking - now only sets / unsets explicitly changed fields
- Fixed saving so it sets updated values rather than overwriting
- Added ComplexDateTimeField - handles datetimes correctly with microseconds
- Added ComplexBaseField - for improved flexibility and performance
- Added get_FIELD_display() method for easy choice field displaying
- Added queryset.slave_okay(enabled) method
- Updated queryset.timeout(enabled) and queryset.snapshot(enabled) to be chainable
- Added insert method for bulk inserts
- Added blinker signal support
- Added query_counter context manager for tests
- Added map_reduce method item_frequencies and set as default (as db.eval doesn't work in sharded environments)
- Added inline_map_reduce option to map_reduce
- Updated connection exception so it provides more info on the cause
- Added searching multiple levels deep in ``DictField``
- Added ``DictField`` entries containing strings to use matching operators
- Added ``MapField``, similar to ``DictField``
- Added Abstract Base Classes
- Added Custom Objects Managers
- Added sliced subfields updating
- Added ``NotRegistered`` exception if dereferencing ``Document`` not in the registry
- Added a write concern for ``save``, ``update``, ``update_one`` and ``get_or_create``
- Added slicing / subarray fetching controls
- Fixed various unique index and other index issues
- Fixed threaded connection issues
- Added spherical geospatial query operators
- Updated queryset to handle the latest version of pymongo;
  map_reduce now requires an output
- Added ``Document`` __hash__, __ne__ for pickling
- Added ``FileField`` optional size arg for read method
- Fixed ``FileField`` seek and tell methods for reading files
- Added ``QuerySet.clone`` to support copying querysets
- Fixed item_frequencies when using a name that's the same as a native js function
- Added reverse delete rules
- Fixed issue with unset operation
- Fixed Q-object bug
- Added ``QuerySet.all_fields`` - resets previous .only() and .exclude()
- Added ``QuerySet.exclude``
- Added django-style choices
- Fixed order and filter issue
- Added ``QuerySet.only`` subfield support
- Added creation_counter to ``BaseField`` allowing fields to be sorted in the
  way the user has specified them
- Fixed various errors
- Added many tests
Changes in v0.4
===============
- Added ``GridFSStorage`` Django storage backend
- Added ``FileField`` for GridFS support
- New Q-object implementation, which is no longer based on Javascript
- Added ``SortedListField``
- Added ``EmailField``
- Added ``GeoPointField``
- Added ``exact`` and ``iexact`` match operators to ``QuerySet``
- Added ``get_document_or_404`` and ``get_list_or_404`` Django shortcuts
- Added new query operators for Geo queries
- Added ``not`` query operator
- Added new update operators: ``pop`` and ``add_to_set``
- Added ``__raw__`` query parameter
- Added support for custom querysets
- Fixed document inheritance primary key issue
- Added support for querying by array element position
- Base class can now be defined for ``DictField``
- Fixed MRO error that occurred on document inheritance
- Added ``QuerySet.distinct``, ``QuerySet.create``, ``QuerySet.snapshot``,
  ``QuerySet.timeout`` and ``QuerySet.all``
- Subsequent calls to ``connect()`` now work
- Introduced ``min_length`` for ``StringField``
- Fixed multi-process connection issue
- Other minor fixes

Changes in v0.3
===============
- Added MapReduce support
- Added ``contains``, ``startswith`` and ``endswith`` query operators (and
  case-insensitive versions that are prefixed with 'i')
- Deprecated fields' ``name`` parameter, replaced with ``db_field``
- Added ``QuerySet.only`` for only retrieving specific fields
- Added ``QuerySet.in_bulk()`` for bulk querying using ids
- ``QuerySet``\ s now have a ``rewind()`` method, which is called automatically
  when the iterator is exhausted, allowing ``QuerySet``\ s to be reused
- Added ``DictField``
- Added ``URLField``
- Added ``DecimalField``
- Added ``BinaryField``
- Added ``GenericReferenceField``
- Added ``get()`` and ``get_or_create()`` methods to ``QuerySet``
- ``ReferenceField``\ s may now reference the document they are defined on
  (recursive references) and documents that have not yet been defined
- ``Document`` objects may now be compared for equality (equal if _ids are
  equal and documents are of same type)
- ``QuerySet`` update methods now have an ``upsert`` parameter
- Added field name substitution for Javascript code (allows the user to use the
  Python names for fields in JS, which are later substituted for the real field
  names)
- ``Q`` objects now support regex querying
- Fixed bug where referenced documents within lists weren't properly
  dereferenced
- ``ReferenceField``\ s may now be queried using their _id
- Fixed bug where ``EmbeddedDocuments`` couldn't be non-polymorphic
- ``queryset_manager`` functions now accept two arguments -- the document class
  as the first and the queryset as the second
- Fixed bug where ``QuerySet.exec_js`` ignored ``Q`` objects
- Other minor fixes

Changes in v0.2.2
=================
- Fixed bug that prevented indexes from being used on ``ListField``\ s
- ``Document.filter()`` added as an alias to ``Document.__call__()``
- ``validate()`` may now be used on ``EmbeddedDocument``\ s

Changes in v0.2.1
=================
- Added a MongoEngine backend for Django sessions
- Added ``force_insert`` to ``Document.save()``
- Improved querying syntax for ``ListField`` and ``EmbeddedDocumentField``
- Added support for user-defined primary keys (``_id`` in MongoDB)

Changes in v0.2
===============
- Added ``Q`` class for building advanced queries
- Added ``QuerySet`` methods for atomic updates to documents
- Fields may now specify ``unique=True`` to enforce uniqueness across a
  collection
- Added option for default document ordering
- Fixed bug in index definitions

Changes in v0.1.3
=================
- Added Django authentication backend
- Added ``Document.meta`` support for indexes, which are ensured just before
  querying takes place
- A few minor bugfixes

Changes in v0.1.2
=================
- Query values may be processed before being used in queries
- Made connections lazy
- Fixed bug in Document dictionary-style access
- Added ``BooleanField``
- Added ``Document.reload()`` method

Changes in v0.1.1
=================
- Documents may now use capped collections
@@ -45,7 +45,7 @@ print 'ALL POSTS'
 print
 for post in Post.objects:
     print post.title
-    print '=' * len(post.title)
+    print '=' * post.title.count()
 
     if isinstance(post, TextPost):
         print post.content
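One caveat in the hunk above: ``str.count`` requires a substring argument, so the new ``post.title.count()`` spelling raises a ``TypeError`` at runtime, while the old ``len(post.title)`` form is the idiom that underlines the full title. A quick illustrative check (not part of the diff)::

    title = 'Post title'
    print '=' * len(title)      # prints ==========
    print '=' * title.count()   # TypeError: count() takes at least 1 argument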
23 docs/conf.py
@@ -16,16 +16,16 @@ import sys, os
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.append(os.path.abspath('..'))
+sys.path.insert(0, os.path.abspath('..'))
 
 # -- General configuration -----------------------------------------------------
 
 # Add any Sphinx extension module names here, as strings. They can be extensions
 # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-extensions = ['sphinx.ext.autodoc']
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo']
 
 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['.templates']
+templates_path = ['_templates']
 
 # The suffix of source filenames.
 source_suffix = '.rst'
@@ -38,7 +38,7 @@ master_doc = 'index'
 
 # General information about the project.
 project = u'MongoEngine'
-copyright = u'2009, Harry Marr'
+copyright = u'2009, MongoEngine Authors'
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
@@ -121,7 +121,7 @@ html_theme_path = ['_themes']
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+#html_static_path = ['_static']
 
 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
 # using the given strftime format.
@@ -132,7 +132,11 @@ html_static_path = ['_static']
 html_use_smartypants = True
 
 # Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
+html_sidebars = {
+    'index': ['globaltoc.html', 'searchbox.html'],
+    '**': ['localtoc.html', 'relations.html', 'searchbox.html']
+}
 
 # Additional templates that should be rendered to pages, maps page names to
 # template names.
@@ -173,8 +177,8 @@ latex_paper_size = 'a4'
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title, author, documentclass [howto/manual]).
 latex_documents = [
-  ('index', 'MongoEngine.tex', u'MongoEngine Documentation',
-   u'Harry Marr', 'manual'),
+  ('index', 'MongoEngine.tex', 'MongoEngine Documentation',
+   'Ross Lawley', 'manual'),
 ]
 
 # The name of an image file (relative to this directory) to place at the top of
@@ -193,3 +197,6 @@ latex_documents = [
 
 # If false, no module index is generated.
 #latex_use_modindex = True
+
+autoclass_content = 'both'
139 docs/django.rst (new file)
@@ -0,0 +1,139 @@

==============
Django Support
==============

.. note:: Updated to support Django 1.5

Connecting
==========
In your **settings.py** file, ignore the standard database settings (unless you
also plan to use the ORM in your project), and instead call
:func:`~mongoengine.connect` somewhere in the settings module.

.. note::
   If you are not using another database backend you may need to add a dummy
   database backend to ``settings.py``, e.g.::

        DATABASES = {
            'default': {
                'ENGINE': 'django.db.backends.dummy'
            }
        }

Authentication
==============
MongoEngine includes a Django authentication backend, which uses MongoDB. The
:class:`~mongoengine.django.auth.User` model is a MongoEngine
:class:`~mongoengine.Document`, but implements most of the methods and
attributes that the standard Django :class:`User` model does - so the two are
moderately compatible. Using this backend will allow you to store users in
MongoDB but still use much of the Django authentication infrastructure (such as
the :func:`login_required` decorator and the :func:`authenticate` function). To
enable the MongoEngine auth backend, add the following to your **settings.py**
file::

    AUTHENTICATION_BACKENDS = (
        'mongoengine.django.auth.MongoEngineBackend',
    )

The :mod:`~mongoengine.django.auth` module also contains a
:func:`~mongoengine.django.auth.get_user` helper function that takes a user's
:attr:`id` and returns a :class:`~mongoengine.django.auth.User` object.

.. versionadded:: 0.1.3
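A minimal sketch of the backend in use (the view function below is illustrative, not part of MongoEngine)::

    from django.contrib.auth import authenticate, login

    def login_view(request):
        user = authenticate(username=request.POST['username'],
                            password=request.POST['password'])
        if user is not None:
            login(request, user)  # stores the user's id in the session

:func:`authenticate` consults the backend configured above, so the object it returns is a :class:`~mongoengine.django.auth.User` document.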
Custom User model
=================
Django 1.5 introduced `custom user models
<https://docs.djangoproject.com/en/dev/topics/auth/customizing/#auth-custom-user>`_,
which can be used as an alternative to the MongoEngine authentication backend.

The main advantage of this option is that other components relying on
:mod:`django.contrib.auth` and supporting the new swappable user model are more
likely to work. For example, you can use the ``createsuperuser`` management
command as usual.

To enable the custom user model in Django, add ``'mongoengine.django.mongo_auth'``
to your ``INSTALLED_APPS`` and set ``'mongo_auth.MongoUser'`` as the custom user
model to use. In your **settings.py** file you will have::

    INSTALLED_APPS = (
        ...
        'django.contrib.auth',
        'mongoengine.django.mongo_auth',
        ...
    )

    AUTH_USER_MODEL = 'mongo_auth.MongoUser'

An additional ``MONGOENGINE_USER_DOCUMENT`` setting enables you to replace the
:class:`~mongoengine.django.auth.User` class with another class of your choice::

    MONGOENGINE_USER_DOCUMENT = 'mongoengine.django.auth.User'

The custom :class:`User` must be a :class:`~mongoengine.Document` class, but
otherwise has the same requirements as a standard custom user model,
as specified in the `Django documentation
<https://docs.djangoproject.com/en/dev/topics/auth/customizing/>`_.
In particular, the custom class must define the :attr:`USERNAME_FIELD` and
:attr:`REQUIRED_FIELDS` attributes.
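A minimal sketch of such a replacement document (class and field names are illustrative; a production class would also implement the methods Django expects of a user model, per the Django documentation above)::

    from mongoengine import Document, StringField

    class CustomUser(Document):
        USERNAME_FIELD = 'username'
        REQUIRED_FIELDS = ('email',)

        username = StringField(required=True)
        email = StringField(required=True)

    # in settings.py (the import path is illustrative):
    # MONGOENGINE_USER_DOCUMENT = 'myapp.models.CustomUser'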
Sessions
========
Django allows the use of different backend stores for its sessions. MongoEngine
provides a MongoDB-based session backend for Django, which allows you to use
sessions in your Django application with just MongoDB. To enable the MongoEngine
session backend, ensure that your settings module has
``'django.contrib.sessions.middleware.SessionMiddleware'`` in the
``MIDDLEWARE_CLASSES`` field and ``'django.contrib.sessions'`` in your
``INSTALLED_APPS``. From there, all you need to do is add the following line
into your settings module::

    SESSION_ENGINE = 'mongoengine.django.sessions'

Django's session cookie expires after ``SESSION_COOKIE_AGE`` seconds, but the
session data itself is not deleted from the backend store at that point, so
``'mongoengine.django.sessions'`` supports `MongoDB TTL indexes
<http://docs.mongodb.org/manual/tutorial/expire-data/>`_ to clean up expired
sessions.

.. versionadded:: 0.2.1
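For example, assuming Django's default expiry behaviour, to have sessions expire after two weeks (the value is illustrative)::

    SESSION_COOKIE_AGE = 14 * 24 * 3600  # seconds; also drives the stored expiry date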
Storage
=======
With MongoEngine's support for GridFS via the :class:`~mongoengine.fields.FileField`,
it is useful to have a Django file storage backend that wraps this. The new
storage module is called :class:`~mongoengine.django.storage.GridFSStorage`.
Using it is very similar to using the default FileSystemStorage::

    from mongoengine.django.storage import GridFSStorage
    fs = GridFSStorage()

    filename = fs.save('hello.txt', 'Hello, World!')

All of the `Django Storage API methods
<http://docs.djangoproject.com/en/dev/ref/files/storage/>`_ have been
implemented except :func:`path`. If the filename provided already exists, an
underscore and a number (before the file extension, if one exists) will be
appended to the filename until the generated filename doesn't exist. The
:func:`save` method will return the new filename::

    >>> fs.exists('hello.txt')
    True
    >>> fs.open('hello.txt').read()
    'Hello, World!'
    >>> fs.size('hello.txt')
    13
    >>> fs.url('hello.txt')
    'http://your_media_url/hello.txt'
    >>> fs.open('hello.txt').name
    'hello.txt'
    >>> fs.listdir()
    ([], [u'hello.txt'])

All files will be saved and retrieved in GridFS via the :class:`FileDocument`
document, allowing easy access to the files without the GridFSStorage
backend::

    >>> from mongoengine.django.storage import FileDocument
    >>> FileDocument.objects()
    [<FileDocument: FileDocument object>]

.. versionadded:: 0.4
99 docs/guide/connecting.rst (new file)
@@ -0,0 +1,99 @@

.. _guide-connecting:

=====================
Connecting to MongoDB
=====================

To connect to a running instance of :program:`mongod`, use the
:func:`~mongoengine.connect` function. The first argument is the name of the
database to connect to::

    from mongoengine import connect
    connect('project1')

By default, MongoEngine assumes that the :program:`mongod` instance is running
on **localhost** on port **27017**. If MongoDB is running elsewhere, you should
provide the :attr:`host` and :attr:`port` arguments to
:func:`~mongoengine.connect`::

    connect('project1', host='192.168.1.35', port=12345)

If the database requires authentication, :attr:`username` and :attr:`password`
arguments should be provided::

    connect('project1', username='webapp', password='pwd123')

URI-style connections are also supported as long as you include the database
name - just supply the URI as the :attr:`host` to
:func:`~mongoengine.connect`::

    connect('project1', host='mongodb://localhost/database_name')

ReplicaSets
===========

MongoEngine supports
:class:`~pymongo.mongo_replica_set_client.MongoReplicaSetClient`; to use a
replica set, use a URI-style connection and provide the `replicaSet` name in
the connection kwargs.
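For example (the replica set name ``rs0`` and the URI are illustrative)::

    connect('project1',
            host='mongodb://localhost/project1',
            replicaSet='rs0')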
Read preferences are supported through the connection or via individual
queries by passing the read_preference::

    Bar.objects().read_preference(ReadPreference.PRIMARY)
    Bar.objects(read_preference=ReadPreference.PRIMARY)

Multiple Databases
==================

Multiple database support was added in MongoEngine 0.6. To use multiple
databases you can use :func:`~mongoengine.connect` and provide an `alias` name
for the connection - if no `alias` is provided then "default" is used.

In the background this uses :func:`~mongoengine.register_connection` to
store the data, and you can register all aliases up front if required.
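For example, the aliases used in the schema below could be registered up front (the database names are illustrative)::

    from mongoengine import register_connection

    register_connection('user-db', 'project1_users')
    register_connection('book-db', 'project1_books')
    register_connection('users-books-db', 'project1_author_books')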
Individual documents can also support multiple databases by providing a
`db_alias` in their meta data. This allows :class:`~pymongo.dbref.DBRef` objects
to point across databases and collections. Below is an example schema, using
3 different databases to store data::

    class User(Document):
        name = StringField()

        meta = {"db_alias": "user-db"}

    class Book(Document):
        name = StringField()

        meta = {"db_alias": "book-db"}

    class AuthorBooks(Document):
        author = ReferenceField(User)
        book = ReferenceField(Book)

        meta = {"db_alias": "users-books-db"}


Switch Database Context Manager
===============================

Sometimes you may want to switch the database to query against for a class,
for example, archiving older data into a separate database for performance
reasons.

The :class:`~mongoengine.context_managers.switch_db` context manager allows
you to change the database alias for a given class, allowing quick and easy
access to the same User document across databases, e.g.::

    from mongoengine.context_managers import switch_db

    class User(Document):
        name = StringField()

        meta = {"db_alias": "user-db"}

    with switch_db(User, 'archive-user-db') as User:
        User(name="Ross").save()  # Saves to the 'archive-user-db'

.. note:: Make sure any aliases have been registered with
   :func:`~mongoengine.register_connection` before using the context manager.
670
docs/guide/defining-documents.rst
Normal file
670
docs/guide/defining-documents.rst
Normal file
@@ -0,0 +1,670 @@
|
|||||||
|
==================
|
||||||
|
Defining documents
|
||||||
|
==================
|
||||||
|
In MongoDB, a **document** is roughly equivalent to a **row** in an RDBMS. When
|
||||||
|
working with relational databases, rows are stored in **tables**, which have a
|
||||||
|
strict **schema** that the rows follow. MongoDB stores documents in
|
||||||
|
**collections** rather than tables - the principle difference is that no schema
|
||||||
|
is enforced at a database level.
|
||||||
|
|
||||||
|
Defining a document's schema
|
||||||
|
============================
|
||||||
|
MongoEngine allows you to define schemata for documents as this helps to reduce
|
||||||
|
coding errors, and allows for utility methods to be defined on fields which may
|
||||||
|
be present.
|
||||||
|
|
||||||
|
To define a schema for a document, create a class that inherits from
|
||||||
|
:class:`~mongoengine.Document`. Fields are specified by adding **field
|
||||||
|
objects** as class attributes to the document class::
|
||||||
|
|
||||||
|
from mongoengine import *
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
class Page(Document):
|
||||||
|
title = StringField(max_length=200, required=True)
|
||||||
|
date_modified = DateTimeField(default=datetime.datetime.now)
|
||||||
|
|
||||||
|
As BSON (the binary format for storing data in mongodb) is order dependent,
|
||||||
|
documents are serialized based on their field order.
|
||||||
|
|
||||||
|
Dynamic document schemas
|
||||||
|
========================
|
||||||
|
One of the benefits of MongoDb is dynamic schemas for a collection, whilst data
|
||||||
|
should be planned and organised (after all explicit is better than implicit!)
|
||||||
|
there are scenarios where having dynamic / expando style documents is desirable.
|
||||||
|
|
||||||
|
:class:`~mongoengine.DynamicDocument` documents work in the same way as
|
||||||
|
:class:`~mongoengine.Document` but any data / attributes set to them will also
|
||||||
|
be saved ::
|
||||||
|
|
||||||
|
from mongoengine import *
|
||||||
|
|
||||||
|
class Page(DynamicDocument):
|
||||||
|
title = StringField(max_length=200, required=True)
|
||||||
|
|
||||||
|
# Create a new page and add tags
|
||||||
|
>>> page = Page(title='Using MongoEngine')
|
||||||
|
>>> page.tags = ['mongodb', 'mongoengine']
|
||||||
|
>>> page.save()
|
||||||
|
|
||||||
|
>>> Page.objects(tags='mongoengine').count()
|
||||||
|
>>> 1
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
There is one caveat on Dynamic Documents: fields cannot start with `_`
|
||||||
|
|
||||||
|
Dynamic fields are stored in alphabetical order *after* any declared fields.
|
||||||
|
|
||||||
|
Fields
|
||||||
|
======
|
||||||
|
By default, fields are not required. To make a field mandatory, set the
|
||||||
|
:attr:`required` keyword argument of a field to ``True``. Fields also may have
|
||||||
|
validation constraints available (such as :attr:`max_length` in the example
|
||||||
|
above). Fields may also take default values, which will be used if a value is
|
||||||
|
not provided. Default values may optionally be a callable, which will be called
|
||||||
|
to retrieve the value (such as in the above example). The field types available
|
||||||
|
are as follows:
|
||||||
|
|
||||||
|
* :class:`~mongoengine.fields.BinaryField`
|
||||||
|
* :class:`~mongoengine.fields.BooleanField`
|
||||||
|
* :class:`~mongoengine.fields.ComplexDateTimeField`
|
||||||
|
* :class:`~mongoengine.fields.DateTimeField`
|
||||||
|
* :class:`~mongoengine.fields.DecimalField`
|
||||||
|
* :class:`~mongoengine.fields.DictField`
|
||||||
|
* :class:`~mongoengine.fields.DynamicField`
|
||||||
|
* :class:`~mongoengine.fields.EmailField`
|
||||||
|
* :class:`~mongoengine.fields.EmbeddedDocumentField`
|
||||||
|
* :class:`~mongoengine.fields.FileField`
|
||||||
|
* :class:`~mongoengine.fields.FloatField`
|
||||||
|
* :class:`~mongoengine.fields.GenericEmbeddedDocumentField`
|
||||||
|
* :class:`~mongoengine.fields.GenericReferenceField`
|
||||||
|
* :class:`~mongoengine.fields.GeoPointField`
|
||||||
|
* :class:`~mongoengine.fields.ImageField`
|
||||||
|
* :class:`~mongoengine.fields.IntField`
|
||||||
|
* :class:`~mongoengine.fields.ListField`
|
||||||
|
* :class:`~mongoengine.fields.MapField`
|
||||||
|
* :class:`~mongoengine.fields.ObjectIdField`
|
||||||
|
* :class:`~mongoengine.fields.ReferenceField`
|
||||||
|
* :class:`~mongoengine.fields.SequenceField`
|
||||||
|
* :class:`~mongoengine.fields.SortedListField`
|
||||||
|
* :class:`~mongoengine.fields.StringField`
|
||||||
|
* :class:`~mongoengine.fields.URLField`
|
||||||
|
* :class:`~mongoengine.fields.UUIDField`
|
||||||
|
|
||||||
|
Field arguments
---------------
Each field type can be customized by keyword arguments. The following keyword
arguments can be set on all fields:

:attr:`db_field` (Default: None)
    The MongoDB field name.

:attr:`name` (Default: None)
    The MongoEngine field name.

:attr:`required` (Default: False)
    If set to True and the field is not set on the document instance, a
    :class:`~mongoengine.ValidationError` will be raised when the document is
    validated.

:attr:`default` (Default: None)
    A value to use when no value is set for this field.

    The definition of default parameters follows `the general rules on Python
    <http://docs.python.org/reference/compound_stmts.html#function-definitions>`__,
    which means that some care should be taken when dealing with mutable default
    objects (as in :class:`~mongoengine.fields.ListField` or
    :class:`~mongoengine.fields.DictField`)::

        class ExampleFirst(Document):
            # Default an empty list
            values = ListField(IntField(), default=list)

        class ExampleSecond(Document):
            # Default a set of values
            values = ListField(IntField(), default=lambda: [1, 2, 3])

        class ExampleDangerous(Document):
            # This shares the default list between all instances, so an
            # .append call would add values to the default (and to all the
            # following objects), instead of to just one object
            values = ListField(IntField(), default=[1, 2, 3])

:attr:`unique` (Default: False)
    When True, no documents in the collection will have the same value for this
    field.

:attr:`unique_with` (Default: None)
    A field name (or list of field names) that when taken together with this
    field, will not have two documents in the collection with the same value.

:attr:`primary_key` (Default: False)
    When True, use this field as a primary key for the collection. `DictField`
    and `EmbeddedDocuments` both support being the primary key for a document.

:attr:`choices` (Default: None)
    An iterable (e.g. a list or tuple) of choices to which the value of this
    field should be limited.

    Can either be nested tuples of the value (stored in mongo) and a
    human readable key ::

        SIZE = (('S', 'Small'),
                ('M', 'Medium'),
                ('L', 'Large'),
                ('XL', 'Extra Large'),
                ('XXL', 'Extra Extra Large'))

        class Shirt(Document):
            size = StringField(max_length=3, choices=SIZE)

    Or a flat iterable just containing values ::

        SIZE = ('S', 'M', 'L', 'XL', 'XXL')

        class Shirt(Document):
            size = StringField(max_length=3, choices=SIZE)

:attr:`help_text` (Default: None)
    Optional help text to output with the field - used by form libraries

:attr:`verbose_name` (Default: None)
    Optional human-readable name for the field - used by form libraries

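To make these arguments concrete, here is a minimal sketch combining several
of them on one document (the ``Product`` class and its field names are
illustrative only, not part of the MongoEngine API)::

    class Product(Document):
        # stored in MongoDB under a different name, unique across the collection
        sku = StringField(db_field='stockKeepingUnit', required=True, unique=True)
        # limited to the given choices, with form-library metadata
        status = StringField(choices=('new', 'used'), default='new',
                             verbose_name='Condition',
                             help_text='Whether the product is new or used')
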
List fields
-----------
MongoDB allows the storage of lists of items. To add a list of items to a
:class:`~mongoengine.Document`, use the :class:`~mongoengine.fields.ListField` field
type. :class:`~mongoengine.fields.ListField` takes another field object as its first
argument, which specifies the type of elements that may be stored within the list::

    class Page(Document):
        tags = ListField(StringField(max_length=50))

Embedded documents
------------------
MongoDB has the ability to embed documents within other documents. Schemata may
be defined for these embedded documents, just as they may be for regular
documents. To create an embedded document, just define a document as usual, but
inherit from :class:`~mongoengine.EmbeddedDocument` rather than
:class:`~mongoengine.Document`::

    class Comment(EmbeddedDocument):
        content = StringField()

To embed the document within another document, use the
:class:`~mongoengine.fields.EmbeddedDocumentField` field type, providing the embedded
document class as the first argument::

    class Page(Document):
        comments = ListField(EmbeddedDocumentField(Comment))

    comment1 = Comment(content='Good work!')
    comment2 = Comment(content='Nice article!')
    page = Page(comments=[comment1, comment2])

Dictionary Fields
-----------------
Often, an embedded document may be used instead of a dictionary -- generally
this is recommended as dictionaries don't support validation or custom field
types. However, sometimes you will not know the structure of what you want to
store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate::

    class SurveyResponse(Document):
        date = DateTimeField()
        user = ReferenceField(User)
        answers = DictField()

    survey_response = SurveyResponse(date=datetime.now(), user=request.user)
    response_form = ResponseForm(request.POST)
    survey_response.answers = response_form.cleaned_data
    survey_response.save()

Dictionaries can store complex data, other dictionaries, lists, and references
to other objects, so they are the most flexible field type available.

Reference fields
----------------
References to other documents in the database may be stored using the
:class:`~mongoengine.fields.ReferenceField`. Pass in another document class as the
first argument to the constructor, then simply assign document objects to the
field::

    class User(Document):
        name = StringField()

    class Page(Document):
        content = StringField()
        author = ReferenceField(User)

    john = User(name="John Smith")
    john.save()

    post = Page(content="Test Page")
    post.author = john
    post.save()

The :class:`User` object is automatically turned into a reference behind the
scenes, and dereferenced when the :class:`Page` object is retrieved.

To add a :class:`~mongoengine.fields.ReferenceField` that references the document
being defined, use the string ``'self'`` in place of the document class as the
argument to :class:`~mongoengine.fields.ReferenceField`'s constructor. To reference a
document that has not yet been defined, use the name of the undefined document
as the constructor's argument::

    class Employee(Document):
        name = StringField()
        boss = ReferenceField('self')
        profile_page = ReferenceField('ProfilePage')

    class ProfilePage(Document):
        content = StringField()

.. _one-to-many-with-listfields:

One to Many with ListFields
'''''''''''''''''''''''''''

If you are implementing a one to many relationship via a list of references,
then the references are stored as DBRefs and to query you need to pass an
instance of the object to the query::

    class User(Document):
        name = StringField()

    class Page(Document):
        content = StringField()
        authors = ListField(ReferenceField(User))

    bob = User(name="Bob Jones").save()
    john = User(name="John Smith").save()

    Page(content="Test Page", authors=[bob, john]).save()
    Page(content="Another Page", authors=[john]).save()

    # Find all pages Bob authored
    Page.objects(authors__in=[bob])

    # Find all pages that both Bob and John have authored
    Page.objects(authors__all=[bob, john])

Dealing with deletion of referred documents
'''''''''''''''''''''''''''''''''''''''''''
By default, MongoDB doesn't check the integrity of your data, so deleting
documents that other documents still hold references to will lead to consistency
issues. MongoEngine's :class:`ReferenceField` adds some functionality to
safeguard against these kinds of database integrity problems, providing each
reference with a delete rule specification. A delete rule is specified by
supplying the :attr:`reverse_delete_rule` attribute on the
:class:`ReferenceField` definition, like this::

    class Employee(Document):
        ...
        profile_page = ReferenceField('ProfilePage', reverse_delete_rule=mongoengine.NULLIFY)

The declaration in this example means that when a :class:`ProfilePage` object
is removed, the :attr:`profile_page` field of any :class:`Employee` that still
references it is unset. If a whole batch of profile pages is removed, the
references on all linked employees are unset as well.

Its value can take any of the following constants:

:const:`mongoengine.DO_NOTHING`
    This is the default and won't do anything. Deletes are fast, but may cause
    database inconsistency or dangling references.
:const:`mongoengine.DENY`
    Deletion is denied if there still exist references to the object being
    deleted.
:const:`mongoengine.NULLIFY`
    Any object's fields still referring to the object being deleted are removed
    (using MongoDB's "unset" operation), effectively nullifying the relationship.
:const:`mongoengine.CASCADE`
    Any object containing fields that are referring to the object being deleted
    is deleted first.
:const:`mongoengine.PULL`
    Removes the reference to the object (using MongoDB's "pull" operation)
    from any object's fields of
    :class:`~mongoengine.fields.ListField` (:class:`~mongoengine.fields.ReferenceField`).

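As an illustration, a delete rule can be combined with the one to many example
above; this sketch (reusing the earlier ``User`` and ``Page`` names) uses
:const:`mongoengine.PULL` so that deleting a user removes it from any page's
author list::

    class Page(Document):
        content = StringField()
        authors = ListField(ReferenceField(User, reverse_delete_rule=mongoengine.PULL))

    bob = User(name="Bob Jones").save()
    page = Page(content="Test Page", authors=[bob]).save()

    bob.delete()   # 'bob' is pulled from page.authors
    page.reload()
    assert bob not in page.authors
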
.. warning::
    A safety note on setting up these delete rules! Since the delete rules are
    not recorded on the database level by MongoDB itself, but instead at runtime,
    in-memory, by the MongoEngine module, it is of the utmost importance
    that the module that declares the relationship is loaded **BEFORE** the
    delete is invoked.

    If, for example, the :class:`Employee` object lives in the
    :mod:`payroll` app, and the :class:`ProfilePage` in the :mod:`people`
    app, it is extremely important that the :mod:`people` app is loaded
    before any employee is removed, because otherwise, MongoEngine could
    never know this relationship exists.

    In Django, be sure to put all apps that have such delete rule declarations in
    their :file:`models.py` in the :const:`INSTALLED_APPS` tuple.

.. warning::
    Signals are not triggered when doing cascading updates / deletes - if this
    is required you must manually handle the update / delete.

Generic reference fields
''''''''''''''''''''''''
A second kind of reference field also exists,
:class:`~mongoengine.fields.GenericReferenceField`. This allows you to reference any
kind of :class:`~mongoengine.Document`, and hence doesn't take a
:class:`~mongoengine.Document` subclass as a constructor argument::

    class Link(Document):
        url = StringField()

    class Post(Document):
        title = StringField()

    class Bookmark(Document):
        bookmark_object = GenericReferenceField()

    link = Link(url='http://hmarr.com/mongoengine/')
    link.save()

    post = Post(title='Using MongoEngine')
    post.save()

    Bookmark(bookmark_object=link).save()
    Bookmark(bookmark_object=post).save()

.. note::

    Using :class:`~mongoengine.fields.GenericReferenceField`\ s is slightly less
    efficient than the standard :class:`~mongoengine.fields.ReferenceField`\ s, so if
    you will only be referencing one document type, prefer the standard
    :class:`~mongoengine.fields.ReferenceField`.

Uniqueness constraints
----------------------
MongoEngine allows you to specify that a field should be unique across a
collection by providing ``unique=True`` to a :class:`~mongoengine.fields.Field`\ 's
constructor. If you try to save a document that has the same value for a unique
field as a document that is already in the database, a
:class:`~mongoengine.OperationError` will be raised. You may also specify
multi-field uniqueness constraints by using :attr:`unique_with`, which may be
either a single field name, or a list or tuple of field names::

    class User(Document):
        username = StringField(unique=True)
        first_name = StringField()
        last_name = StringField(unique_with='first_name')

Skipping Document validation on save
------------------------------------
You can also skip the whole document validation process by setting
``validate=False`` when calling the :meth:`~mongoengine.document.Document.save`
method::

    class Recipient(Document):
        name = StringField()
        email = EmailField()

    recipient = Recipient(name='admin', email='root@localhost')
    recipient.save()               # will raise a ValidationError, while
    recipient.save(validate=False) # won't

Document collections
====================
Document classes that inherit **directly** from :class:`~mongoengine.Document`
will have their own **collection** in the database. The name of the collection
is by default the name of the class, converted to lowercase (so in the example
above, the collection would be called `page`). If you need to change the name
of the collection (e.g. to use MongoEngine with an existing database), then
create a class dictionary attribute called :attr:`meta` on your document, and
set :attr:`collection` to the name of the collection that you want your
document class to use::

    class Page(Document):
        title = StringField(max_length=200, required=True)
        meta = {'collection': 'cmsPage'}

Capped collections
------------------
A :class:`~mongoengine.Document` may use a **Capped Collection** by specifying
:attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary.
:attr:`max_documents` is the maximum number of documents that may be stored
in the collection, and :attr:`max_size` is the maximum size of the
collection in bytes. If :attr:`max_size` is not specified and
:attr:`max_documents` is, :attr:`max_size` defaults to 10000000 bytes (10MB).
The following example shows a :class:`Log` document that will be limited to
1000 entries and 2MB of disk space::

    class Log(Document):
        ip_address = StringField()
        meta = {'max_documents': 1000, 'max_size': 2000000}

Indexes
=======

You can specify indexes on collections to make querying faster. This is done
by creating a list of index specifications called :attr:`indexes` in the
:attr:`~mongoengine.Document.meta` dictionary, where an index specification may
either be a single field name, a tuple containing multiple field names, or a
dictionary containing a full index definition. A direction may be specified on
fields by prefixing the field name with a **+** or a **-** sign. Note that
direction only matters on multi-field indexes. ::

    class Page(Document):
        title = StringField()
        rating = StringField()
        meta = {
            'indexes': ['title', ('title', '-rating')]
        }

If a dictionary is passed then the following options are available:

:attr:`fields` (Default: None)
    The fields to index. Specified in the same format as described above.

:attr:`cls` (Default: True)
    If you have polymorphic models that inherit and have
    :attr:`allow_inheritance` turned on, you can configure whether the index
    should have the :attr:`_cls` field added automatically to the start of the
    index.

:attr:`sparse` (Default: False)
    Whether the index should be sparse.

:attr:`unique` (Default: False)
    Whether the index should be unique.

:attr:`expireAfterSeconds` (Optional)
    Allows you to automatically expire data from a collection by setting the
    time in seconds after which the field expires.

.. note::

    Inheritance adds extra indices on the :attr:`_cls` field; see
    :ref:`document-inheritance`.

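As a sketch of the dictionary form, combining options from the list above (the
field names are illustrative)::

    class Page(Document):
        title = StringField()
        rating = StringField()
        meta = {
            'indexes': [
                # full index definition: descending, unique and sparse
                {'fields': ['-rating'], 'unique': True, 'sparse': True}
            ]
        }
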
Compound Indexes and Indexing sub documents
-------------------------------------------

Compound indexes can be created by adding the embedded field or dictionary
field name to the index definition.

Sometimes it's more efficient to index parts of embedded / dictionary fields;
in this case use 'dot' notation to identify the value to index, e.g.
`rank.title`, as in the sketch below.

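A minimal sketch of indexing a sub document field with dot notation (the
``Rank`` / ``Person`` names are illustrative only)::

    class Rank(EmbeddedDocument):
        title = StringField()

    class Person(Document):
        rank = EmbeddedDocumentField(Rank)
        meta = {
            # index only the embedded document's 'title' value
            'indexes': ['rank.title'],
        }
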
Geospatial indexes
------------------

The best geo index for MongoDB is the new "2dsphere", which has an improved
spherical model and provides better performance and more options when querying.
The following fields will explicitly add a "2dsphere" index:

- :class:`~mongoengine.fields.PointField`
- :class:`~mongoengine.fields.LineStringField`
- :class:`~mongoengine.fields.PolygonField`

As "2dsphere" indexes can be part of a compound index, you may not want the
automatic index but would prefer a compound index. In this example we turn off
auto indexing and explicitly declare a compound index on ``location`` and ``datetime``::

    class Log(Document):
        location = PointField(auto_index=False)
        datetime = DateTimeField()

        meta = {
            'indexes': [[("location", "2dsphere"), ("datetime", 1)]]
        }

Pre MongoDB 2.4 Geo
'''''''''''''''''''

.. note:: For MongoDB < 2.4 this is still current, however the new 2dsphere
    index is a big improvement over the previous 2D model - so upgrading is
    advised.

Geospatial indexes will be automatically created for all
:class:`~mongoengine.fields.GeoPointField`\ s

It is also possible to explicitly define geospatial indexes. This is
useful if you need to define a geospatial index on a subfield of a
:class:`~mongoengine.fields.DictField` or a custom field that contains a
point. To create a geospatial index you must prefix the field with the
``*`` sign. ::

    class Place(Document):
        location = DictField()
        meta = {
            'indexes': [
                '*location.point',
            ],
        }

Time To Live indexes
--------------------

A special index type that allows you to automatically expire data from a
collection after a given period. See the official
`ttl <http://docs.mongodb.org/manual/tutorial/expire-data/#expire-data-from-collections-by-setting-ttl>`_
documentation for more information. A common use case might be session data::

    class Session(Document):
        created = DateTimeField(default=datetime.now)
        meta = {
            'indexes': [
                {'fields': ['created'], 'expireAfterSeconds': 3600}
            ]
        }

Ordering
========
A default ordering can be specified for your
:class:`~mongoengine.queryset.QuerySet` using the :attr:`ordering` attribute of
:attr:`~mongoengine.Document.meta`. Ordering will be applied when the
:class:`~mongoengine.queryset.QuerySet` is created, and can be overridden by
subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. ::

    from datetime import datetime

    class BlogPost(Document):
        title = StringField()
        published_date = DateTimeField()

        meta = {
            'ordering': ['-published_date']
        }

    blog_post_1 = BlogPost(title="Blog Post #1")
    blog_post_1.published_date = datetime(2010, 1, 5, 0, 0, 0)

    blog_post_2 = BlogPost(title="Blog Post #2")
    blog_post_2.published_date = datetime(2010, 1, 6, 0, 0, 0)

    blog_post_3 = BlogPost(title="Blog Post #3")
    blog_post_3.published_date = datetime(2010, 1, 7, 0, 0, 0)

    blog_post_1.save()
    blog_post_2.save()
    blog_post_3.save()

    # get the "first" BlogPost using default ordering
    # from BlogPost.meta.ordering
    latest_post = BlogPost.objects.first()
    assert latest_post.title == "Blog Post #3"

    # override default ordering, order BlogPosts by "published_date"
    first_post = BlogPost.objects.order_by("+published_date").first()
    assert first_post.title == "Blog Post #1"

Shard keys
==========

If your collection is sharded, then you need to specify the shard key as a tuple,
using the :attr:`shard_key` attribute of :attr:`~mongoengine.Document.meta`.
This ensures that the shard key is sent with the query when calling the
:meth:`~mongoengine.document.Document.save` or
:meth:`~mongoengine.document.Document.update` method on an existing
:class:`~mongoengine.Document` instance::

    class LogEntry(Document):
        machine = StringField()
        app = StringField()
        timestamp = DateTimeField()
        data = StringField()

        meta = {
            'shard_key': ('machine', 'timestamp',)
        }

.. _document-inheritance:

Document inheritance
====================

To create a specialised type of a :class:`~mongoengine.Document` you have
defined, you may subclass it and add any extra fields or methods you may need.
As this new class is not a direct subclass of
:class:`~mongoengine.Document`, it will not be stored in its own collection; it
will use the same collection as its superclass uses. This allows for more
convenient and efficient retrieval of related documents - all you need do is
set :attr:`allow_inheritance` to True in the :attr:`meta` data for a
document. ::

    # Stored in a collection named 'page'
    class Page(Document):
        title = StringField(max_length=200, required=True)

        meta = {'allow_inheritance': True}

    # Also stored in the collection named 'page'
    class DatedPage(Page):
        date = DateTimeField()

.. note:: From 0.8 onwards :attr:`allow_inheritance` defaults
    to False, meaning you must set it to True to use inheritance.

Working with existing data
--------------------------
As MongoEngine no longer defaults to needing :attr:`_cls`, you can quickly and
easily get working with existing data. Just define the document to match
the expected schema in your database ::

    # Will work with data in an existing collection named 'cmsPage'
    class Page(Document):
        title = StringField(max_length=200, required=True)
        meta = {
            'collection': 'cmsPage'
        }

If you have wildly varying schemas then using a
:class:`~mongoengine.DynamicDocument` might be more appropriate, instead of
defining all possible field types.

If you use :class:`~mongoengine.Document` and the database contains data that
isn't defined then that data will be stored in the `document._data` dictionary,
as sketched below.

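A small sketch of that behaviour (``_data`` is an internal attribute, so treat
this as illustrative rather than a supported API)::

    # Assume the 'cmsPage' collection already contains documents with an
    # extra 'legacy_field' value that the Page class does not declare
    page = Page.objects.first()
    print page._data.get('legacy_field')  # undeclared data is still available
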
docs/guide/document-instances.rst (new file, 127 lines)
@@ -0,0 +1,127 @@
==================
Document instances
==================
To create a new document object, create an instance of the relevant document
class, providing values for its fields as its constructor keyword arguments.
You may provide values for any of the fields on the document::

    >>> page = Page(title="Test Page")
    >>> page.title
    'Test Page'

You may also assign values to the document's fields using standard object
attribute syntax::

    >>> page.title = "Example Page"
    >>> page.title
    'Example Page'

Saving and deleting documents
=============================
MongoEngine tracks changes to documents to provide efficient saving. To save
the document to the database, call the :meth:`~mongoengine.Document.save` method.
If the document does not exist in the database, it will be created. If it does
already exist, then any changes will be updated atomically. For example::

    >>> page = Page(title="Test Page")
    >>> page.save()  # Performs an insert
    >>> page.title = "My Page"
    >>> page.save()  # Performs an atomic set on the title field.

.. note::

    Changes to documents are tracked and on the whole perform ``set`` operations.

    * ``list_field.push(0)`` - *sets* the resulting list
    * ``del(list_field)`` - *unsets* the whole list

    With lists it's preferable to use ``Doc.update(push__list_field=0)`` as
    this stops the whole list being updated - stopping any race conditions.

.. seealso::
    :ref:`guide-atomic-updates`

Pre save data validation and cleaning
-------------------------------------
MongoEngine allows you to create custom cleaning rules for your documents when
calling :meth:`~mongoengine.Document.save`. By providing a custom
:meth:`~mongoengine.Document.clean` method you can do any pre validation / data
cleaning.

This might be useful if you want to ensure a default value based on other
document values for example::

    class Essay(Document):
        status = StringField(choices=('Published', 'Draft'), required=True)
        pub_date = DateTimeField()

        def clean(self):
            """Ensures that only published essays have a `pub_date` and
            automatically sets the pub_date if published and not set"""
            if self.status == 'Draft' and self.pub_date is not None:
                msg = 'Draft entries should not have a publication date.'
                raise ValidationError(msg)
            # Set the pub_date for published items if not set.
            if self.status == 'Published' and self.pub_date is None:
                self.pub_date = datetime.now()

.. note::
    Cleaning is only called if validation is turned on and when calling
    :meth:`~mongoengine.Document.save`.

Cascading Saves
---------------
If your document contains :class:`~mongoengine.fields.ReferenceField` or
:class:`~mongoengine.fields.GenericReferenceField` objects, then by default the
:meth:`~mongoengine.Document.save` method will not save any changes to
those objects. If you want all references to be saved also, noting that each
save is a separate query, then passing :attr:`cascade` as True
to the save method will cascade any saves, as in the sketch below.

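A minimal sketch, reusing the earlier ``User`` / ``Page`` reference example::

    john = User(name="John Smith").save()
    post = Page(content="Test Page", author=john).save()

    post.author.name = "John A. Smith"
    post.save(cascade=True)  # also saves the modified 'john' document
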
Deleting documents
------------------
To delete a document, call the :meth:`~mongoengine.Document.delete` method.
Note that this will only work if the document exists in the database and has a
valid :attr:`id`.

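A short sketch of deleting a saved document (the query is illustrative)::

    page = Page.objects(title="Test Page").first()
    page.delete()  # removed from the database; page.id was valid
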
Document IDs
============
Each document in the database has a unique id. This may be accessed through the
:attr:`id` attribute on :class:`~mongoengine.Document` objects. Usually, the id
will be generated automatically by the database server when the object is saved,
meaning that you may only access the :attr:`id` field once a document has been
saved::

    >>> page = Page(title="Test Page")
    >>> page.id
    >>> page.save()
    >>> page.id
    ObjectId('123456789abcdef000000000')

Alternatively, you may define one of your own fields to be the document's
"primary key" by providing ``primary_key=True`` as a keyword argument to a
field's constructor. Under the hood, MongoEngine will use this field as the
:attr:`id`; in fact :attr:`id` is actually aliased to your primary key field so
you may still use :attr:`id` to access the primary key if you want::

    >>> class User(Document):
    ...     email = StringField(primary_key=True)
    ...     name = StringField()
    ...
    >>> bob = User(email='bob@example.com', name='Bob')
    >>> bob.save()
    >>> bob.id == bob.email == 'bob@example.com'
    True

You can also access the document's "primary key" using the :attr:`pk` field; it
is an alias to :attr:`id`::

    >>> page = Page(title="Another Test Page")
    >>> page.save()
    >>> page.id == page.pk
    True

.. note::

    If you define your own primary key field, the field implicitly becomes
    required, so a :class:`~mongoengine.ValidationError` will be thrown if
    you don't provide it.

docs/guide/gridfs.rst (new file, 74 lines)
@@ -0,0 +1,74 @@
======
GridFS
======

.. versionadded:: 0.4

Writing
-------

GridFS support comes in the form of the :class:`~mongoengine.fields.FileField` field
object. This field acts as a file-like object and provides a couple of
different ways of inserting and retrieving data. Arbitrary metadata such as
content type can also be stored alongside the files. In the following example,
a document is created to store details about animals, including a photo::

    class Animal(Document):
        genus = StringField()
        family = StringField()
        photo = FileField()

    marmot = Animal(genus='Marmota', family='Sciuridae')

    marmot_photo = open('marmot.jpg', 'rb')  # open in binary mode
    marmot.photo.put(marmot_photo, content_type='image/jpeg')
    marmot.save()

Retrieval
---------

So using the :class:`~mongoengine.fields.FileField` is just like using any other
field. The file can also be retrieved just as easily::

    marmot = Animal.objects(genus='Marmota').first()
    photo = marmot.photo.read()
    content_type = marmot.photo.content_type

Streaming
---------

Streaming data into a :class:`~mongoengine.fields.FileField` is achieved in a
slightly different manner. First, a new file must be created by calling the
:func:`new_file` method. Data can then be written using :func:`write`::

    marmot.photo.new_file()
    marmot.photo.write('some_image_data')
    marmot.photo.write('some_more_image_data')
    marmot.photo.close()

    marmot.save()

Deletion
--------

Deleting stored files is achieved with the :func:`delete` method::

    marmot.photo.delete()

.. warning::

    The FileField in a Document actually only stores the ID of a file in a
    separate GridFS collection. This means that deleting a document
    with a defined FileField does not actually delete the file. You must be
    careful to delete any files in a Document as above before deleting the
    Document itself.

Replacing files
---------------

Files can be replaced with the :func:`replace` method. This works just like
the :func:`put` method so even metadata can (and should) be replaced::

    another_marmot = open('another_marmot.png', 'rb')  # binary mode
    marmot.photo.replace(another_marmot, content_type='image/png')

docs/guide/index.rst (new file, 14 lines)
@@ -0,0 +1,14 @@
==========
User Guide
==========

.. toctree::
    :maxdepth: 2

    installing
    connecting
    defining-documents
    document-instances
    querying
    gridfs
    signals

docs/guide/installing.rst (new file, 31 lines)
@@ -0,0 +1,31 @@
======================
Installing MongoEngine
======================

To use MongoEngine, you will need to download `MongoDB <http://mongodb.org/>`_
and ensure it is running in an accessible location. You will also need
`PyMongo <http://api.mongodb.org/python>`_ to use MongoEngine, but if you
install MongoEngine using setuptools, then the dependencies will be handled for
you.

MongoEngine is available on PyPI, so to use it you can use :program:`pip`:

.. code-block:: console

    $ pip install mongoengine

Alternatively, if you don't have setuptools installed, `download MongoEngine
from PyPI <http://pypi.python.org/pypi/mongoengine/>`_ and run

.. code-block:: console

    $ python setup.py install

To use the bleeding-edge version of MongoEngine, you can get the source from
`GitHub <http://github.com/mongoengine/mongoengine/>`_ and install it as above:

.. code-block:: console

    $ git clone git://github.com/mongoengine/mongoengine
    $ cd mongoengine
    $ python setup.py install

docs/guide/querying.rst (new file, 649 lines)
@@ -0,0 +1,649 @@
=====================
Querying the database
=====================
:class:`~mongoengine.Document` classes have an :attr:`objects` attribute, which
is used for accessing the objects in the database associated with the class.
The :attr:`objects` attribute is actually a
:class:`~mongoengine.queryset.QuerySetManager`, which creates and returns a new
:class:`~mongoengine.queryset.QuerySet` object on access. The
:class:`~mongoengine.queryset.QuerySet` object may be iterated over to
fetch documents from the database::

    # Prints out the names of all the users in the database
    for user in User.objects:
        print user.name

.. note::

    Once the iteration finishes (when :class:`StopIteration` is raised),
    :meth:`~mongoengine.queryset.QuerySet.rewind` will be called so that the
    :class:`~mongoengine.queryset.QuerySet` may be iterated over again. The
    results of the first iteration are *not* cached, so the database will be hit
    each time the :class:`~mongoengine.queryset.QuerySet` is iterated over.

Filtering queries
=================
The query may be filtered by calling the
:class:`~mongoengine.queryset.QuerySet` object with field lookup keyword
arguments. The keys in the keyword arguments correspond to fields on the
:class:`~mongoengine.Document` you are querying::

    # This will return a QuerySet that will only iterate over users whose
    # 'country' field is set to 'uk'
    uk_users = User.objects(country='uk')

Fields on embedded documents may also be referred to using field lookup syntax
by using a double-underscore in place of the dot in object attribute access
syntax::

    # This will return a QuerySet that will only iterate over pages that have
    # been written by a user whose 'country' field is set to 'uk'
    uk_pages = Page.objects(author__country='uk')

Query operators
===============
Operators other than equality may also be used in queries; just attach the
operator name to a key with a double-underscore::

    # Only find users whose age is 18 or less
    young_users = User.objects(age__lte=18)

Available operators are as follows (a combined example follows the list):

* ``ne`` -- not equal to
* ``lt`` -- less than
* ``lte`` -- less than or equal to
* ``gt`` -- greater than
* ``gte`` -- greater than or equal to
* ``not`` -- negate a standard check, may be used before other operators (e.g.
  ``Q(age__not__mod=5)``)
* ``in`` -- value is in list (a list of values should be provided)
* ``nin`` -- value is not in list (a list of values should be provided)
* ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values
* ``all`` -- every item in list of values provided is in array
* ``size`` -- the size of the array is equal to the given value
* ``exists`` -- value for field exists

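A few of these operators combined in one short sketch (the field names are
illustrative)::

    # Users whose country is one of the given values
    User.objects(country__in=['uk', 'ie'])
    # Users that have an 'age' value set
    User.objects(age__exists=True)
    # Users whose 'roles' list contains both values
    User.objects(roles__all=['admin', 'editor'])
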
String queries
--------------

The following operators are available as shortcuts to querying with regular
expressions (a short example follows the list):

* ``exact`` -- string field exactly matches value
* ``iexact`` -- string field exactly matches value (case insensitive)
* ``contains`` -- string field contains value
* ``icontains`` -- string field contains value (case insensitive)
* ``startswith`` -- string field starts with value
* ``istartswith`` -- string field starts with value (case insensitive)
* ``endswith`` -- string field ends with value
* ``iendswith`` -- string field ends with value (case insensitive)
* ``match`` -- performs an $elemMatch so you can match an entire document within an array

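For instance, a case-insensitive prefix search might look like this (the field
name is illustrative)::

    # Users whose name starts with 'bob', ignoring case
    User.objects(name__istartswith='bob')
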
Geo queries
-----------

There are a few special operators for performing geographical queries. The following
were added in 0.8 for: :class:`~mongoengine.fields.PointField`,
:class:`~mongoengine.fields.LineStringField` and
:class:`~mongoengine.fields.PolygonField`:

* ``geo_within`` -- Check if a geometry is within a polygon. For ease of use
  it accepts either a geojson geometry or just the polygon coordinates eg::

      loc.objects(point__geo_within=[[[40, 5], [40, 6], [41, 6], [40, 5]]])
      loc.objects(point__geo_within={"type": "Polygon",
                                     "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]})

* ``geo_within_box`` -- simplified geo_within searching with a box eg::

      loc.objects(point__geo_within_box=[(-125.0, 35.0), (-100.0, 40.0)])
      loc.objects(point__geo_within_box=[<bottom left coordinates>, <upper right coordinates>])

* ``geo_within_polygon`` -- simplified geo_within searching within a simple polygon eg::

      loc.objects(point__geo_within_polygon=[[40, 5], [40, 6], [41, 6], [40, 5]])
      loc.objects(point__geo_within_polygon=[ [ <x1> , <y1> ] ,
                                              [ <x2> , <y2> ] ,
                                              [ <x3> , <y3> ] ])

* ``geo_within_center`` -- simplified geo_within the flat circle radius of a point eg::

      loc.objects(point__geo_within_center=[(-125.0, 35.0), 1])
      loc.objects(point__geo_within_center=[ [ <x>, <y> ] , <radius> ])

* ``geo_within_sphere`` -- simplified geo_within the spherical circle radius of a point eg::

      loc.objects(point__geo_within_sphere=[(-125.0, 35.0), 1])
      loc.objects(point__geo_within_sphere=[ [ <x>, <y> ] , <radius> ])

* ``geo_intersects`` -- selects all locations that intersect with a geometry eg::

      # Inferred from provided points lists:
      loc.objects(poly__geo_intersects=[40, 6])
      loc.objects(poly__geo_intersects=[[40, 5], [40, 6]])
      loc.objects(poly__geo_intersects=[[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]])

      # With geoJson style objects
      loc.objects(poly__geo_intersects={"type": "Point", "coordinates": [40, 6]})
      loc.objects(poly__geo_intersects={"type": "LineString",
                                        "coordinates": [[40, 5], [40, 6]]})
      loc.objects(poly__geo_intersects={"type": "Polygon",
                                        "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]})

* ``near`` -- Find all the locations near a given point::

      loc.objects(point__near=[40, 5])
      loc.objects(point__near={"type": "Point", "coordinates": [40, 5]})

  You can also set the maximum distance in meters as well::

      loc.objects(point__near=[40, 5], point__max_distance=1000)

The older 2D indexes are still supported with the
:class:`~mongoengine.fields.GeoPointField`:

* ``within_distance`` -- provide a list containing a point and a maximum
  distance (e.g. [(41.342, -87.653), 5])
* ``within_spherical_distance`` -- Same as above but using the spherical geo model
  (e.g. [(41.342, -87.653), 5/earth_radius])
* ``near`` -- order the documents by how close they are to a given point
* ``near_sphere`` -- Same as above but using the spherical geo model
* ``within_box`` -- filter documents to those within a given bounding box (e.g.
  [(35.0, -125.0), (40.0, -100.0)])
* ``within_polygon`` -- filter documents to those within a given polygon (e.g.
  [(41.91,-87.69), (41.92,-87.68), (41.91,-87.65), (41.89,-87.65)]).

  .. note:: Requires Mongo Server 2.0

* ``max_distance`` -- can be added to your location queries to set a maximum
  distance.

Querying lists
--------------
On most fields, this syntax will look up documents where the field specified
matches the given value exactly, but when the field refers to a
:class:`~mongoengine.fields.ListField`, a single item may be provided, in which case
lists that contain that item will be matched::

    class Page(Document):
        tags = ListField(StringField())

    # This will match all pages that have the word 'coding' as an item in the
    # 'tags' list
    Page.objects(tags='coding')

It is possible to query by position in a list by using a numerical value as a
query operator. So if you wanted to find all pages whose first tag was ``db``,
you could use the following query::

    Page.objects(tags__0='db')

If you only want to fetch part of a list, e.g. to paginate a list, then
the `slice` operator is required::

    # comments - skip 5, limit 10
    Page.objects.fields(slice__comments=[5, 10])

For updating documents, if you don't know the position in a list, you can use
the $ positional operator ::

    Post.objects(comments__by="joe").update(**{'inc__comments__$__votes': 1})

However, this doesn't map well to the syntax so you can also use a capital S instead ::

    Post.objects(comments__by="joe").update(inc__comments__S__votes=1)

.. note:: Due to a MongoDB limitation, the $ operator currently only applies
    to the first matched item in the query.

Raw queries
-----------
It is possible to provide a raw PyMongo query as a query parameter, which will
be integrated directly into the query. This is done using the ``__raw__``
keyword argument::

    Page.objects(__raw__={'tags': 'coding'})

.. versionadded:: 0.4

Limiting and skipping results
=============================
Just as with traditional ORMs, you may limit the number of results returned, or
skip a number of results in your query.
:meth:`~mongoengine.queryset.QuerySet.limit` and
:meth:`~mongoengine.queryset.QuerySet.skip` methods are available on
:class:`~mongoengine.queryset.QuerySet` objects, but the preferred syntax for
achieving this is using array-slicing syntax::

    # Only the first 5 people
    users = User.objects[:5]

    # All except for the first 5 people
    users = User.objects[5:]

    # 5 users, starting from the 10th user found
    users = User.objects[10:15]

You may also index the query to retrieve a single result. If an item at that
index does not exist, an :class:`IndexError` will be raised. A shortcut for
retrieving the first result and returning :attr:`None` if no result exists is
provided (:meth:`~mongoengine.queryset.QuerySet.first`)::

    >>> # Make sure there are no users
    >>> User.drop_collection()
    >>> User.objects[0]
    IndexError: list index out of range
    >>> User.objects.first() == None
    True
    >>> User(name='Test User').save()
    >>> User.objects[0] == User.objects.first()
    True

Retrieving unique results
-------------------------
To retrieve a result that should be unique in the collection, use
:meth:`~mongoengine.queryset.QuerySet.get`. This will raise
:class:`~mongoengine.queryset.DoesNotExist` if
no document matches the query, and
:class:`~mongoengine.queryset.MultipleObjectsReturned`
if more than one document matched the query. These exceptions are merged into
your document definitions, e.g. `MyDoc.DoesNotExist`.

A variation of this method exists,
:meth:`~mongoengine.queryset.QuerySet.get_or_create`, that will create a new
document with the query arguments if no documents match the query. An
additional keyword argument, :attr:`defaults` may be provided, which will be
used as default values for the new document, in the case that it should need
to be created::

    >>> a, created = User.objects.get_or_create(name='User A', defaults={'age': 30})
    >>> b, created = User.objects.get_or_create(name='User A', defaults={'age': 40})
    >>> a.name == b.name and a.age == b.age
    True

Default Document queries
========================
By default, the :attr:`~mongoengine.Document.objects` attribute on a
document returns a :class:`~mongoengine.queryset.QuerySet` that doesn't filter
the collection -- it returns all objects. This may be changed by defining a
method on a document that modifies a queryset. The method should accept two
arguments -- :attr:`doc_cls` and :attr:`queryset`. The first argument is the
:class:`~mongoengine.Document` class that the method is defined on (in this
sense, the method is more like a :func:`classmethod` than a regular method),
and the second argument is the initial queryset. The method needs to be
decorated with :func:`~mongoengine.queryset.queryset_manager` in order for it
to be recognised. ::

    class BlogPost(Document):
        title = StringField()
        date = DateTimeField()

        @queryset_manager
        def objects(doc_cls, queryset):
            # This may actually also be done by defining a default ordering for
            # the document, but this illustrates the use of manager methods
            return queryset.order_by('-date')

You don't need to call your method :attr:`objects` -- you may define as many
custom manager methods as you like::

    class BlogPost(Document):
        title = StringField()
        published = BooleanField()

        @queryset_manager
        def live_posts(doc_cls, queryset):
            return queryset.filter(published=True)

    BlogPost(title='test1', published=False).save()
    BlogPost(title='test2', published=True).save()
    assert len(BlogPost.objects) == 2
    assert len(BlogPost.live_posts()) == 1

Custom QuerySets
================
Should you want to add custom methods for interacting with or filtering
documents, extending the :class:`~mongoengine.queryset.QuerySet` class may be
the way to go. To use a custom :class:`~mongoengine.queryset.QuerySet` class on
a document, set ``queryset_class`` to the custom class in a
:class:`~mongoengine.Document`\ s ``meta`` dictionary::

    class AwesomerQuerySet(QuerySet):

        def get_awesome(self):
            return self.filter(awesome=True)

    class Page(Document):
        meta = {'queryset_class': AwesomerQuerySet}

    # To call:
    Page.objects.get_awesome()

.. versionadded:: 0.4

Aggregation
===========
MongoDB provides some aggregation methods out of the box, but there are not as
many as you typically get with an RDBMS. MongoEngine provides a wrapper around
the built-in methods and provides some of its own, which are implemented as
JavaScript code that is executed on the database server.

Counting results
----------------
Just as with limiting and skipping results, there is a method on
:class:`~mongoengine.queryset.QuerySet` objects --
:meth:`~mongoengine.queryset.QuerySet.count`, but there is also a more Pythonic
way of achieving this::

    num_users = len(User.objects)

Further aggregation
-------------------
You may sum over the values of a specific field on documents using
:meth:`~mongoengine.queryset.QuerySet.sum`::

    yearly_expense = Employee.objects.sum('salary')

.. note::

    If the field isn't present on a document, that document will be ignored in
    the sum.

To get the average (mean) of a field on a collection of documents, use
:meth:`~mongoengine.queryset.QuerySet.average`::

    mean_age = User.objects.average('age')

As MongoDB provides native lists, MongoEngine provides a helper method to get a
dictionary of the frequencies of items in lists across an entire collection --
:meth:`~mongoengine.queryset.QuerySet.item_frequencies`. An example of its use
would be generating "tag-clouds"::

    class Article(Document):
        tag = ListField(StringField())

    # After adding some tagged articles...
    tag_freqs = Article.objects.item_frequencies('tag', normalize=True)

    from operator import itemgetter
    top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10]

Query efficiency and performance
================================

There are a couple of methods to improve efficiency when querying: reducing the
information returned by the query, and efficient dereferencing.

Retrieving a subset of fields
-----------------------------

Sometimes a subset of fields on a :class:`~mongoengine.Document` is required,
and for efficiency only these should be retrieved from the database. This issue
is especially important for MongoDB, as fields may often be extremely large
(e.g. a :class:`~mongoengine.fields.ListField` of
:class:`~mongoengine.EmbeddedDocument`\ s, which represent the comments on a
blog post). To select only a subset of fields, use
:meth:`~mongoengine.queryset.QuerySet.only`, specifying the fields you want to
retrieve as its arguments. Note that if fields that are not downloaded are
accessed, their default value (or :attr:`None` if no default value is provided)
will be given::

    >>> class Film(Document):
    ...     title = StringField()
    ...     year = IntField()
    ...     rating = IntField(default=3)
    ...
    >>> Film(title='The Shawshank Redemption', year=1994, rating=5).save()
    >>> f = Film.objects.only('title').first()
    >>> f.title
    'The Shawshank Redemption'
    >>> f.year   # None
    >>> f.rating # default value
    3

.. note::

    The :meth:`~mongoengine.queryset.QuerySet.exclude` method is the opposite of
    :meth:`~mongoengine.queryset.QuerySet.only` if you want to exclude a field.

    If you later need the missing fields, just call
    :meth:`~mongoengine.Document.reload` on your document.

Getting related data
--------------------

When iterating the results of :class:`~mongoengine.fields.ListField` or
:class:`~mongoengine.fields.DictField` we automatically dereference any
:class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the
number of queries to MongoDB.

There are times when that efficiency is not enough: documents that have
:class:`~mongoengine.fields.ReferenceField` objects or
:class:`~mongoengine.fields.GenericReferenceField` objects at the top level are
expensive, as the number of queries to MongoDB can quickly rise.

To limit the number of queries use
:func:`~mongoengine.queryset.QuerySet.select_related` which converts the
QuerySet to a list and dereferences as efficiently as possible. By default
:func:`~mongoengine.queryset.QuerySet.select_related` only dereferences
references to a depth of 1 level. If you have more complicated documents and
want to dereference more of the object at once then increasing :attr:`max_depth`
will dereference more levels of the document.
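Neither call changes the query syntax. A minimal sketch, using the hypothetical
``Post`` documents referenced elsewhere in this guide::

    # Dereference one level of references with as few queries as possible
    posts = Post.objects.select_related()

    # Dereference nested references two levels deep
    posts = Post.objects.select_related(max_depth=2)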
Turning off dereferencing
-------------------------

Sometimes for performance reasons you don't want to automatically dereference
data. To turn off dereferencing of the results of a query use
:func:`~mongoengine.queryset.QuerySet.no_dereference` on the queryset like so::

    post = Post.objects.no_dereference().first()
    assert(isinstance(post.author, ObjectId))

You can also turn off all dereferencing for a fixed period by using the
:class:`~mongoengine.context_managers.no_dereference` context manager::

    with no_dereference(Post) as Post:
        post = Post.objects.first()
        assert(isinstance(post.author, ObjectId))

    # Outside the context manager dereferencing occurs.
    assert(isinstance(post.author, User))
Advanced queries
================

Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword
arguments can't fully express the query you want to use -- for example if you
need to combine a number of constraints using *and* and *or*. This is made
possible in MongoEngine through the :class:`~mongoengine.queryset.Q` class.
A :class:`~mongoengine.queryset.Q` object represents part of a query, and
can be initialised using the same keyword-argument syntax you use to query
documents. To build a complex query, you may combine
:class:`~mongoengine.queryset.Q` objects using the ``&`` (and) and ``|`` (or)
operators. To use a :class:`~mongoengine.queryset.Q` object, pass it in as the
first positional argument to :attr:`Document.objects` when you filter it by
calling it with keyword arguments::

    # Get published posts
    Post.objects(Q(published=True) | Q(publish_date__lte=datetime.now()))

    # Get top posts
    Post.objects((Q(featured=True) & Q(hits__gte=1000)) | Q(hits__gte=5000))

.. warning:: You have to use bitwise operators. You cannot use ``or``, ``and``
    to combine queries as ``Q(a=a) or Q(b=b)`` is not the same as
    ``Q(a=a) | Q(b=b)``. As ``Q(a=a)`` equates to true, ``Q(a=a) or Q(b=b)`` is
    the same as ``Q(a=a)``.
.. _guide-atomic-updates:

Atomic updates
==============

Documents may be updated atomically by using the
:meth:`~mongoengine.queryset.QuerySet.update_one` and
:meth:`~mongoengine.queryset.QuerySet.update` methods on a
:meth:`~mongoengine.queryset.QuerySet`. There are several different "modifiers"
that you may use with these methods:

* ``set`` -- set a particular value
* ``unset`` -- delete a particular value (since MongoDB v1.3)
* ``inc`` -- increment a value by a given amount
* ``dec`` -- decrement a value by a given amount
* ``push`` -- append a value to a list
* ``push_all`` -- append several values to a list
* ``pop`` -- remove the first or last element of a list
* ``pull`` -- remove a value from a list
* ``pull_all`` -- remove several values from a list
* ``add_to_set`` -- add a value to a list only if it's not in the list already
The syntax for atomic updates is similar to the querying syntax, but the
modifier comes before the field, not after it::

    >>> post = BlogPost(title='Test', page_views=0, tags=['database'])
    >>> post.save()
    >>> BlogPost.objects(id=post.id).update_one(inc__page_views=1)
    >>> post.reload()  # the document has been changed, so we need to reload it
    >>> post.page_views
    1
    >>> BlogPost.objects(id=post.id).update_one(set__title='Example Post')
    >>> post.reload()
    >>> post.title
    'Example Post'
    >>> BlogPost.objects(id=post.id).update_one(push__tags='nosql')
    >>> post.reload()
    >>> post.tags
    ['database', 'nosql']

.. note::

    In version 0.5 the :meth:`~mongoengine.Document.save` runs atomic updates
    on changed documents by tracking changes to that document.
The positional operator allows you to update list items without knowing the
index position, therefore making the update a single atomic operation. As we
cannot use the `$` syntax in keyword arguments it has been mapped to `S`::

    >>> post = BlogPost(title='Test', page_views=0, tags=['database', 'mongo'])
    >>> post.save()
    >>> BlogPost.objects(id=post.id, tags='mongo').update(set__tags__S='mongodb')
    >>> post.reload()
    >>> post.tags
    ['database', 'mongodb']

.. note::
    Currently only top level lists are handled, future versions of mongodb /
    pymongo plan to support nested positional operators. See `The $ positional
    operator <http://www.mongodb.org/display/DOCS/Updating#Updating-The%24positionaloperator>`_.
Server-side javascript execution
================================

Javascript functions may be written and sent to the server for execution. The
result of this is the return value of the Javascript function. This
functionality is accessed through the
:meth:`~mongoengine.queryset.QuerySet.exec_js` method on
:meth:`~mongoengine.queryset.QuerySet` objects. Pass in a string containing a
Javascript function as the first argument.

The remaining positional arguments are names of fields that will be passed into
your Javascript function as its arguments. This allows functions to be written
that may be executed on any field in a collection (e.g. the
:meth:`~mongoengine.queryset.QuerySet.sum` method, which accepts the name of
the field to sum over as its argument). Note that field names passed in this
manner are automatically translated to the names used on the database (set
using the :attr:`db_field` keyword argument to a field constructor).

Keyword arguments to :meth:`~mongoengine.queryset.QuerySet.exec_js` are
combined into an object called :attr:`options`, which is available in the
Javascript function. This may be used for defining specific parameters for your
function.

Some variables are made available in the scope of the Javascript function:

* ``collection`` -- the name of the collection that corresponds to the
  :class:`~mongoengine.Document` class that is being used; this should be
  used to get the :class:`Collection` object from :attr:`db` in Javascript
  code
* ``query`` -- the query that has been generated by the
  :class:`~mongoengine.queryset.QuerySet` object; this may be passed into
  the :meth:`find` method on a :class:`Collection` object in the Javascript
  function
* ``options`` -- an object containing the keyword arguments passed into
  :meth:`~mongoengine.queryset.QuerySet.exec_js`

The following example demonstrates the intended usage of
:meth:`~mongoengine.queryset.QuerySet.exec_js` by defining a function that sums
over a field on a document (this functionality is already available through
:meth:`~mongoengine.queryset.QuerySet.sum` but is shown here for the sake of
example)::
    def sum_field(document, field_name, include_negatives=True):
        code = """
        function(sumField) {
            var total = 0.0;
            db[collection].find(query).forEach(function(doc) {
                var val = doc[sumField];
                if (val >= 0.0 || options.includeNegatives) {
                    total += val;
                }
            });
            return total;
        }
        """
        options = {'includeNegatives': include_negatives}
        return document.objects.exec_js(code, field_name, **options)
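A usage sketch of the helper above, reusing the ``BlogPost`` documents with a
numeric ``page_views`` field from the atomic-update examples::

    total_views = sum_field(BlogPost, 'page_views')
    positive_views = sum_field(BlogPost, 'page_views', include_negatives=False)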
As fields in MongoEngine may use different names in the database (set using the
:attr:`db_field` keyword argument to a :class:`Field` constructor), a mechanism
exists for replacing MongoEngine field names with the database field names in
Javascript code. When accessing a field on a collection object, use
square-bracket notation, and prefix the MongoEngine field name with a tilde.
The field name that follows the tilde will be translated to the name used in
the database. Note that when referring to fields on embedded documents,
the name of the :class:`~mongoengine.fields.EmbeddedDocumentField`, followed by a dot,
should be used before the name of the field on the embedded document. The
following example shows how the substitutions are made::
    class Comment(EmbeddedDocument):
        content = StringField(db_field='body')

    class BlogPost(Document):
        title = StringField(db_field='doctitle')
        comments = ListField(EmbeddedDocumentField(Comment), db_field='cs')

    # Returns a list of dictionaries. Each dictionary contains a value named
    # "document", which corresponds to the "title" field on a BlogPost, and
    # "comment", which corresponds to an individual comment. The substitutions
    # made are shown in the comments.
    BlogPost.objects.exec_js("""
    function() {
        var comments = [];
        db[collection].find(query).forEach(function(doc) {
            // doc[~comments] -> doc["cs"]
            var docComments = doc[~comments];

            for (var i = 0; i < docComments.length; i++) {
                // doc[~comments][i] -> doc["cs"][i]
                var comment = doc[~comments][i];

                comments.push({
                    // doc[~title] -> doc["doctitle"]
                    'document': doc[~title],

                    // comment[~comments.content] -> comment["body"]
                    'comment': comment[~comments.content]
                });
            }
        });
        return comments;
    }
    """)
docs/guide/signals.rst (new file, 60 lines)
@@ -0,0 +1,60 @@
.. _signals:

Signals
=======

.. versionadded:: 0.5

.. note::

    Signal support is provided by the excellent `blinker`_ library and
    will gracefully fall back if it is not available.

The following document signals exist in MongoEngine and are pretty self-explanatory:

* `mongoengine.signals.pre_init`
* `mongoengine.signals.post_init`
* `mongoengine.signals.pre_save`
* `mongoengine.signals.post_save`
* `mongoengine.signals.pre_delete`
* `mongoengine.signals.post_delete`
* `mongoengine.signals.pre_bulk_insert`
* `mongoengine.signals.post_bulk_insert`

Example usage::
    import logging

    from mongoengine import *
    from mongoengine import signals

    class Author(Document):
        name = StringField()

        def __unicode__(self):
            return self.name

        @classmethod
        def pre_save(cls, sender, document, **kwargs):
            logging.debug("Pre Save: %s" % document.name)

        @classmethod
        def post_save(cls, sender, document, **kwargs):
            logging.debug("Post Save: %s" % document.name)
            if 'created' in kwargs:
                if kwargs['created']:
                    logging.debug("Created")
                else:
                    logging.debug("Updated")

    signals.pre_save.connect(Author.pre_save, sender=Author)
    signals.post_save.connect(Author.post_save, sender=Author)
ReferenceFields and signals
---------------------------

Currently `reverse_delete_rules` do not trigger signals on the other part of
the relationship. If this is required you must manually handle the
reverse deletion; one way to do so is sketched below.
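A minimal sketch, assuming hypothetical ``Book`` documents that reference the
``Author`` documents from the example above::

    class Book(Document):
        title = StringField()
        author = ReferenceField(Author)

    def delete_books(sender, document, **kwargs):
        # Delete each referencing Book individually so that the Book
        # delete signals still fire.
        for book in Book.objects(author=document):
            book.delete()

    signals.pre_delete.connect(delete_books, sender=Author)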
.. _blinker: http://pypi.python.org/pypi/blinker
@@ -1,26 +1,84 @@

==============================
MongoEngine User Documentation
==============================

**MongoEngine** is an Object-Document Mapper, written in Python for working with
MongoDB. To install it, simply run

.. code-block:: console

    $ pip install -U mongoengine

:doc:`tutorial`
  A quick tutorial building a tumblelog to get you up and running with
  MongoEngine.

:doc:`guide/index`
  The full guide to MongoEngine - from modeling documents to storing files,
  from querying for data to firing signals and *everything* between.

:doc:`apireference`
  The complete API documentation --- the innards of documents, querysets and fields.

:doc:`upgrade`
  How to upgrade MongoEngine.

:doc:`django`
  Using MongoEngine and Django

Community
---------

To get help with using MongoEngine, use the `MongoEngine Users mailing list
<http://groups.google.com/group/mongoengine-users>`_ or the ever popular
`stackoverflow <http://www.stackoverflow.com>`_.

Contributing
------------

**Yes please!** We are always looking for contributions, additions and improvements.

The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_
and contributions are always encouraged. Contributions can be as simple as
minor tweaks to this documentation, the website or the core.

To contribute, fork the project on
`GitHub <http://github.com/MongoEngine/mongoengine>`_ and send a
pull request.

Changes
-------

See the :doc:`changelog` for a full list of changes to MongoEngine and
:doc:`upgrade` for upgrade information.

.. note:: Always read and test the `upgrade <upgrade>`_ documentation before
    putting updates live in production **;)**

Offline Reading
---------------

Download the docs in `pdf <https://media.readthedocs.org/pdf/mongoengine-odm/latest/mongoengine-odm.pdf>`_
or `epub <https://media.readthedocs.org/epub/mongoengine-odm/latest/mongoengine-odm.epub>`_
formats for offline reading.

.. toctree::
    :maxdepth: 1
    :numbered:
    :hidden:

    tutorial
    guide/index
    apireference
    changelog
    upgrade
    django

Indices and tables
------------------

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
@@ -1,6 +1,7 @@

========
Tutorial
========

This tutorial introduces **MongoEngine** by means of example --- we will walk
through how to create a simple **Tumblelog** application. A Tumblelog is a type
of blog where posts are not constrained to being conventional text-based posts.

@@ -12,23 +13,29 @@ interface.

Getting started
===============

Before we start, make sure that a copy of MongoDB is running in an accessible
location --- running it locally will be easier, but if that is not an option
then it may be run on a remote server. If you haven't installed mongoengine,
simply use pip to install it like so::

    $ pip install mongoengine

Before we can start using MongoEngine, we need to tell it how to connect to our
instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect`
function. If running locally the only argument we need to provide is the name
of the MongoDB database to use::

    from mongoengine import *

    connect('tumblelog')

There are lots of options for connecting to MongoDB, for more information about
them see the :ref:`guide-connecting` guide.

Defining our documents
======================

MongoDB is *schemaless*, which means that no schema is enforced by the database
--- we may add and remove fields however we want and MongoDB won't complain.
This makes life a lot easier in many regards, especially when there is a change

@@ -39,17 +46,19 @@ define utility methods on our documents in the same way that traditional

In our Tumblelog application we need to store several different types of
information. We will need to have a collection of **users**, so that we may
link posts to an individual. We also need to store our different types of
**posts** (eg: text, image and link) in the database. To aid navigation of our
Tumblelog, posts may have **tags** associated with them, so that the list of
posts shown to the user may be limited to posts that have been assigned a
specific tag. Finally, it would be nice if **comments** could be added to
posts. We'll start with **users**, as the other document models are slightly
more involved.

Users
-----

Just as if we were using a relational database with an ORM, we need to define
which fields a :class:`User` may have, and what types of data they might store::

    class User(Document):
        email = StringField(required=True)

@@ -58,11 +67,13 @@ which fields a :class:`User` may have, and what their types will be::

This looks similar to how the structure of a table would be defined in a
regular ORM. The key difference is that this schema will never be passed on to
MongoDB --- this will only be enforced at the application level, making future
changes easy to manage. Also, the User documents will be stored in a
MongoDB *collection* rather than a table.

Posts, Comments and Tags
------------------------

Now we'll think about how to store the rest of the information. If we were
using a relational database, we would most likely have a table of **posts**, a
table of **comments** and a table of **tags**. To associate the comments with

@@ -75,21 +86,25 @@ of them stand out as particularly intuitive solutions.

Posts
^^^^^

Happily MongoDB *isn't* a relational database, so we're not going to do it that
way. As it turns out, we can use MongoDB's schemaless nature to provide us with
a much nicer solution. We will store all of the posts in *one collection* and
each post type will only store the fields it needs. If we later want to add
video posts, we don't have to modify the collection at all, we just *start
using* the new fields we need to support video posts. This fits with the
Object-Oriented principle of *inheritance* nicely. We can think of
:class:`Post` as a base class, and :class:`TextPost`, :class:`ImagePost` and
:class:`LinkPost` as subclasses of :class:`Post`. In fact, MongoEngine supports
this kind of modelling out of the box --- all you need do is turn on inheritance
by setting :attr:`allow_inheritance` to True in the :attr:`meta`::

    class Post(Document):
        title = StringField(max_length=120, required=True)
        author = ReferenceField(User)

        meta = {'allow_inheritance': True}

    class TextPost(Post):
        content = StringField()

@@ -100,19 +115,20 @@ this kind of modelling out of the box::

        link_url = StringField()

We are storing a reference to the author of the posts using a
:class:`~mongoengine.fields.ReferenceField` object. These are similar to foreign key
fields in traditional ORMs, and are automatically translated into references
when they are saved, and dereferenced when they are loaded.

Tags
^^^^

Now that we have our Post models figured out, how will we attach tags to them?
MongoDB allows us to store lists of items natively, so rather than having a
link table, we can just store a list of tags in each post. So, for both
efficiency and simplicity's sake, we'll store the tags as strings directly
within the post, rather than storing references to tags in a separate
collection. Especially as tags are generally very short (often even shorter
than a document's id), this denormalisation won't impact very strongly on the
size of our database. So let's take a look at the code for our modified
:class:`Post` class::

@@ -121,13 +137,16 @@ size of our database. So let's take a look that the code our modified

        author = ReferenceField(User)
        tags = ListField(StringField(max_length=30))

The :class:`~mongoengine.fields.ListField` object that is used to define a Post's tags
takes a field object as its first argument --- this means that you can have
lists of any type of field (including lists).

.. note:: We don't need to modify the specialised post types as they all
    inherit from :class:`Post`.

Comments
^^^^^^^^

A comment is typically associated with *one* post. In a relational database, to
display a post with its comments, we would have to retrieve the post from the
database, then query the database again for the comments associated with the

@@ -152,21 +171,41 @@ We can then store a list of comment documents in our post document::

        tags = ListField(StringField(max_length=30))
        comments = ListField(EmbeddedDocumentField(Comment))

Handling deletions of references
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The :class:`~mongoengine.fields.ReferenceField` object takes a keyword
`reverse_delete_rule` for handling deletion rules if the reference is deleted.
To delete all the posts if a user is deleted set the rule (a gentler
alternative is sketched after the note below)::

    class Post(Document):
        title = StringField(max_length=120, required=True)
        author = ReferenceField(User, reverse_delete_rule=CASCADE)
        tags = ListField(StringField(max_length=30))
        comments = ListField(EmbeddedDocumentField(Comment))

See :class:`~mongoengine.fields.ReferenceField` for more information.

.. note::
    MapFields and DictFields currently don't support automatic handling of
    deleted references
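``CASCADE`` is not the only option --- MongoEngine also provides gentler rules
such as ``NULLIFY`` (clear the reference) and ``DO_NOTHING``. A minimal sketch
of the ``NULLIFY`` variant on the same model::

    class Post(Document):
        title = StringField(max_length=120, required=True)
        # Set `author` to None when the referenced User is deleted,
        # instead of deleting the post itself
        author = ReferenceField(User, reverse_delete_rule=NULLIFY)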
Adding data to our Tumblelog
============================

Now that we've defined how our documents will be structured, let's start adding
some documents to the database. Firstly, we'll need to create a :class:`User`
object::

    ross = User(email='ross@example.com', first_name='Ross', last_name='Lawley').save()

.. note::
    We could have also defined our user using attribute syntax::

        ross = User(email='ross@example.com')
        ross.first_name = 'Ross'
        ross.last_name = 'Lawley'
        ross.save()

Now that we've got our user in the database, let's add a couple of posts::

@@ -175,16 +214,17 @@ Now that we've got our user in the database, let's add a couple of posts::

    post1.tags = ['mongodb', 'mongoengine']
    post1.save()

    post2 = LinkPost(title='MongoEngine Documentation', author=ross)
    post2.link_url = 'http://docs.mongoengine.com/'
    post2.tags = ['mongoengine']
    post2.save()

.. note:: If you change a field on an object that has already been saved, then
    call :meth:`save` again, the document will be updated.

Accessing our data
==================

So now we've got a couple of posts in our database, how do we display them?
Each document class (i.e. any class that inherits either directly or indirectly
from :class:`~mongoengine.Document`) has an :attr:`objects` attribute, which is

@@ -196,6 +236,7 @@ class. So let's see how we can get our posts' titles::

Retrieving type-specific information
------------------------------------

This will print the titles of our posts, one on each line. But what if we want
to access the type-specific data (link_url, content, etc.)? One way is simply
to use the :attr:`objects` attribute of a subclass of :class:`Post`::

@@ -234,6 +275,7 @@ text post, and "Link: <url>" if it was a link post.

Searching our posts by tag
--------------------------

The :attr:`objects` attribute of a :class:`~mongoengine.Document` is actually a
:class:`~mongoengine.queryset.QuerySet` object. This lazily queries the
database only when you need the data. It may also be filtered to narrow down

@@ -250,5 +292,11 @@ the first matched by the query you provide. Aggregation functions may also be

used on :class:`~mongoengine.queryset.QuerySet` objects::

    num_posts = Post.objects(tags='mongodb').count()
    print 'Found %d posts with tag "mongodb"' % num_posts

Learning more about mongoengine
-------------------------------

If you got this far you've made a great start, so well done! The next step on
your mongoengine journey is the `full user guide <guide/index>`_, where you
can learn in depth about how to use mongoengine and mongodb.
docs/upgrade.rst (new file, 513 lines)
@@ -0,0 +1,513 @@
#########
Upgrading
#########

0.7 to 0.8
**********

There have been numerous backwards breaking changes in 0.8. The reasons for
these are to ensure that MongoEngine has sane defaults going forward and that it
performs as well as it can out of the box. Where possible there have been
FutureWarnings to help get you ready for the change, but that hasn't been
possible for the whole of the release.

.. warning:: Breaking changes - test upgrading on a test system before putting
    the changes live. There may be multiple manual steps in migrating and these
    are best honed on a staging / test system.

Python and PyMongo
==================

MongoEngine requires python 2.6 (or above) and pymongo 2.5 (or above).

Data Model
==========

Inheritance
-----------

The inheritance model has changed: we no longer need to store an array of
:attr:`types` with the model, we can just use the classname in :attr:`_cls`.
This means that you will have to update your indexes for each of your
inherited classes like so: ::

    # 1. Declaration of the class
    class Animal(Document):
        name = StringField()
        meta = {
            'allow_inheritance': True,
            'indexes': ['name']
        }

    # 2. Remove _types
    collection = Animal._get_collection()
    collection.update({}, {"$unset": {"_types": 1}}, multi=True)

    # 3. Confirm extra data is removed
    count = collection.find({'_types': {"$exists": True}}).count()
    assert count == 0

    # 4. Remove indexes
    info = collection.index_information()
    indexes_to_drop = [key for key, value in info.iteritems()
                       if '_types' in dict(value['key'])]
    for index in indexes_to_drop:
        collection.drop_index(index)

    # 5. Recreate indexes
    Animal.ensure_indexes()

Document Definition
-------------------

The default for inheritance has changed - it's now off by default and
:attr:`_cls` will not be stored automatically with the class. So if you extend
your :class:`~mongoengine.Document` or :class:`~mongoengine.EmbeddedDocuments`
you will need to declare :attr:`allow_inheritance` in the meta data like so: ::

    class Animal(Document):
        name = StringField()

        meta = {'allow_inheritance': True}

Previously, if you had data in the database that wasn't defined in the Document
definition, it would be set as an attribute on the document. This is no longer
the case and the data is set only in the ``document._data`` dictionary: ::

    >>> from mongoengine import *
    >>> class Animal(Document):
    ...     name = StringField()
    ...
    >>> cat = Animal(name="kit", size="small")

    # 0.7
    >>> cat.size
    u'small'

    # 0.8
    >>> cat.size
    Traceback (most recent call last):
      File "<stdin>", line 1, in <module>
    AttributeError: 'Animal' object has no attribute 'size'

ReferenceField
--------------

ReferenceFields now store ObjectIds by default - this is more efficient than
DBRefs as we already know what Document types they reference::

    # Old code
    class Animal(Document):
        name = ReferenceField('self')

    # New code to keep dbrefs
    class Animal(Document):
        name = ReferenceField('self', dbref=True)

To migrate all the references you need to touch each object and mark it as dirty
eg::

    # Doc definition
    class Person(Document):
        name = StringField()
        parent = ReferenceField('self')
        friends = ListField(ReferenceField('self'))

    # Mark all ReferenceFields as dirty and save
    for p in Person.objects:
        p._mark_as_dirty('parent')
        p._mark_as_dirty('friends')
        p.save()

`An example test migration for ReferenceFields is available on github
<https://github.com/MongoEngine/mongoengine/blob/master/tests/migration/refrencefield_dbref_to_object_id.py>`_.

UUIDField
---------

UUIDFields now default to storing binary values::

    # Old code
    class Animal(Document):
        uuid = UUIDField()

    # New code to keep the old string behaviour
    class Animal(Document):
        uuid = UUIDField(binary=False)

To migrate all the uuids you need to touch each object and mark it as dirty
eg::

    # Doc definition
    class Animal(Document):
        uuid = UUIDField()

    # Mark all UUIDFields as dirty and save
    for a in Animal.objects:
        a._mark_as_dirty('uuid')
        a.save()

`An example test migration for UUIDFields is available on github
<https://github.com/MongoEngine/mongoengine/blob/master/tests/migration/uuidfield_to_binary.py>`_.

DecimalField
------------

DecimalFields now store floats - previously they were storing strings, which
made it impossible to do correct comparisons when querying::

    # Old code
    class Person(Document):
        balance = DecimalField()

    # New code to keep the old string behaviour
    class Person(Document):
        balance = DecimalField(force_string=True)

To migrate all the decimal values you need to touch each object and mark it as
dirty eg::

    # Doc definition
    class Person(Document):
        balance = DecimalField()

    # Mark all DecimalFields as dirty and save
    for p in Person.objects:
        p._mark_as_dirty('balance')
        p.save()

.. note:: DecimalFields have also been improved with the addition of precision
    and rounding. See :class:`~mongoengine.fields.DecimalField` for more information.

`An example test migration for DecimalFields is available on github
<https://github.com/MongoEngine/mongoengine/blob/master/tests/migration/decimalfield_as_float.py>`_.

Cascading Saves
---------------

To improve performance document saves will no longer automatically cascade.
Any changes to a Document's references will either have to be saved manually or
you will have to explicitly tell it to cascade on save::

    # At the class level:
    class Person(Document):
        meta = {'cascade': True}

    # Or on save:
    my_document.save(cascade=True)

Storage
-------

Document and Embedded Documents are now serialized based on declared field order.
Previously, the data was passed to mongodb as a dictionary, which meant that
order wasn't guaranteed - so things like ``$addToSet`` operations on
:class:`~mongoengine.EmbeddedDocument` could potentially fail in unexpected
ways.

If this impacts you, you may want to rewrite the objects using the
``doc._mark_as_dirty('field')`` pattern described above. If you are using a
compound primary key then you will need to ensure the order is fixed and match
your EmbeddedDocument to that order.

Querysets
=========

Attack of the clones
--------------------

Querysets now return clones and should no longer be considered editable in
place. This brings us in line with how Django's querysets work and removes a
long running gotcha. If you edit your querysets in place you will have to
update your code like so: ::

    # Old code:
    mammals = Animal.objects(type="mammal")
    mammals.filter(order="Carnivora")  # Returns a cloned queryset that isn't assigned to anything - so this will break in 0.8
    [m for m in mammals]               # This will return all mammals in 0.8 as the 2nd filter returned a new queryset

    # Update example a) assign the queryset after a change:
    mammals = Animal.objects(type="mammal")
    carnivores = mammals.filter(order="Carnivora")  # Reassign the new queryset so filter can be applied
    [m for m in carnivores]                         # This will return all carnivores

    # Update example b) chain the queryset:
    mammals = Animal.objects(type="mammal").filter(order="Carnivora")  # The final queryset is assigned to mammals
    [m for m in mammals]  # This will return all carnivores

Len iterates the queryset
-------------------------

If you ever did `len(queryset)` it previously did a `count()` under the covers;
this caused some unusual issues. As `len(queryset)` is most often used by
`list(queryset)` we now cache the queryset results and use that for the length.

This isn't as performant as a `count()` and if you aren't iterating the
queryset you should upgrade to use count::

    # Old code
    len(Animal.objects(type="mammal"))

    # New code
    Animal.objects(type="mammal").count()

.only() now inline with .exclude()
----------------------------------

The behaviour of `.only()` was highly ambiguous; it now works as the mirror of
`.exclude()`. Chaining `.only()` calls will increase the fields required::

    # Old code
    Animal.objects().only(['type', 'name']).only('name', 'order')  # Would have returned just `name`

    # New code
    Animal.objects().only('name')

    # Note:
    Animal.objects().only(['name']).only('order')  # Now returns `name` *and* `order`

Client
======

PyMongo 2.4 came with a new connection client, MongoClient_, and started the
deprecation of the old :class:`~pymongo.connection.Connection`. MongoEngine
now uses the latest `MongoClient` for connections. By default operations were
`safe` but if you turned them off or used the connection directly this will
impact your queries.

Querysets
---------

Safe
^^^^

`safe` has been deprecated in the new MongoClient connection. Please use
`write_concern` instead. As `safe` always defaulted to `True` normally no code
change is required. To disable confirmation of the write just pass `{"w": 0}`
eg: ::

    # Old code:
    Animal(name="Dinosaur").save(safe=False)

    # New code:
    Animal(name="Dinosaur").save(write_concern={"w": 0})

Write Concern
^^^^^^^^^^^^^

`write_options` has been replaced with `write_concern` to bring it in line with
pymongo. To upgrade simply rename any instances where you used the `write_options`
keyword to `write_concern` like so::

    # Old code:
    Animal(name="Dinosaur").save(write_options={"w": 2})

    # New code:
    Animal(name="Dinosaur").save(write_concern={"w": 2})

Indexes
=======

Index methods are no longer tied to querysets but rather to the document class.
Although `QuerySet._ensure_indexes` and `QuerySet.ensure_index` still exist,
they should be replaced with :func:`~mongoengine.Document.ensure_indexes` /
:func:`~mongoengine.Document.ensure_index`.
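A short sketch of the rename, reusing the ``Animal`` document from the examples
above (treat the old spelling as approximate --- the queryset methods are the
deprecated ones)::

    # Old code
    Animal.objects.ensure_index('name')

    # New code
    Animal.ensure_index('name')
    Animal.ensure_indexes()  # builds everything declared in meta['indexes']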
SequenceFields
==============

:class:`~mongoengine.fields.SequenceField` now inherits from `BaseField` to
allow flexible storage of the calculated value. As such MIN and MAX settings
are no longer handled.

.. _MongoClient: http://blog.mongodb.org/post/36666163412/introducing-mongoclient

0.6 to 0.7
**********

Cascade saves
=============

Saves will raise a `FutureWarning` if they cascade and cascade hasn't been set
to True. This is because in 0.8 it will default to False. If you require
cascading saves then either set it in the `meta` or pass
via `save` eg ::

    # At the class level:
    class Person(Document):
        meta = {'cascade': True}

    # Or in code:
    my_document.save(cascade=True)

.. note::
    Remember: cascading saves **do not** cascade through lists.

ReferenceFields
===============

ReferenceFields now can store references as ObjectId strings instead of DBRefs.
This will become the default in 0.8 and if `dbref` is not set a `FutureWarning`
will be raised.

To explicitly continue to use DBRefs change the `dbref` flag
to True ::

    class Person(Document):
        groups = ListField(ReferenceField(Group, dbref=True))

To migrate to using strings instead of DBRefs you will have to manually
migrate ::

    # Step 1 - Migrate the model definition
    class Group(Document):
        author = ReferenceField(User, dbref=False)
        members = ListField(ReferenceField(User, dbref=False))

    # Step 2 - Migrate the data
    for g in Group.objects():
        g.author = g.author
        g.members = g.members
        g.save()

item_frequencies
================

In the 0.6 series we added support for null / zero / false values in
item_frequencies. A side effect was to return keys in the type they are
stored in rather than as string representations. Your code may need to be
updated to handle native types rather than string keys for the results of
item frequency queries.
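As an illustrative sketch (the ``Person`` document and its ``is_active`` field
are hypothetical)::

    freqs = Person.objects.item_frequencies('is_active')

    # Before 0.6 the keys were string representations: {'True': 10, 'False': 2}
    # From 0.6 the keys are the stored native types:   {True: 10, False: 2}
    active = freqs.get(True, 0)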
BinaryFields
============

Binary fields have been updated so that they are native binary types. If you
previously were doing `str` comparisons with binary field values you will have
to update and wrap the value in a `str`.
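A minimal sketch of the change (the ``Attachment`` document and its ``data``
field are hypothetical)::

    attachment = Attachment.objects.first()
    raw = "\x00\x01\x02"

    # Old code
    assert attachment.data == raw

    # New code - the value is now a native binary type,
    # so wrap it in str() before comparing against a plain string
    assert str(attachment.data) == raw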
0.5 to 0.6
**********

Embedded Documents - if you had a `pk` field you will have to rename it from
`_id` to `pk` as pk is no longer a property of Embedded Documents.

Reverse Delete Rules in Embedded Documents, MapFields and DictFields now throw
an InvalidDocument error as they aren't currently supported.

Document._get_subclasses - Is no longer used and the class method has been
removed.

Document.objects.with_id - now raises an InvalidQueryError if used with a
filter.

FutureWarning - A future warning has been added to all inherited classes that
don't define :attr:`allow_inheritance` in their meta.

You may need to update pyMongo to 2.0 for use with Sharding.

0.4 to 0.5
**********

There have been the following backwards incompatibilities from 0.4 to 0.5. The
main areas of change are: choices in fields, map_reduce and collection names.

Choice options
==============

Choices are now expected to be an iterable of tuples, with the first element in
each tuple being the actual value to be stored. The second element is the
human-readable name for the option.
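For example (the ``Shirt`` document is hypothetical)::

    class Shirt(Document):
        size = StringField(max_length=3, choices=(
            ('S', 'Small'),    # stored value, human-readable name
            ('M', 'Medium'),
            ('L', 'Large'),
        ))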
PyMongo / MongoDB
=================

map_reduce now requires PyMongo 1.11+. The PyMongo `merge_output` and
`reduce_output` parameters have been deprecated.

More methods now use map_reduce as db.eval is not supported for sharding;
as such the following have been changed:

* :meth:`~mongoengine.queryset.QuerySet.sum`
* :meth:`~mongoengine.queryset.QuerySet.average`
* :meth:`~mongoengine.queryset.QuerySet.item_frequencies`

Default collection naming
=========================

Previously it was just lowercase; it's now much more pythonic and readable as
it's lowercase with underscores. Previously ::

    class MyAceDocument(Document):
        pass

    MyAceDocument._meta['collection'] == "myacedocument"

In 0.5 this will change to ::

    class MyAceDocument(Document):
        pass

    MyAceDocument._get_collection_name() == "my_ace_document"

To upgrade use a Mixin class to set meta like so ::

    class BaseMixin(object):
        meta = {
            'collection': lambda c: c.__name__.lower()
        }

    class MyAceDocument(Document, BaseMixin):
        pass

    MyAceDocument._get_collection_name() == "myacedocument"

Alternatively, you can rename your collections eg ::

    from mongoengine.connection import _get_db
    from mongoengine.base import _document_registry

    def rename_collections():
        db = _get_db()

        failure = False

        collection_names = [d._get_collection_name()
                            for d in _document_registry.values()]

        for new_style_name in collection_names:
            if not new_style_name:  # embedded documents don't have collections
                continue
            old_style_name = new_style_name.replace('_', '')

            if old_style_name == new_style_name:
                continue  # Nothing to do

            existing = db.collection_names()
            if old_style_name in existing:
                if new_style_name in existing:
                    failure = True
                    print "FAILED to rename: %s to %s (already exists)" % (
                        old_style_name, new_style_name)
                else:
                    db[old_style_name].rename(new_style_name)
                    print "Renamed: %s to %s" % (old_style_name,
                                                 new_style_name)

        if failure:
            print "Upgrading collection names failed"
        else:
            print "Upgraded collection names"

mongodb 1.8 > 2.0 +
===================

It's been reported that indexes may need to be recreated to match the newer
index format. To do this, drop your indexes and call ``ensure_indexes`` on
each model.
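A hedged sketch of that rebuild (substitute your own document classes)::

    for model in (User, Post, Comment):  # your document classes
        model._get_collection().drop_indexes()
        model.ensure_indexes()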
@@ -1,385 +0,0 @@
|
|||||||
==========
User Guide
==========

.. _guide-connecting:

Installing
==========
MongoEngine is available on PyPI, so you can install it with
:program:`easy_install`

.. code-block:: console

    # easy_install mongoengine

Alternatively, if you don't have setuptools installed, `download it from PyPI
<http://pypi.python.org/pypi/mongoengine/>`_ and run

.. code-block:: console

    # python setup.py install

Connecting to MongoDB
=====================
To connect to a running instance of :program:`mongod`, use the
:func:`~mongoengine.connect` function. The first argument is the name of the
database to connect to. If the database does not exist, it will be created. If
the database requires authentication, :attr:`username` and :attr:`password`
arguments may be provided::

    from mongoengine import connect
    connect('project1', username='webapp', password='pwd123')

By default, MongoEngine assumes that the :program:`mongod` instance is running
on **localhost** on port **27017**. If MongoDB is running elsewhere, you may
provide :attr:`host` and :attr:`port` arguments to
:func:`~mongoengine.connect`::

    connect('project1', host='192.168.1.35', port=12345)

Defining documents
==================
In MongoDB, a **document** is roughly equivalent to a **row** in an RDBMS. When
working with relational databases, rows are stored in **tables**, which have a
strict **schema** that the rows follow. MongoDB stores documents in
**collections** rather than tables - the principal difference is that no schema
is enforced at a database level.

Defining a document's schema
----------------------------
MongoEngine allows you to define schemata for documents, as this helps to
reduce coding errors and allows utility methods to be defined on fields which
may be present.

To define a schema for a document, create a class that inherits from
:class:`~mongoengine.Document`. Fields are specified by adding **field
objects** as class attributes to the document class::

    from mongoengine import *
    from datetime import datetime

    class Page(Document):
        title = StringField(max_length=200, required=True)
        date_modified = DateTimeField(default=datetime.now)

Fields
------
By default, fields are not required. To make a field mandatory, set the
:attr:`required` keyword argument of a field to ``True``. Fields also may have
validation constraints available (such as :attr:`max_length` in the example
above). Fields may also take default values, which will be used if a value is
not provided. Default values may optionally be a callable, which will be called
to retrieve the value (such as in the above example). The field types available
are as follows (a combined example follows the list):

* :class:`~mongoengine.StringField`
* :class:`~mongoengine.IntField`
* :class:`~mongoengine.FloatField`
* :class:`~mongoengine.DateTimeField`
* :class:`~mongoengine.ListField`
* :class:`~mongoengine.ObjectIdField`
* :class:`~mongoengine.EmbeddedDocumentField`
* :class:`~mongoengine.ReferenceField`
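A short sketch combining several of these field types (illustrative only)::

    class Book(Document):
        title = StringField(required=True)
        pages = IntField()
        price = FloatField()
        published = DateTimeField()
        tags = ListField(StringField(max_length=30))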
List fields
^^^^^^^^^^^
MongoDB allows the storage of lists of items. To add a list of items to a
:class:`~mongoengine.Document`, use the :class:`~mongoengine.ListField` field
type. :class:`~mongoengine.ListField` takes another field object as its first
argument, which specifies which type elements may be stored within the list::

    class Page(Document):
        tags = ListField(StringField(max_length=50))

Embedded documents
^^^^^^^^^^^^^^^^^^
MongoDB has the ability to embed documents within other documents. Schemata may
be defined for these embedded documents, just as they may be for regular
documents. To create an embedded document, just define a document as usual, but
inherit from :class:`~mongoengine.EmbeddedDocument` rather than
:class:`~mongoengine.Document`::

    class Comment(EmbeddedDocument):
        content = StringField()

To embed the document within another document, use the
:class:`~mongoengine.EmbeddedDocumentField` field type, providing the embedded
document class as the first argument::

    class Page(Document):
        comments = ListField(EmbeddedDocumentField(Comment))

    comment1 = Comment(content='Good work!')
    comment2 = Comment(content='Nice article!')
    page = Page(comments=[comment1, comment2])

Reference fields
^^^^^^^^^^^^^^^^
References may be stored to other documents in the database using the
:class:`~mongoengine.ReferenceField`. Pass in another document class as the
first argument to the constructor, then simply assign document objects to the
field::

    class User(Document):
        name = StringField()

    class Page(Document):
        content = StringField()
        author = ReferenceField(User)

    john = User(name="John Smith")
    john.save()

    post = Page(content="Test Page")
    post.author = john
    post.save()

The :class:`User` object is automatically turned into a reference behind the
scenes, and dereferenced when the :class:`Page` object is retrieved.

Document collections
--------------------
Document classes that inherit **directly** from :class:`~mongoengine.Document`
will have their own **collection** in the database. The name of the collection
is by default the name of the class, converted to lowercase (so in the example
above, the collection would be called `page`). If you need to change the name
of the collection (e.g. to use MongoEngine with an existing database), then
create a class dictionary attribute called :attr:`meta` on your document, and
set :attr:`collection` to the name of the collection that you want your
document class to use::

    class Page(Document):
        title = StringField(max_length=200, required=True)
        meta = {'collection': 'cmsPage'}

Capped collections
^^^^^^^^^^^^^^^^^^
A :class:`~mongoengine.Document` may use a **Capped Collection** by specifying
:attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary.
:attr:`max_documents` is the maximum number of documents that may be stored
in the collection, and :attr:`max_size` is the maximum size of the
collection in bytes. If :attr:`max_size` is not specified and
:attr:`max_documents` is, :attr:`max_size` defaults to 10000000 bytes (10MB).
The following example shows a :class:`Log` document that will be limited to
1000 entries and 2MB of disk space::

    class Log(Document):
        ip_address = StringField()
        meta = {'max_documents': 1000, 'max_size': 2000000}

Document inheritance
--------------------
To create a specialised type of a :class:`~mongoengine.Document` you have
defined, you may subclass it and add any extra fields or methods you may need.
As this new class is not a direct subclass of
:class:`~mongoengine.Document`, it will not be stored in its own collection; it
will use the same collection as its superclass uses. This allows for more
convenient and efficient retrieval of related documents::

    # Stored in a collection named 'page'
    class Page(Document):
        title = StringField(max_length=200, required=True)

    # Also stored in the collection named 'page'
    class DatedPage(Page):
        date = DateTimeField()

Working with existing data
^^^^^^^^^^^^^^^^^^^^^^^^^^
To enable correct retrieval of documents involved in this kind of hierarchy,
two extra attributes are stored on each document in the database: :attr:`_cls`
and :attr:`_types`. These are hidden from the user through the MongoEngine
interface, but may not be present if you are trying to use MongoEngine with
an existing database. For this reason, you may disable this inheritance
mechanism, removing the dependency on :attr:`_cls` and :attr:`_types`, enabling
you to work with existing databases. To disable inheritance on a document
class, set :attr:`allow_inheritance` to ``False`` in the :attr:`meta`
dictionary::

    # Will work with data in an existing collection named 'cmsPage'
    class Page(Document):
        title = StringField(max_length=200, required=True)
        meta = {
            'collection': 'cmsPage',
            'allow_inheritance': False,
        }

Document instances
==================
To create a new document object, create an instance of the relevant document
class, providing values for its fields as its constructor keyword arguments.
You may provide values for any of the fields on the document::

    >>> page = Page(title="Test Page")
    >>> page.title
    'Test Page'

You may also assign values to the document's fields using standard object
attribute syntax::

    >>> page.title = "Example Page"
    >>> page.title
    'Example Page'

Saving and deleting documents
-----------------------------
To save the document to the database, call the
:meth:`~mongoengine.Document.save` method. If the document does not exist in
the database, it will be created. If it does already exist, it will be
updated.

To delete a document, call the :meth:`~mongoengine.Document.delete` method.
Note that this will only work if the document exists in the database and has a
valid :attr:`id`.
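For example::

    page = Page(title="Test Page")
    page.save()     # inserted into the database

    page.title = "Renamed"
    page.save()     # the existing document is updated

    page.delete()   # removed from the database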
Document IDs
------------
Each document in the database has a unique id. This may be accessed through the
:attr:`id` attribute on :class:`~mongoengine.Document` objects. Usually, the id
will be generated automatically by the database server when the object is
saved, meaning that you may only access the :attr:`id` field once a document
has been saved::

    >>> page = Page(title="Test Page")
    >>> page.id
    >>> page.save()
    >>> page.id
    ObjectId('123456789abcdef000000000')

Alternatively, you may explicitly set the :attr:`id` before you save the
document, but the id must be a valid PyMongo :class:`ObjectId`.
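For example (the import location below matches the pymongo versions of this
era; newer pymongo releases expose :class:`ObjectId` from ``bson`` instead)::

    from pymongo.objectid import ObjectId

    page = Page(title="Test Page")
    page.id = ObjectId()  # explicitly assign an id before saving
    page.save()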
Querying the database
=====================
:class:`~mongoengine.Document` classes have an :attr:`objects` attribute, which
is used for accessing the objects in the database associated with the class.
The :attr:`objects` attribute is actually a
:class:`~mongoengine.queryset.QuerySetManager`, which creates and returns a new
:class:`~mongoengine.queryset.QuerySet` object on access. The
:class:`~mongoengine.queryset.QuerySet` object may be iterated over to
fetch documents from the database::

    # Prints out the names of all the users in the database
    for user in User.objects:
        print user.name

Filtering queries
-----------------
The query may be filtered by calling the
:class:`~mongoengine.queryset.QuerySet` object with field lookup keyword
arguments. The keys in the keyword arguments correspond to fields on the
:class:`~mongoengine.Document` you are querying::

    # This will return a QuerySet that will only iterate over users whose
    # 'country' field is set to 'uk'
    uk_users = User.objects(country='uk')

Fields on embedded documents may also be referred to using field lookup syntax
by using a double-underscore in place of the dot in object attribute access
syntax::

    # This will return a QuerySet that will only iterate over pages that have
    # been written by a user whose 'country' field is set to 'uk'
    uk_pages = Page.objects(author__country='uk')

Querying lists
^^^^^^^^^^^^^^
On most fields, this syntax will look up documents where the field specified
matches the given value exactly, but when the field refers to a
:class:`~mongoengine.ListField`, a single item may be provided, in which case
lists that contain that item will be matched::

    class Page(Document):
        tags = ListField(StringField())

    # This will match all pages that have the word 'coding' as an item in the
    # 'tags' list
    Page.objects(tags='coding')

Query operators
---------------
Operators other than equality may also be used in queries; just attach the
operator name to a key with a double-underscore::

    # Only find users whose age is 18 or less
    young_users = User.objects(age__lte=18)

Available operators are as follows (a few combined examples follow the list):

* ``neq`` -- not equal to
* ``lt`` -- less than
* ``lte`` -- less than or equal to
* ``gt`` -- greater than
* ``gte`` -- greater than or equal to
* ``in`` -- value is in list (a list of values should be provided)
* ``nin`` -- value is not in list (a list of values should be provided)
* ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values
* ``all`` -- every item in array is in list of values provided
* ``size`` -- the size of the array is equal to the value provided
* ``exists`` -- value for field exists
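A few of these operators in use (illustrative)::

    User.objects(age__gt=18, age__lte=65)    # 18 < age <= 65
    User.objects(country__in=['uk', 'ie'])   # country is one of the values
    Page.objects(tags__size=3)               # 'tags' holds exactly 3 items
    Page.objects(tags__all=['mongo', 'db'])  # 'tags' contains both items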
Limiting and skipping results
-----------------------------
Just as with traditional ORMs, you may limit the number of results returned, or
skip a number of results in your query.
:meth:`~mongoengine.queryset.QuerySet.limit` and
:meth:`~mongoengine.queryset.QuerySet.skip` methods are available on
:class:`~mongoengine.queryset.QuerySet` objects, but the preferred syntax for
achieving this is using array-slicing syntax::

    # Only the first 5 people
    users = User.objects[:5]

    # All except for the first 5 people
    users = User.objects[5:]

    # 5 users, starting from the 10th user found
    users = User.objects[10:15]

Aggregation
-----------
MongoDB provides some aggregation methods out of the box, but there are not as
many as you typically get with an RDBMS. MongoEngine provides a wrapper around
the built-in methods and provides some of its own, which are implemented as
JavaScript code that is executed on the database server.

Counting results
^^^^^^^^^^^^^^^^
Just as with limiting and skipping results, there is a method on
:class:`~mongoengine.queryset.QuerySet` objects --
:meth:`~mongoengine.queryset.QuerySet.count`, but there is also a more Pythonic
way of achieving this::

    num_users = len(User.objects)

Further aggregation
^^^^^^^^^^^^^^^^^^^
You may sum over the values of a specific field on documents using
:meth:`~mongoengine.queryset.QuerySet.sum`::

    yearly_expense = Employee.objects.sum('salary')

.. note::
    If the field isn't present on a document, that document will be excluded
    from the sum.

To get the average (mean) of a field on a collection of documents, use
:meth:`~mongoengine.queryset.QuerySet.average`::

    mean_age = User.objects.average('age')

As MongoDB provides native lists, MongoEngine provides a helper method to get a
dictionary of the frequencies of items in lists across an entire collection --
:meth:`~mongoengine.queryset.QuerySet.item_frequencies`. An example of its use
would be generating "tag-clouds"::

    class Article(Document):
        tag = ListField(StringField())

    # After adding some tagged articles...
    tag_freqs = Article.objects.item_frequencies('tag', normalize=True)

    from operator import itemgetter
    top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10]
@@ -6,19 +6,21 @@ import connection
 from connection import *
 import queryset
 from queryset import *
+import signals
+from signals import *
+from errors import *
+import errors
+import django

-__all__ = (document.__all__ + fields.__all__ + connection.__all__ +
-           queryset.__all__)
+__all__ = (list(document.__all__) + fields.__all__ + connection.__all__ +
+           list(queryset.__all__) + signals.__all__ + list(errors.__all__))

-__author__ = 'Harry Marr'
-
-VERSION = (0, 1, 1)
+VERSION = (0, 8, 1)

 def get_version():
-    version = '%s.%s' % (VERSION[0], VERSION[1])
-    if VERSION[2]:
-        version = '%s.%s' % (version, VERSION[2])
-    return version
+    if isinstance(VERSION[-1], basestring):
+        return '.'.join(map(str, VERSION[:-1])) + VERSION[-1]
+    return '.'.join(map(str, VERSION))

 __version__ = get_version()
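For reference, what the rewritten ``get_version`` produces (illustrative
values only)::

    VERSION = (0, 8, 1)
    get_version()               # -> '0.8.1'

    VERSION = (0, 8, 0, 'RC1')  # a hypothetical string suffix
    get_version()               # -> '0.8.0RC1'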
@@ -1,271 +0,0 @@
from queryset import QuerySetManager

import pymongo


class ValidationError(Exception):
    pass


class BaseField(object):
    """A base class for fields in a MongoDB document. Instances of this class
    may be added to subclasses of `Document` to define a document's schema.
    """

    def __init__(self, name=None, required=False, default=None):
        self.name = name
        self.required = required
        self.default = default

    def __get__(self, instance, owner):
        """Descriptor for retrieving a value from a field in a document. Do
        any necessary conversion between Python and MongoDB types.
        """
        if instance is None:
            # Document class being used rather than a document object
            return self

        # Get value from document instance if available, if not use default
        value = instance._data.get(self.name)
        if value is None:
            value = self.default
            # Allow callable default values
            if callable(value):
                value = value()
        return value

    def __set__(self, instance, value):
        """Descriptor for assigning a value to a field in a document.
        """
        instance._data[self.name] = value

    def to_python(self, value):
        """Convert a MongoDB-compatible type to a Python type.
        """
        return value

    def to_mongo(self, value):
        """Convert a Python type to a MongoDB-compatible type.
        """
        return self.to_python(value)

    def validate(self, value):
        """Perform validation on a value.
        """
        pass


class ObjectIdField(BaseField):
    """A field wrapper around MongoDB's ObjectIds.
    """

    def to_python(self, value):
        return str(value)

    def to_mongo(self, value):
        if not isinstance(value, pymongo.objectid.ObjectId):
            return pymongo.objectid.ObjectId(value)
        return value

    def validate(self, value):
        try:
            pymongo.objectid.ObjectId(str(value))
        except:
            raise ValidationError('Invalid Object ID')
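# --- Illustration (not part of the original module) --------------------------
# A minimal sketch of a custom field built on BaseField; hypothetical example.
class NameField(BaseField):
    """Stores short unicode strings (hypothetical example field)."""

    def to_python(self, value):
        return unicode(value)

    def validate(self, value):
        if not isinstance(value, basestring):
            raise ValidationError('String expected')
# ------------------------------------------------------------------------------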
class DocumentMetaclass(type):
    """Metaclass for all documents.
    """

    def __new__(cls, name, bases, attrs):
        metaclass = attrs.get('__metaclass__')
        super_new = super(DocumentMetaclass, cls).__new__
        if metaclass and issubclass(metaclass, DocumentMetaclass):
            return super_new(cls, name, bases, attrs)

        doc_fields = {}
        class_name = [name]
        superclasses = {}
        for base in bases:
            # Include all fields present in superclasses
            if hasattr(base, '_fields'):
                doc_fields.update(base._fields)
                class_name.append(base._class_name)
                # Get superclasses from superclass
                superclasses[base._class_name] = base
                superclasses.update(base._superclasses)
        attrs['_class_name'] = '.'.join(reversed(class_name))
        attrs['_superclasses'] = superclasses

        # Add the document's fields to the _fields attribute
        for attr_name, attr_value in attrs.items():
            if hasattr(attr_value, "__class__") and \
               issubclass(attr_value.__class__, BaseField):
                if not attr_value.name:
                    attr_value.name = attr_name
                doc_fields[attr_name] = attr_value
        attrs['_fields'] = doc_fields

        return super_new(cls, name, bases, attrs)


class TopLevelDocumentMetaclass(DocumentMetaclass):
    """Metaclass for top-level documents (i.e. documents that have their own
    collection in the database).
    """

    def __new__(cls, name, bases, attrs):
        super_new = super(TopLevelDocumentMetaclass, cls).__new__
        # Classes defined in this package are abstract and should not have
        # their own metadata with DB collection, etc.
        # __metaclass__ is only set on the class with the __metaclass__
        # attribute (i.e. it is not set on subclasses). This differentiates
        # 'real' documents from the 'Document' class
        if attrs.get('__metaclass__') == TopLevelDocumentMetaclass:
            return super_new(cls, name, bases, attrs)

        collection = name.lower()

        simple_class = True
        # Subclassed documents inherit collection from superclass
        for base in bases:
            if hasattr(base, '_meta') and 'collection' in base._meta:
                # Ensure that the Document class may be subclassed -
                # inheritance may be disabled to remove dependency on
                # additional fields _cls and _types
                if base._meta.get('allow_inheritance', True) == False:
                    raise ValueError('Document %s may not be subclassed' %
                                     base.__name__)
                else:
                    simple_class = False
                collection = base._meta['collection']

        meta = {
            'collection': collection,
            'allow_inheritance': True,
            'max_documents': None,
            'max_size': None,
        }
        meta.update(attrs.get('meta', {}))
        # Only simple classes - direct subclasses of Document - may set
        # allow_inheritance to False
        if not simple_class and not meta['allow_inheritance']:
            raise ValueError('Only direct subclasses of Document may set '
                             '"allow_inheritance" to False')
        attrs['_meta'] = meta

        attrs['id'] = ObjectIdField(name='_id')

        # Set up collection manager, needs the class to have fields so use
        # DocumentMetaclass before instantiating CollectionManager object
        new_class = super_new(cls, name, bases, attrs)
        new_class.objects = QuerySetManager()

        return new_class
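# --- Illustration (not part of the original module) --------------------------
# Roughly what these metaclasses compute for a hypothetical direct subclass
# of Document (Document itself is defined elsewhere in the package):
#
#     class Person(Document):
#         name = StringField()
#
#     Person._class_name          # 'Document.Person'
#     Person._fields              # {'name': ..., 'id': <ObjectIdField>}
#     Person._meta['collection']  # 'person' (lowercased class name)
#     Person.objects              # a QuerySetManager
# ------------------------------------------------------------------------------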
class BaseDocument(object):

    def __init__(self, **values):
        self._data = {}
        # Assign initial values to instance
        for attr_name, attr_value in self._fields.items():
            if attr_name in values:
                setattr(self, attr_name, values.pop(attr_name))
            else:
                # Use default value if present
                value = getattr(self, attr_name, None)
                setattr(self, attr_name, value)

    @classmethod
    def _get_subclasses(cls):
        """Return a dictionary of all subclasses (found recursively).
        """
        try:
            subclasses = cls.__subclasses__()
        except:
            subclasses = cls.__subclasses__(cls)

        all_subclasses = {}
        for subclass in subclasses:
            all_subclasses[subclass._class_name] = subclass
            all_subclasses.update(subclass._get_subclasses())
        return all_subclasses

    def __iter__(self):
        # Use _data rather than _fields as iterator only looks at names so
        # values don't need to be converted to Python types
        return iter(self._data)

    def __getitem__(self, name):
        """Dictionary-style field access, return a field's value if present.
        """
        try:
            return getattr(self, name)
        except AttributeError:
            raise KeyError(name)

    def __setitem__(self, name, value):
        """Dictionary-style field access, set a field's value.
        """
        # Ensure that the field exists before setting its value
        if name not in self._fields:
            raise KeyError(name)
        return setattr(self, name, value)

    def __contains__(self, name):
        try:
            val = getattr(self, name)
            return val is not None
        except AttributeError:
            return False

    def __len__(self):
        return len(self._data)

    def to_mongo(self):
        """Return data dictionary ready for use with MongoDB.
        """
        data = {}
        for field_name, field in self._fields.items():
            value = getattr(self, field_name, None)
            if value is not None:
                data[field.name] = field.to_mongo(value)
        # Only add _cls and _types if allow_inheritance is not False
        if not (hasattr(self, '_meta') and
                self._meta.get('allow_inheritance', True) == False):
            data['_cls'] = self._class_name
            data['_types'] = self._superclasses.keys() + [self._class_name]
        return data

    @classmethod
    def _from_son(cls, son):
        """Create an instance of a Document (subclass) from a PyMongo SON.
        """
        # get the class name from the document, falling back to the given
        # class if unavailable
        class_name = son.get(u'_cls', cls._class_name)

        data = dict((str(key), value) for key, value in son.items())

        if '_types' in data:
            del data['_types']

        if '_cls' in data:
            del data['_cls']

        # Return correct subclass for document type
        if class_name != cls._class_name:
            subclasses = cls._get_subclasses()
            if class_name not in subclasses:
                # Type of document is probably more generic than the class
                # that has been queried to return this SON
                return None
            cls = subclasses[class_name]

        for field_name, field in cls._fields.items():
            if field.name in data:
                data[field_name] = field.to_python(data[field.name])

        return cls(**data)
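Taken together, an illustrative round trip through this old BaseDocument
(assuming a concrete ``Page`` subclass with a ``title`` string field)::

    page = Page(title='Hello')
    son = page.to_mongo()
    # {'title': 'Hello', '_cls': 'Document.Page',
    #  '_types': ['Document', 'Document.Page']}
    copy = Page._from_son(son)  # rebuild a document from raw SON data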
mongoengine/base/__init__.py (new file, 8 lines)
@@ -0,0 +1,8 @@
from mongoengine.base.common import *
from mongoengine.base.datastructures import *
from mongoengine.base.document import *
from mongoengine.base.fields import *
from mongoengine.base.metaclasses import *

# Help with backwards compatibility
from mongoengine.errors import *
mongoengine/base/common.py (new file, 26 lines)
@@ -0,0 +1,26 @@
from mongoengine.errors import NotRegistered

__all__ = ('ALLOW_INHERITANCE', 'get_document', '_document_registry')

ALLOW_INHERITANCE = False

_document_registry = {}


def get_document(name):
    doc = _document_registry.get(name, None)
    if not doc:
        # Possible old style name
        single_end = name.split('.')[-1]
        compound_end = '.%s' % single_end
        possible_match = [k for k in _document_registry.keys()
                          if k.endswith(compound_end) or k == single_end]
        if len(possible_match) == 1:
            doc = _document_registry.get(possible_match.pop(), None)
    if not doc:
        raise NotRegistered("""
            `%s` has not been registered in the document registry.
            Importing the document class automatically registers it, has it
            been imported?
        """.strip() % name)
    return doc
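Illustrative usage (any Document subclass is registered under its class name
as a side effect of being imported; the module below is hypothetical)::

    from myapp.models import BlogPost  # importing registers the class

    get_document('BlogPost')  # -> <class 'myapp.models.BlogPost'>
    get_document('Missing')   # raises NotRegistered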
mongoengine/base/datastructures.py (new file, 142 lines)
@@ -0,0 +1,142 @@
import weakref
from mongoengine.common import _import_class

__all__ = ("BaseDict", "BaseList")


class BaseDict(dict):
    """A special dict so we can watch any changes
    """

    _dereferenced = False
    _instance = None
    _name = None

    def __init__(self, dict_items, instance, name):
        self._instance = weakref.proxy(instance)
        self._name = name
        return super(BaseDict, self).__init__(dict_items)

    def __getitem__(self, *args, **kwargs):
        value = super(BaseDict, self).__getitem__(*args, **kwargs)

        EmbeddedDocument = _import_class('EmbeddedDocument')
        if isinstance(value, EmbeddedDocument) and value._instance is None:
            value._instance = self._instance
        return value

    def __setitem__(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).__setitem__(*args, **kwargs)

    def __delete__(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).__delete__(*args, **kwargs)

    def __delitem__(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).__delitem__(*args, **kwargs)

    def __delattr__(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).__delattr__(*args, **kwargs)

    def __getstate__(self):
        self.instance = None
        self._dereferenced = False
        return self

    def __setstate__(self, state):
        self = state
        return self

    def clear(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).clear(*args, **kwargs)

    def pop(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).pop(*args, **kwargs)

    def popitem(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).popitem(*args, **kwargs)

    def update(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).update(*args, **kwargs)

    def _mark_as_changed(self):
        if hasattr(self._instance, '_mark_as_changed'):
            self._instance._mark_as_changed(self._name)


class BaseList(list):
    """A special list so we can watch any changes
    """

    _dereferenced = False
    _instance = None
    _name = None

    def __init__(self, list_items, instance, name):
        self._instance = weakref.proxy(instance)
        self._name = name
        return super(BaseList, self).__init__(list_items)

    def __getitem__(self, *args, **kwargs):
        value = super(BaseList, self).__getitem__(*args, **kwargs)

        EmbeddedDocument = _import_class('EmbeddedDocument')
        if isinstance(value, EmbeddedDocument) and value._instance is None:
            value._instance = self._instance
        return value

    def __setitem__(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).__setitem__(*args, **kwargs)

    def __delitem__(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).__delitem__(*args, **kwargs)

    def __getstate__(self):
        self.instance = None
        self._dereferenced = False
        return self

    def __setstate__(self, state):
        self = state
        return self

    def append(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).append(*args, **kwargs)

    def extend(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).extend(*args, **kwargs)

    def insert(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).insert(*args, **kwargs)

    def pop(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).pop(*args, **kwargs)

    def remove(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).remove(*args, **kwargs)

    def reverse(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).reverse(*args, **kwargs)

    def sort(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).sort(*args, **kwargs)

    def _mark_as_changed(self):
        if hasattr(self._instance, '_mark_as_changed'):
            self._instance._mark_as_changed(self._name)
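A sketch of the change tracking these wrappers provide (hypothetical document
with a ``tags`` list field)::

    post = BlogPost.objects.first()
    post.tags                   # a BaseList bound to the document
    post.tags.append('mongo')   # calls _mark_as_changed('tags') on the owner
    post._get_changed_fields()  # now includes 'tags'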
mongoengine/base/document.py (new file, 823 lines)
@@ -0,0 +1,823 @@
import copy
import operator
import numbers
from functools import partial

import pymongo
from bson import json_util
from bson.dbref import DBRef
from bson.son import SON

from mongoengine import signals
from mongoengine.common import _import_class
from mongoengine.errors import (ValidationError, InvalidDocumentError,
                                LookUpError)
from mongoengine.python_support import (PY3, UNICODE_KWARGS, txt_type,
                                        to_str_keys_recursive)

from mongoengine.base.common import get_document, ALLOW_INHERITANCE
from mongoengine.base.datastructures import BaseDict, BaseList
from mongoengine.base.fields import ComplexBaseField

__all__ = ('BaseDocument', 'NON_FIELD_ERRORS')

NON_FIELD_ERRORS = '__all__'


class BaseDocument(object):

    _dynamic = False
    _created = True
    _dynamic_lock = True
    _initialised = False

    def __init__(self, *args, **values):
        """
        Initialise a document or embedded document

        :param __auto_convert: If True, try to convert supplied values to
            the field types
        :param values: A dictionary of values for the document
        """
        if args:
            # Combine positional arguments with named arguments.
            # We only want named arguments.
            field = iter(self._fields_ordered)
            for value in args:
                name = next(field)
                if name in values:
                    raise TypeError("Multiple values for keyword argument '" + name + "'")
                values[name] = value
        __auto_convert = values.pop("__auto_convert", True)
        signals.pre_init.send(self.__class__, document=self, values=values)

        self._data = {}

        # Assign default values to instance
        for key, field in self._fields.iteritems():
            if self._db_field_map.get(key, key) in values:
                continue
            value = getattr(self, key, None)
            setattr(self, key, value)

        # Set passed values after initialisation
        if self._dynamic:
            self._dynamic_fields = {}
            dynamic_data = {}
            for key, value in values.iteritems():
                if key in self._fields or key == '_id':
                    setattr(self, key, value)
                elif self._dynamic:
                    dynamic_data[key] = value
        else:
            FileField = _import_class('FileField')
            for key, value in values.iteritems():
                if key == '__auto_convert':
                    continue
                key = self._reverse_db_field_map.get(key, key)
                if key in self._fields or key in ('id', 'pk', '_cls'):
                    if __auto_convert and value is not None:
                        field = self._fields.get(key)
                        if field and not isinstance(field, FileField):
                            value = field.to_python(value)
                    setattr(self, key, value)
                else:
                    self._data[key] = value

        # Set any get_fieldname_display methods
        self.__set_field_display()

        if self._dynamic:
            self._dynamic_lock = False
            for key, value in dynamic_data.iteritems():
                setattr(self, key, value)

        # Flag initialised
        self._initialised = True
        signals.post_init.send(self.__class__, document=self)
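    # Illustration (not in the original source): positional arguments are
    # matched against _fields_ordered. For a hypothetical Person document
    # whose first declared field is 'name':
    #
    #     Person('Bob', age=35)      # same as Person(name='Bob', age=35)
    #     Person('Bob', name='Bob')  # TypeError: multiple values for 'name'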
    def __delattr__(self, *args, **kwargs):
        """Handle deletions of fields"""
        field_name = args[0]
        if field_name in self._fields:
            default = self._fields[field_name].default
            if callable(default):
                default = default()
            setattr(self, field_name, default)
        else:
            super(BaseDocument, self).__delattr__(*args, **kwargs)

    def __setattr__(self, name, value):
        # Handle dynamic data only if an initialised dynamic document
        if self._dynamic and not self._dynamic_lock:

            field = None
            if not hasattr(self, name) and not name.startswith('_'):
                DynamicField = _import_class("DynamicField")
                field = DynamicField(db_field=name)
                field.name = name
                self._dynamic_fields[name] = field

            if not name.startswith('_'):
                value = self.__expand_dynamic_values(name, value)

            # Handle marking data as changed
            if name in self._dynamic_fields:
                self._data[name] = value
                if hasattr(self, '_changed_fields'):
                    self._mark_as_changed(name)

        if (self._is_document and not self._created and
                name in self._meta.get('shard_key', tuple()) and
                self._data.get(name) != value):
            OperationError = _import_class('OperationError')
            msg = "Shard Keys are immutable. Tried to update %s" % name
            raise OperationError(msg)

        # Check if the user has created a new instance of a class
        if (self._is_document and self._initialised
                and self._created and name == self._meta['id_field']):
            super(BaseDocument, self).__setattr__('_created', False)

        super(BaseDocument, self).__setattr__(name, value)

    def __getstate__(self):
        data = {}
        for k in ('_changed_fields', '_initialised', '_created'):
            if hasattr(self, k):
                data[k] = getattr(self, k)
        data['_data'] = self.to_mongo()
        return data

    def __setstate__(self, data):
        if isinstance(data["_data"], SON):
            data["_data"] = self.__class__._from_son(data["_data"])._data
        for k in ('_changed_fields', '_initialised', '_created', '_data'):
            setattr(self, k, data[k])

    def __iter__(self):
        if 'id' in self._fields and 'id' not in self._fields_ordered:
            return iter(('id', ) + self._fields_ordered)

        return iter(self._fields_ordered)

    def __getitem__(self, name):
        """Dictionary-style field access, return a field's value if present.
        """
        try:
            if name in self._fields:
                return getattr(self, name)
        except AttributeError:
            pass
        raise KeyError(name)

    def __setitem__(self, name, value):
        """Dictionary-style field access, set a field's value.
        """
        # Ensure that the field exists before setting its value
        if name not in self._fields:
            raise KeyError(name)
        return setattr(self, name, value)

    def __contains__(self, name):
        try:
            val = getattr(self, name)
            return val is not None
        except AttributeError:
            return False

    def __len__(self):
        return len(self._data)

    def __repr__(self):
        try:
            u = self.__str__()
        except (UnicodeEncodeError, UnicodeDecodeError):
            u = '[Bad Unicode data]'
        repr_type = type(u)
        return repr_type('<%s: %s>' % (self.__class__.__name__, u))

    def __str__(self):
        if hasattr(self, '__unicode__'):
            if PY3:
                return self.__unicode__()
            else:
                return unicode(self).encode('utf-8')
        return txt_type('%s object' % self.__class__.__name__)

    def __eq__(self, other):
        if isinstance(other, self.__class__) and hasattr(other, 'id'):
            if self.id == other.id:
                return True
        return False

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        if self.pk is None:
            # For new object
            return super(BaseDocument, self).__hash__()
        else:
            return hash(self.pk)
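    # Illustration (not in the original source): once a document has been
    # persisted, fields named in meta['shard_key'] refuse updates. For a
    # hypothetical LogEntry document with meta = {'shard_key': ('machine',)}:
    #
    #     log = LogEntry.objects.first()  # loaded docs have _created == False
    #     log.machine = 'web2'            # raises OperationError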
    def clean(self):
        """
        Hook for doing document level data cleaning before validation is run.

        Any ValidationError raised by this method will not be associated with
        a particular field; it will have a special-case association with the
        field defined by NON_FIELD_ERRORS.
        """
        pass
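    # Illustration (not in the original source): a hypothetical clean()
    # override that rejects inconsistent data before validation runs.
    #
    #     class Essay(Document):
    #         status = StringField(choices=('Published', 'Draft'))
    #         pub_date = DateTimeField()
    #
    #         def clean(self):
    #             # Drafts may not carry a publication date.
    #             if self.status == 'Draft' and self.pub_date is not None:
    #                 raise ValidationError('Draft entries may not have a '
    #                                       'publication date.')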
    def to_mongo(self):
        """Return as SON data ready for use with MongoDB.
        """
        data = SON()
        data["_id"] = None
        data['_cls'] = self._class_name

        for field_name in self:
            value = self._data.get(field_name, None)
            field = self._fields.get(field_name)

            if value is not None:
                value = field.to_mongo(value)

            # Handle self generating fields
            if value is None and field._auto_gen:
                value = field.generate()
                self._data[field_name] = value

            if value is not None:
                data[field.db_field] = value

        # If "_id" has not been set, then try and set it
        if data["_id"] is None:
            data["_id"] = self._data.get("id", None)

        if data['_id'] is None:
            data.pop('_id')

        # Only add _cls if allow_inheritance is True
        if (not hasattr(self, '_meta') or
                not self._meta.get('allow_inheritance', ALLOW_INHERITANCE)):
            data.pop('_cls')

        if not self._dynamic:
            return data

        # Sort dynamic fields by key
        dynamic_fields = sorted(self._dynamic_fields.iteritems(),
                                key=operator.itemgetter(0))
        for name, field in dynamic_fields:
            data[name] = field.to_mongo(self._data.get(name, None))

        return data
    def validate(self, clean=True):
        """Ensure that all fields' values are valid and that required fields
        are present.
        """
        # Ensure that each field is matched to a valid value
        errors = {}
        if clean:
            try:
                self.clean()
            except ValidationError, error:
                errors[NON_FIELD_ERRORS] = error

        # Get a list of tuples of field names and their current values
        fields = [(field, self._data.get(name))
                  for name, field in self._fields.items()]
        if self._dynamic:
            fields += [(field, self._data.get(name))
                       for name, field in self._dynamic_fields.items()]

        EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
        GenericEmbeddedDocumentField = _import_class("GenericEmbeddedDocumentField")

        for field, value in fields:
            if value is not None:
                try:
                    if isinstance(field, (EmbeddedDocumentField,
                                          GenericEmbeddedDocumentField)):
                        field._validate(value, clean=clean)
                    else:
                        field._validate(value)
                except ValidationError, error:
                    errors[field.name] = error.errors or error
                except (ValueError, AttributeError, AssertionError), error:
                    errors[field.name] = error
            elif field.required and not getattr(field, '_auto_gen', False):
                errors[field.name] = ValidationError('Field is required',
                                                     field_name=field.name)

        if errors:
            pk = "None"
            if hasattr(self, 'pk'):
                pk = self.pk
            elif self._instance:
                pk = self._instance.pk
            message = "ValidationError (%s:%s) " % (self._class_name, pk)
            raise ValidationError(message, errors=errors)

    def to_json(self):
        """Converts a document to JSON"""
        return json_util.dumps(self.to_mongo())

    @classmethod
    def from_json(cls, json_data):
        """Converts json data to an unsaved document instance"""
        return cls._from_son(json_util.loads(json_data))
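    # Illustration (not in the original source): a JSON round trip for a
    # hypothetical Page document.
    #
    #     page = Page(title='Hello')
    #     payload = page.to_json()         # '{"_cls": "Page", "title": "Hello"}'
    #     clone = Page.from_json(payload)  # an unsaved Page instance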
    def __expand_dynamic_values(self, name, value):
        """expand any dynamic values to their correct types / values"""
        if not isinstance(value, (dict, list, tuple)):
            return value

        is_list = False
        if not hasattr(value, 'items'):
            is_list = True
            value = dict([(k, v) for k, v in enumerate(value)])

        if not is_list and '_cls' in value:
            cls = get_document(value['_cls'])
            return cls(**value)

        data = {}
        for k, v in value.items():
            key = name if is_list else k
            data[k] = self.__expand_dynamic_values(key, v)

        if is_list:  # Convert back to a list
            data_items = sorted(data.items(), key=operator.itemgetter(0))
            value = [v for k, v in data_items]
        else:
            value = data

        # Convert lists / values so we can watch for any changes on them
        if (isinstance(value, (list, tuple)) and
                not isinstance(value, BaseList)):
            value = BaseList(value, self, name)
        elif isinstance(value, dict) and not isinstance(value, BaseDict):
            value = BaseDict(value, self, name)

        return value

    def _mark_as_changed(self, key):
        """Marks a key as explicitly changed by the user
        """
        if not key:
            return
        key = self._db_field_map.get(key, key)
        if (hasattr(self, '_changed_fields') and
                key not in self._changed_fields):
            self._changed_fields.append(key)

    def _clear_changed_fields(self):
        self._changed_fields = []
        EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
        for field_name, field in self._fields.iteritems():
            if (isinstance(field, ComplexBaseField) and
                    isinstance(field.field, EmbeddedDocumentField)):
                field_value = getattr(self, field_name, None)
                if field_value:
                    for idx in (field_value if isinstance(field_value, dict)
                                else xrange(len(field_value))):
                        field_value[idx]._clear_changed_fields()
            elif isinstance(field, EmbeddedDocumentField):
                field_value = getattr(self, field_name, None)
                if field_value:
                    field_value._clear_changed_fields()
    def _get_changed_fields(self, key='', inspected=None):
        """Returns a list of all fields that have explicitly been changed.
        """
        EmbeddedDocument = _import_class("EmbeddedDocument")
        DynamicEmbeddedDocument = _import_class("DynamicEmbeddedDocument")
        _changed_fields = []
        _changed_fields += getattr(self, '_changed_fields', [])

        inspected = inspected or set()
        if hasattr(self, 'id'):
            if self.id in inspected:
                return _changed_fields
            inspected.add(self.id)

        field_list = self._fields.copy()
        if self._dynamic:
            field_list.update(self._dynamic_fields)

        for field_name in field_list:

            db_field_name = self._db_field_map.get(field_name, field_name)
            key = '%s.' % db_field_name
            field = self._data.get(field_name, None)
            if hasattr(field, 'id'):
                if field.id in inspected:
                    continue
                inspected.add(field.id)

            if (isinstance(field, (EmbeddedDocument, DynamicEmbeddedDocument))
                    and db_field_name not in _changed_fields):
                # Find all embedded fields that have been changed
                changed = field._get_changed_fields(key, inspected)
                _changed_fields += ["%s%s" % (key, k) for k in changed if k]
            elif (isinstance(field, (list, tuple, dict)) and
                    db_field_name not in _changed_fields):
                # Loop list / dict fields as they contain documents
                # Determine the iterator to use
                if not hasattr(field, 'items'):
                    iterator = enumerate(field)
                else:
                    iterator = field.iteritems()
                for index, value in iterator:
                    if not hasattr(value, '_get_changed_fields'):
                        continue
                    list_key = "%s%s." % (key, index)
                    changed = value._get_changed_fields(list_key, inspected)
                    _changed_fields += ["%s%s" % (list_key, k)
                                        for k in changed if k]
        return _changed_fields

    def _delta(self):
        """Returns the delta (set, unset) of the changes for a document.
        Gets any values that have been explicitly changed.
        """
        # Handles cases where not loaded from_son but has _id
        doc = self.to_mongo()

        set_fields = self._get_changed_fields()
        set_data = {}
        unset_data = {}
        parts = []
        if hasattr(self, '_changed_fields'):
            set_data = {}
            # Fetch each set item from its path
            for path in set_fields:
                parts = path.split('.')
                d = doc
                new_path = []
                for p in parts:
                    if isinstance(d, DBRef):
                        break
                    elif isinstance(d, list) and p.isdigit():
                        d = d[int(p)]
                    elif hasattr(d, 'get'):
                        d = d.get(p)
                    new_path.append(p)
                path = '.'.join(new_path)
                set_data[path] = d
        else:
            set_data = doc
            if '_id' in set_data:
                del(set_data['_id'])

        # Determine if any changed items were actually unset.
        for path, value in set_data.items():
            if value or isinstance(value, (numbers.Number, bool)):
                continue

            # If we've set a value that isn't the default value, don't unset it.
            default = None
            if (self._dynamic and len(parts) and parts[0] in
                    self._dynamic_fields):
                del(set_data[path])
                unset_data[path] = 1
                continue
            elif path in self._fields:
                default = self._fields[path].default
            else:  # Perform a full lookup for lists / embedded lookups
                d = self
                parts = path.split('.')
                db_field_name = parts.pop()
                for p in parts:
                    if isinstance(d, list) and p.isdigit():
                        d = d[int(p)]
                    elif (hasattr(d, '__getattribute__') and
                          not isinstance(d, dict)):
                        real_path = d._reverse_db_field_map.get(p, p)
                        d = getattr(d, real_path)
                    else:
                        d = d.get(p)

                if hasattr(d, '_fields'):
                    field_name = d._reverse_db_field_map.get(db_field_name,
                                                             db_field_name)
                    if field_name in d._fields:
                        default = d._fields.get(field_name).default
                    else:
                        default = None

            if default is not None:
                if callable(default):
                    default = default()

            if default != value:
                continue

            del(set_data[path])
            unset_data[path] = 1
        return set_data, unset_data
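    # Illustration (not in the original source): a rough sketch of what
    # _delta() yields; actual paths depend on the fields changed.
    #
    #     page = Page.objects.first()
    #     page.title = 'New title'  # marked as changed via __setattr__
    #     del page.author           # resets the field to its default (None)
    #     page._delta()             # ({'title': 'New title'}, {'author': 1})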
    @classmethod
    def _get_collection_name(cls):
        """Returns the collection name for this class.
        """
        return cls._meta.get('collection', None)
@classmethod
|
||||||
|
def _from_son(cls, son, _auto_dereference=True):
|
||||||
|
"""Create an instance of a Document (subclass) from a PyMongo SON.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# get the class name from the document, falling back to the given
|
||||||
|
# class if unavailable
|
||||||
|
class_name = son.get('_cls', cls._class_name)
|
||||||
|
data = dict(("%s" % key, value) for key, value in son.iteritems())
|
||||||
|
if not UNICODE_KWARGS:
|
||||||
|
# python 2.6.4 and lower cannot handle unicode keys
|
||||||
|
# passed to class constructor example: cls(**data)
|
||||||
|
to_str_keys_recursive(data)
|
||||||
|
|
||||||
|
# Return correct subclass for document type
|
||||||
|
if class_name != cls._class_name:
|
||||||
|
cls = get_document(class_name)
|
||||||
|
|
||||||
|
changed_fields = []
|
||||||
|
errors_dict = {}
|
||||||
|
|
||||||
|
fields = cls._fields
|
||||||
|
if not _auto_dereference:
|
||||||
|
fields = copy.copy(fields)
|
||||||
|
|
||||||
|
for field_name, field in fields.iteritems():
|
||||||
|
field._auto_dereference = _auto_dereference
|
||||||
|
if field.db_field in data:
|
||||||
|
value = data[field.db_field]
|
||||||
|
try:
|
||||||
|
data[field_name] = (value if value is None
|
||||||
|
else field.to_python(value))
|
||||||
|
if field_name != field.db_field:
|
||||||
|
del data[field.db_field]
|
||||||
|
except (AttributeError, ValueError), e:
|
||||||
|
errors_dict[field_name] = e
|
||||||
|
elif field.default:
|
||||||
|
default = field.default
|
||||||
|
if callable(default):
|
||||||
|
default = default()
|
||||||
|
if isinstance(default, BaseDocument):
|
||||||
|
changed_fields.append(field_name)
|
||||||
|
|
||||||
|
if errors_dict:
|
||||||
|
errors = "\n".join(["%s - %s" % (k, v)
|
||||||
|
for k, v in errors_dict.items()])
|
||||||
|
msg = ("Invalid data to create a `%s` instance.\n%s"
|
||||||
|
% (cls._class_name, errors))
|
||||||
|
raise InvalidDocumentError(msg)
|
||||||
|
|
||||||
|
obj = cls(__auto_convert=False, **data)
|
||||||
|
obj._changed_fields = changed_fields
|
||||||
|
obj._created = False
|
||||||
|
if not _auto_dereference:
|
||||||
|
obj._fields = fields
|
||||||
|
return obj
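
An illustrative sketch (reusing the hypothetical Person model from
above): _from_son rebuilds a document instance from the raw dict/SON
that PyMongo hands back, marking it as loaded rather than new:

    raw = {'name': 'Ross', 'age': 30}
    p = Person._from_son(raw)
    print(p.name)      # 'Ross'
    print(p._created)  # False -- treated as loaded from the database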

    @classmethod
    def _build_index_specs(cls, meta_indexes):
        """Generate and merge the full index specs
        """

        geo_indices = cls._geo_indices()
        unique_indices = cls._unique_with_indexes()
        index_specs = [cls._build_index_spec(spec)
                       for spec in meta_indexes]

        def merge_index_specs(index_specs, indices):
            if not indices:
                return index_specs

            spec_fields = [v['fields']
                           for k, v in enumerate(index_specs)]
            # Merge unique_indexes with existing specs
            for k, v in enumerate(indices):
                if v['fields'] in spec_fields:
                    index_specs[spec_fields.index(v['fields'])].update(v)
                else:
                    index_specs.append(v)
            return index_specs

        index_specs = merge_index_specs(index_specs, geo_indices)
        index_specs = merge_index_specs(index_specs, unique_indices)
        return index_specs

    @classmethod
    def _build_index_spec(cls, spec):
        """Build a PyMongo index spec from a MongoEngine index spec.
        """
        if isinstance(spec, basestring):
            spec = {'fields': [spec]}
        elif isinstance(spec, (list, tuple)):
            spec = {'fields': list(spec)}
        elif isinstance(spec, dict):
            spec = dict(spec)

        index_list = []
        direction = None

        # Check to see if we need to include _cls
        allow_inheritance = cls._meta.get('allow_inheritance',
                                          ALLOW_INHERITANCE)
        include_cls = allow_inheritance and not spec.get('sparse', False)

        for key in spec['fields']:
            # If the spec is inherited, skip it
            if isinstance(key, (list, tuple)):
                continue

            # ASCENDING from +
            # DESCENDING from -
            # GEO2D from *
            direction = pymongo.ASCENDING
            if key.startswith("-"):
                direction = pymongo.DESCENDING
            elif key.startswith("*"):
                direction = pymongo.GEO2D
            if key.startswith(("+", "-", "*")):
                key = key[1:]

            # Use the real field name; do it manually because we need the
            # field objects for the next part (list field checking)
            parts = key.split('.')
            if parts in (['pk'], ['id'], ['_id']):
                key = '_id'
                fields = []
            else:
                fields = cls._lookup_field(parts)
                parts = [field if field == '_id' else field.db_field
                         for field in fields]
                key = '.'.join(parts)
            index_list.append((key, direction))

        # Don't add _cls to a geo index
        if include_cls and direction is not pymongo.GEO2D:
            index_list.insert(0, ('_cls', 1))

        if index_list:
            spec['fields'] = index_list
        if spec.get('sparse', False) and len(spec['fields']) > 1:
            raise ValueError(
                'Sparse indexes can only have one field in them. '
                'See https://jira.mongodb.org/browse/SERVER-2193')

        return spec
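
A sketch of the prefix handling (hypothetical BlogPost model with
created and title fields; the output shape may vary with inheritance
settings):

    spec = BlogPost._build_index_spec(['-created', 'title'])
    print(spec['fields'])
    # roughly: [('created', -1), ('title', 1)]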

    @classmethod
    def _unique_with_indexes(cls, namespace=""):
        """
        Find and set unique indexes
        """
        unique_indexes = []
        for field_name, field in cls._fields.items():
            sparse = False
            # Generate a list of indexes needed by uniqueness constraints
            if field.unique:
                field.required = True
                unique_fields = [field.db_field]

                # Add any unique_with fields to the back of the index spec
                if field.unique_with:
                    if isinstance(field.unique_with, basestring):
                        field.unique_with = [field.unique_with]

                    # Convert unique_with field names to real field names
                    unique_with = []
                    for other_name in field.unique_with:
                        parts = other_name.split('.')
                        # Lookup real name
                        parts = cls._lookup_field(parts)
                        name_parts = [part.db_field for part in parts]
                        unique_with.append('.'.join(name_parts))
                        # Unique field should be required
                        parts[-1].required = True
                        sparse = (not sparse and
                                  parts[-1].name not in cls.__dict__)
                    unique_fields += unique_with

                # Add the new index to the list
                fields = [("%s%s" % (namespace, f), pymongo.ASCENDING)
                          for f in unique_fields]
                index = {'fields': fields, 'unique': True, 'sparse': sparse}
                unique_indexes.append(index)

            # Grab any embedded document field unique indexes
            if (field.__class__.__name__ == "EmbeddedDocumentField" and
                    field.document_type != cls):
                field_namespace = "%s." % field_name
                doc_cls = field.document_type
                unique_indexes += doc_cls._unique_with_indexes(field_namespace)

        return unique_indexes
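
A sketch of the resulting spec for a unique_with pair (hypothetical
model; pymongo.ASCENDING is 1):

    from mongoengine import Document, StringField

    class User(Document):
        username = StringField(unique_with='site')
        site = StringField()

    print(User._unique_with_indexes())
    # roughly: [{'fields': [('username', 1), ('site', 1)],
    #            'unique': True, 'sparse': False}]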

    @classmethod
    def _geo_indices(cls, inspected=None, parent_field=None):
        inspected = inspected or []
        geo_indices = []
        inspected.append(cls)

        geo_field_type_names = ["EmbeddedDocumentField", "GeoPointField",
                                "PointField", "LineStringField",
                                "PolygonField"]

        geo_field_types = tuple([_import_class(field)
                                 for field in geo_field_type_names])

        for field in cls._fields.values():
            if not isinstance(field, geo_field_types):
                continue
            if hasattr(field, 'document_type'):
                field_cls = field.document_type
                if field_cls in inspected:
                    continue
                if hasattr(field_cls, '_geo_indices'):
                    geo_indices += field_cls._geo_indices(
                        inspected, parent_field=field.db_field)
            elif field._geo_index:
                field_name = field.db_field
                if parent_field:
                    field_name = "%s.%s" % (parent_field, field_name)
                geo_indices.append({'fields':
                                    [(field_name, field._geo_index)]})
        return geo_indices

    @classmethod
    def _lookup_field(cls, parts):
        """Lookup a field based on its attribute and return a list containing
        the field's parents and the field.
        """
        if not isinstance(parts, (list, tuple)):
            parts = [parts]
        fields = []
        field = None

        for field_name in parts:
            # Handle ListField indexing:
            if field_name.isdigit():
                new_field = field.field
                fields.append(field_name)
                continue

            if field is None:
                # Look up first field from the document
                if field_name == 'pk':
                    # Deal with "primary key" alias
                    field_name = cls._meta['id_field']
                if field_name in cls._fields:
                    field = cls._fields[field_name]
                elif cls._dynamic:
                    DynamicField = _import_class('DynamicField')
                    field = DynamicField(db_field=field_name)
                else:
                    raise LookUpError('Cannot resolve field "%s"'
                                      % field_name)
            else:
                ReferenceField = _import_class('ReferenceField')
                GenericReferenceField = _import_class('GenericReferenceField')
                if isinstance(field, (ReferenceField, GenericReferenceField)):
                    raise LookUpError('Cannot perform join in mongoDB: %s' %
                                      '__'.join(parts))
                if hasattr(getattr(field, 'field', None), 'lookup_member'):
                    new_field = field.field.lookup_member(field_name)
                else:
                    # Look up subfield on the previous field
                    new_field = field.lookup_member(field_name)
                if not new_field and isinstance(field, ComplexBaseField):
                    fields.append(field_name)
                    continue
                elif not new_field:
                    raise LookUpError('Cannot resolve field "%s"'
                                      % field_name)
                field = new_field  # update field to the new field type
            fields.append(field)
        return fields

    @classmethod
    def _translate_field_name(cls, field, sep='.'):
        """Translate a field attribute name to a database field name.
        """
        parts = field.split(sep)
        parts = [f.db_field for f in cls._lookup_field(parts)]
        return '.'.join(parts)
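
Together, _lookup_field and _translate_field_name map attribute paths
to database paths. A sketch with a hypothetical model whose field
declares a custom db_field:

    from mongoengine import Document, StringField

    class Page(Document):
        title = StringField(db_field='t')

    print(Page._translate_field_name('title'))  # 't'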

    def __set_field_display(self):
        """Dynamically set the display value for a field with choices"""
        for attr_name, field in self._fields.items():
            if field.choices:
                setattr(self,
                        'get_%s_display' % attr_name,
                        partial(self.__get_field_display, field=field))

    def __get_field_display(self, field):
        """Returns the display value for a choice field"""
        value = getattr(self, field.name)
        if field.choices and isinstance(field.choices[0], (list, tuple)):
            return dict(field.choices).get(value, value)
        return value
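
A sketch of the dynamically attached display helper (hypothetical
model):

    from mongoengine import Document, StringField

    class Ticket(Document):
        status = StringField(choices=(('o', 'Open'), ('c', 'Closed')))

    t = Ticket(status='o')
    print(t.get_status_display())  # 'Open'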

mongoengine/base/fields.py (new file, 496 lines)
@@ -0,0 +1,496 @@

import operator
import warnings
import weakref

from bson import DBRef, ObjectId, SON
import pymongo

from mongoengine.common import _import_class
from mongoengine.errors import ValidationError

from mongoengine.base.common import ALLOW_INHERITANCE
from mongoengine.base.datastructures import BaseDict, BaseList

__all__ = ("BaseField", "ComplexBaseField",
           "ObjectIdField", "GeoJsonBaseField")


class BaseField(object):
    """A base class for fields in a MongoDB document. Instances of this class
    may be added to subclasses of `Document` to define a document's schema.

    .. versionchanged:: 0.5 - added verbose and help text
    """

    name = None
    _geo_index = False
    _auto_gen = False  # Call `generate` to generate a value
    _auto_dereference = True

    # These track each time a Field instance is created. Used to retain order.
    # The auto_creation_counter is used for fields that MongoEngine implicitly
    # creates, creation_counter is used for all user-specified fields.
    creation_counter = 0
    auto_creation_counter = -1

    def __init__(self, db_field=None, name=None, required=False, default=None,
                 unique=False, unique_with=None, primary_key=False,
                 validation=None, choices=None, verbose_name=None,
                 help_text=None):
        self.db_field = (db_field or name) if not primary_key else '_id'
        if name:
            msg = "Fields' 'name' attribute deprecated in favour of 'db_field'"
            warnings.warn(msg, DeprecationWarning)
        self.required = required or primary_key
        self.default = default
        self.unique = bool(unique or unique_with)
        self.unique_with = unique_with
        self.primary_key = primary_key
        self.validation = validation
        self.choices = choices
        self.verbose_name = verbose_name
        self.help_text = help_text

        # Adjust the appropriate creation counter, and save our local copy.
        if self.db_field == '_id':
            self.creation_counter = BaseField.auto_creation_counter
            BaseField.auto_creation_counter -= 1
        else:
            self.creation_counter = BaseField.creation_counter
            BaseField.creation_counter += 1
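
A sketch of how the counters preserve declaration order, which is what
_fields_ordered is later built from in the metaclass (hypothetical
model):

    from mongoengine import Document, StringField

    class Sample(Document):
        first = StringField()
        second = StringField()

    print(Sample.first.creation_counter < Sample.second.creation_counter)
    # True -- fields sort in the order they were declared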

    def __get__(self, instance, owner):
        """Descriptor for retrieving a value from a field in a document. Do
        any necessary conversion between Python and MongoDB types.
        """
        if instance is None:
            # Document class being used rather than a document object
            return self
        # Get value from document instance if available, if not use default
        value = instance._data.get(self.name)

        if value is None:
            value = self.default
            # Allow callable default values
            if callable(value):
                value = value()

        EmbeddedDocument = _import_class('EmbeddedDocument')
        if isinstance(value, EmbeddedDocument) and value._instance is None:
            value._instance = weakref.proxy(instance)
        return value

    def __set__(self, instance, value):
        """Descriptor for assigning a value to a field in a document.
        """
        if instance._initialised:
            try:
                if (self.name not in instance._data or
                        instance._data[self.name] != value):
                    instance._mark_as_changed(self.name)
            except:
                # Values can't always be compared (e.g. naive vs tz-aware
                # datetimes), so mark the field as changed to be safe
                instance._mark_as_changed(self.name)
        instance._data[self.name] = value

    def error(self, message="", errors=None, field_name=None):
        """Raises a ValidationError.
        """
        field_name = field_name if field_name else self.name
        raise ValidationError(message, errors=errors, field_name=field_name)

    def to_python(self, value):
        """Convert a MongoDB-compatible type to a Python type.
        """
        return value

    def to_mongo(self, value):
        """Convert a Python type to a MongoDB-compatible type.
        """
        return self.to_python(value)

    def prepare_query_value(self, op, value):
        """Prepare a value that is being used in a query for PyMongo.
        """
        return value

    def validate(self, value, clean=True):
        """Perform validation on a value.
        """
        pass

    def _validate(self, value, **kwargs):
        Document = _import_class('Document')
        EmbeddedDocument = _import_class('EmbeddedDocument')
        # Check choices
        if self.choices:
            is_cls = isinstance(value, (Document, EmbeddedDocument))
            value_to_check = value.__class__ if is_cls else value
            err_msg = 'an instance' if is_cls else 'one'
            if isinstance(self.choices[0], (list, tuple)):
                option_keys = [k for k, v in self.choices]
                if value_to_check not in option_keys:
                    msg = ('Value must be %s of %s' %
                           (err_msg, unicode(option_keys)))
                    self.error(msg)
            elif value_to_check not in self.choices:
                msg = ('Value must be %s of %s' %
                       (err_msg, unicode(self.choices)))
                self.error(msg)

        # Check the validation argument
        if self.validation is not None:
            if callable(self.validation):
                if not self.validation(value):
                    self.error('Value does not match custom validation method')
            else:
                raise ValueError('validation argument for "%s" must be a '
                                 'callable.' % self.name)

        self.validate(value, **kwargs)
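
A sketch of the choices check in isolation (assumes StringField is
importable; the field name is set manually here only because no
document owns the field):

    from mongoengine import StringField
    from mongoengine.errors import ValidationError

    colour = StringField(choices=('red', 'green'))
    colour.name = 'colour'
    try:
        colour._validate('blue')
    except ValidationError, e:
        print(e)  # roughly: Value must be one of ('red', 'green')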


class ComplexBaseField(BaseField):
    """Handles complex fields, such as lists / dictionaries.

    Allows for nesting of embedded documents inside complex types.
    Handles the lazy dereferencing of a queryset by lazily dereferencing all
    items in a list / dict rather than one at a time.

    .. versionadded:: 0.5
    """

    field = None
    __dereference = False

    def __get__(self, instance, owner):
        """Descriptor to automatically dereference references.
        """
        if instance is None:
            # Document class being used rather than a document object
            return self

        ReferenceField = _import_class('ReferenceField')
        GenericReferenceField = _import_class('GenericReferenceField')
        dereference = (self._auto_dereference and
                       (self.field is None or
                        isinstance(self.field,
                                   (GenericReferenceField, ReferenceField))))

        self._auto_dereference = instance._fields[self.name]._auto_dereference
        if not self.__dereference and instance._initialised and dereference:
            instance._data[self.name] = self._dereference(
                instance._data.get(self.name), max_depth=1, instance=instance,
                name=self.name
            )

        value = super(ComplexBaseField, self).__get__(instance, owner)

        # Convert lists / values so we can watch for any changes on them
        if (isinstance(value, (list, tuple)) and
                not isinstance(value, BaseList)):
            value = BaseList(value, instance, self.name)
            instance._data[self.name] = value
        elif isinstance(value, dict) and not isinstance(value, BaseDict):
            value = BaseDict(value, instance, self.name)
            instance._data[self.name] = value

        if (self._auto_dereference and instance._initialised and
                isinstance(value, (BaseList, BaseDict)) and
                not value._dereferenced):
            value = self._dereference(
                value, max_depth=1, instance=instance, name=self.name
            )
            value._dereferenced = True
            instance._data[self.name] = value

        return value

    def __set__(self, instance, value):
        """Descriptor for assigning a value to a field in a document.
        """
        instance._data[self.name] = value
        instance._mark_as_changed(self.name)

    def to_python(self, value):
        """Convert a MongoDB-compatible type to a Python type.
        """
        Document = _import_class('Document')

        if isinstance(value, basestring):
            return value

        if hasattr(value, 'to_python'):
            return value.to_python()

        is_list = False
        if not hasattr(value, 'items'):
            try:
                is_list = True
                value = dict([(k, v) for k, v in enumerate(value)])
            except TypeError:  # Not iterable, return the value as-is
                return value

        if self.field:
            value_dict = dict([(key, self.field.to_python(item))
                               for key, item in value.items()])
        else:
            value_dict = {}
            for k, v in value.items():
                if isinstance(v, Document):
                    # We need the id from the saved object to create the DBRef
                    if v.pk is None:
                        self.error('You can only reference documents once they'
                                   ' have been saved to the database')
                    collection = v._get_collection_name()
                    value_dict[k] = DBRef(collection, v.pk)
                elif hasattr(v, 'to_python'):
                    value_dict[k] = v.to_python()
                else:
                    value_dict[k] = self.to_python(v)

        if is_list:  # Convert back to a list
            return [v for k, v in sorted(value_dict.items(),
                                         key=operator.itemgetter(0))]
        return value_dict

    def to_mongo(self, value):
        """Convert a Python type to a MongoDB-compatible type.
        """
        Document = _import_class("Document")
        EmbeddedDocument = _import_class("EmbeddedDocument")
        GenericReferenceField = _import_class("GenericReferenceField")

        if isinstance(value, basestring):
            return value

        if hasattr(value, 'to_mongo'):
            if isinstance(value, Document):
                return GenericReferenceField().to_mongo(value)
            cls = value.__class__
            val = value.to_mongo()
            # If it's an embedded document, add _cls
            if isinstance(value, EmbeddedDocument):
                val['_cls'] = cls.__name__
            return val

        is_list = False
        if not hasattr(value, 'items'):
            try:
                is_list = True
                value = dict([(k, v) for k, v in enumerate(value)])
            except TypeError:  # Not iterable, return the value as-is
                return value

        if self.field:
            value_dict = dict([(key, self.field.to_mongo(item))
                               for key, item in value.iteritems()])
        else:
            value_dict = {}
            for k, v in value.iteritems():
                if isinstance(v, Document):
                    # We need the id from the saved object to create the DBRef
                    if v.pk is None:
                        self.error('You can only reference documents once they'
                                   ' have been saved to the database')

                    # If it's a document that is not inheritable it won't have
                    # any _cls data, so using a generic reference allows us to
                    # dereference it later
                    meta = getattr(v, '_meta', {})
                    allow_inheritance = (
                        meta.get('allow_inheritance', ALLOW_INHERITANCE)
                        is True)
                    if not allow_inheritance and not self.field:
                        value_dict[k] = GenericReferenceField().to_mongo(v)
                    else:
                        collection = v._get_collection_name()
                        value_dict[k] = DBRef(collection, v.pk)
                elif hasattr(v, 'to_mongo'):
                    cls = v.__class__
                    val = v.to_mongo()
                    # If it's a document or embedded document, add _cls
                    if isinstance(v, (Document, EmbeddedDocument)):
                        val['_cls'] = cls.__name__
                    value_dict[k] = val
                else:
                    value_dict[k] = self.to_mongo(v)

        if is_list:  # Convert back to a list
            return [v for k, v in sorted(value_dict.items(),
                                         key=operator.itemgetter(0))]
        return value_dict

    def validate(self, value):
        """If field is provided ensure the value is valid.
        """
        errors = {}
        if self.field:
            if hasattr(value, 'iteritems') or hasattr(value, 'items'):
                sequence = value.iteritems()
            else:
                sequence = enumerate(value)
            for k, v in sequence:
                try:
                    self.field._validate(v)
                except ValidationError, error:
                    errors[k] = error.errors or error
                except (ValueError, AssertionError), error:
                    errors[k] = error

            if errors:
                field_class = self.field.__class__.__name__
                self.error('Invalid %s item (%s)' % (field_class, value),
                           errors=errors)
        # Don't allow empty values if required
        if self.required and not value:
            self.error('Field is required and cannot be empty')
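
A sketch of how per-item errors are aggregated and keyed by the
offending index (hypothetical model):

    from mongoengine import Document, ListField, StringField
    from mongoengine.errors import ValidationError

    class Post(Document):
        tags = ListField(StringField(max_length=3))

    try:
        Post(tags=['ok', 'too-long']).validate()
    except ValidationError, e:
        print(e.errors)  # roughly: {'tags': {1: ...}}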

    def prepare_query_value(self, op, value):
        return self.to_mongo(value)

    def lookup_member(self, member_name):
        if self.field:
            return self.field.lookup_member(member_name)
        return None

    def _set_owner_document(self, owner_document):
        if self.field:
            self.field.owner_document = owner_document
        self._owner_document = owner_document

    def _get_owner_document(self, owner_document):
        self._owner_document = owner_document

    owner_document = property(_get_owner_document, _set_owner_document)

    @property
    def _dereference(self):
        if not self.__dereference:
            DeReference = _import_class("DeReference")
            self.__dereference = DeReference()  # Cached
        return self.__dereference


class ObjectIdField(BaseField):
    """A field wrapper around MongoDB's ObjectIds.
    """

    def to_python(self, value):
        if not isinstance(value, ObjectId):
            value = ObjectId(value)
        return value

    def to_mongo(self, value):
        if not isinstance(value, ObjectId):
            try:
                return ObjectId(unicode(value))
            except Exception, e:
                # e.message attribute has been deprecated since Python 2.6
                self.error(unicode(e))
        return value

    def prepare_query_value(self, op, value):
        return self.to_mongo(value)

    def validate(self, value):
        try:
            ObjectId(unicode(value))
        except:
            self.error('Invalid Object ID')


class GeoJsonBaseField(BaseField):
    """A GeoJSON field storing a GeoJSON-style object.
    .. versionadded:: 0.8
    """

    _geo_index = pymongo.GEOSPHERE
    _type = "GeoBase"

    def __init__(self, auto_index=True, *args, **kwargs):
        """
        :param auto_index: Automatically create a "2dsphere" index. Defaults
            to `True`.
        """
        self._name = "%sField" % self._type
        if not auto_index:
            self._geo_index = False
        super(GeoJsonBaseField, self).__init__(*args, **kwargs)

    def validate(self, value):
        """Validate the GeoJSON object based on its type
        """
        if isinstance(value, dict):
            if set(value.keys()) == set(['type', 'coordinates']):
                if value['type'] != self._type:
                    self.error('%s type must be "%s"' %
                               (self._name, self._type))
                return self.validate(value['coordinates'])
            else:
                self.error('%s can only accept a valid GeoJson dictionary'
                           ' or lists of (x, y)' % self._name)
                return
        elif not isinstance(value, (list, tuple)):
            self.error('%s can only accept lists of [x, y]' % self._name)
            return

        validate = getattr(self, "_validate_%s" % self._type.lower())
        error = validate(value)
        if error:
            self.error(error)

    def _validate_polygon(self, value):
        if not isinstance(value, (list, tuple)):
            return 'Polygons must contain list of linestrings'

        # Quick and dirty validator
        try:
            value[0][0][0]
        except:
            return "Invalid Polygon must contain at least one valid linestring"

        errors = []
        for val in value:
            error = self._validate_linestring(val, False)
            if not error and val[0] != val[-1]:
                error = 'LineStrings must start and end at the same point'
            if error and error not in errors:
                errors.append(error)
        if errors:
            return "Invalid Polygon:\n%s" % ", ".join(errors)

    def _validate_linestring(self, value, top_level=True):
        """Validates a linestring"""
        if not isinstance(value, (list, tuple)):
            return 'LineStrings must contain list of coordinate pairs'

        # Quick and dirty validator
        try:
            value[0][0]
        except:
            return "Invalid LineString must contain at least one valid point"

        errors = []
        for val in value:
            error = self._validate_point(val)
            if error and error not in errors:
                errors.append(error)
        if errors:
            if top_level:
                return "Invalid LineString:\n%s" % ", ".join(errors)
            else:
                return "%s" % ", ".join(errors)

    def _validate_point(self, value):
        """Validate each set of coords"""
        if not isinstance(value, (list, tuple)):
            return 'Points must be a list of coordinate pairs'
        elif not len(value) == 2:
            return "Value (%s) must be a two-dimensional point" % repr(value)
        elif (not isinstance(value[0], (float, int)) or
              not isinstance(value[1], (float, int))):
            return "Both values (%s) in point must be float or int" % repr(value)

    def to_mongo(self, value):
        if isinstance(value, dict):
            return value
        return SON([("type", self._type), ("coordinates", value)])
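
A sketch using the PointField subclass defined in mongoengine.fields: a
bare coordinate list is wrapped into a SON with the GeoJSON type, while
an already-formed dict passes through untouched:

    from mongoengine.fields import PointField

    f = PointField()
    print(f.to_mongo([40.3, 18.2]))
    # SON([('type', 'Point'), ('coordinates', [40.3, 18.2])])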

mongoengine/base/metaclasses.py (new file, 397 lines)
@@ -0,0 +1,397 @@

import warnings

import pymongo

from mongoengine.common import _import_class
from mongoengine.errors import InvalidDocumentError
from mongoengine.python_support import PY3
from mongoengine.queryset import (DO_NOTHING, DoesNotExist,
                                  MultipleObjectsReturned,
                                  QuerySet, QuerySetManager)

from mongoengine.base.common import _document_registry, ALLOW_INHERITANCE
from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField

__all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass')


class DocumentMetaclass(type):
    """Metaclass for all documents.
    """

    def __new__(cls, name, bases, attrs):
        flattened_bases = cls._get_bases(bases)
        super_new = super(DocumentMetaclass, cls).__new__

        # If a base class, just call super
        metaclass = attrs.get('my_metaclass')
        if metaclass and issubclass(metaclass, DocumentMetaclass):
            return super_new(cls, name, bases, attrs)

        attrs['_is_document'] = attrs.get('_is_document', False)

        # EmbeddedDocuments could have meta data for inheritance
        if 'meta' in attrs:
            attrs['_meta'] = attrs.pop('meta')

        # EmbeddedDocuments should inherit meta data
        if '_meta' not in attrs:
            meta = MetaDict()
            for base in flattened_bases[::-1]:
                # Add any mixin metadata from plain objects
                if hasattr(base, 'meta'):
                    meta.merge(base.meta)
                elif hasattr(base, '_meta'):
                    meta.merge(base._meta)
            attrs['_meta'] = meta

        # Handle document Fields

        # Merge all fields from subclasses
        doc_fields = {}
        for base in flattened_bases[::-1]:
            if hasattr(base, '_fields'):
                doc_fields.update(base._fields)

            # Standard object mixin - merge in any Fields
            if not hasattr(base, '_meta'):
                base_fields = {}
                for attr_name, attr_value in base.__dict__.iteritems():
                    if not isinstance(attr_value, BaseField):
                        continue
                    attr_value.name = attr_name
                    if not attr_value.db_field:
                        attr_value.db_field = attr_name
                    base_fields[attr_name] = attr_value

                doc_fields.update(base_fields)

        # Discover any document fields
        field_names = {}
        for attr_name, attr_value in attrs.iteritems():
            if not isinstance(attr_value, BaseField):
                continue
            attr_value.name = attr_name
            if not attr_value.db_field:
                attr_value.db_field = attr_name
            doc_fields[attr_name] = attr_value

            # Count names to ensure no db_field redefinitions
            field_names[attr_value.db_field] = field_names.get(
                attr_value.db_field, 0) + 1

        # Ensure no duplicate db_fields
        duplicate_db_fields = [k for k, v in field_names.items() if v > 1]
        if duplicate_db_fields:
            msg = ("Multiple db_fields defined for: %s " %
                   ", ".join(duplicate_db_fields))
            raise InvalidDocumentError(msg)

        # Set _fields and db_field maps
        attrs['_fields'] = doc_fields
        attrs['_db_field_map'] = dict([(k, getattr(v, 'db_field', k))
                                       for k, v in doc_fields.iteritems()])
        attrs['_fields_ordered'] = tuple(i[1] for i in sorted(
            (v.creation_counter, v.name)
            for v in doc_fields.itervalues()))
        attrs['_reverse_db_field_map'] = dict(
            (v, k) for k, v in attrs['_db_field_map'].iteritems())

        #
        # Set document hierarchy
        #
        superclasses = ()
        class_name = [name]
        for base in flattened_bases:
            if (not getattr(base, '_is_base_cls', True) and
                    not getattr(base, '_meta', {}).get('abstract', True)):
                # Collate hierarchy for _cls and _subclasses
                class_name.append(base.__name__)

            if hasattr(base, '_meta'):
                # Warn if allow_inheritance isn't set and prevent
                # inheritance of classes where inheritance is set to False
                allow_inheritance = base._meta.get('allow_inheritance',
                                                   ALLOW_INHERITANCE)
                if (allow_inheritance is not True and
                        not base._meta.get('abstract')):
                    raise ValueError('Document %s may not be subclassed' %
                                     base.__name__)

        # Get superclasses from last base superclass
        document_bases = [b for b in flattened_bases
                          if hasattr(b, '_class_name')]
        if document_bases:
            superclasses = document_bases[0]._superclasses
            superclasses += (document_bases[0]._class_name, )

        _cls = '.'.join(reversed(class_name))
        attrs['_class_name'] = _cls
        attrs['_superclasses'] = superclasses
        attrs['_subclasses'] = (_cls, )
        attrs['_types'] = attrs['_subclasses']  # TODO: deprecate _types

        # Create the new_class
        new_class = super_new(cls, name, bases, attrs)

        # Set _subclasses
        for base in document_bases:
            if _cls not in base._subclasses:
                base._subclasses += (_cls,)
            base._types = base._subclasses  # TODO: deprecate _types

        Document, EmbeddedDocument, DictField = cls._import_classes()

        if issubclass(new_class, Document):
            new_class._collection = None

        # Add class to the _document_registry
        _document_registry[new_class._class_name] = new_class

        # In Python 2, user-defined method objects have special read-only
        # attributes 'im_func' and 'im_self' which contain the function obj
        # and class instance object respectively. With Python 3 these special
        # attributes have been replaced by __func__ and __self__. The Blinker
        # module continues to use im_func and im_self, so the code below
        # copies __func__ into im_func and __self__ into im_self for
        # classmethod objects in Document derived classes.
        if PY3:
            for key, val in new_class.__dict__.items():
                if isinstance(val, classmethod):
                    f = val.__get__(new_class)
                    if hasattr(f, '__func__') and not hasattr(f, 'im_func'):
                        f.__dict__.update({'im_func': getattr(f, '__func__')})
                    if hasattr(f, '__self__') and not hasattr(f, 'im_self'):
                        f.__dict__.update({'im_self': getattr(f, '__self__')})

        # Handle delete rules
        for field in new_class._fields.itervalues():
            f = field
            f.owner_document = new_class
            delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING)
            if isinstance(f, ComplexBaseField) and hasattr(f, 'field'):
                delete_rule = getattr(f.field,
                                      'reverse_delete_rule',
                                      DO_NOTHING)
                if isinstance(f, DictField) and delete_rule != DO_NOTHING:
                    msg = ("Reverse delete rules are not supported "
                           "for %s (field: %s)" %
                           (field.__class__.__name__, field.name))
                    raise InvalidDocumentError(msg)

                f = field.field

            if delete_rule != DO_NOTHING:
                if issubclass(new_class, EmbeddedDocument):
                    msg = ("Reverse delete rules are not supported for "
                           "EmbeddedDocuments (field: %s)" % field.name)
                    raise InvalidDocumentError(msg)
                f.document_type.register_delete_rule(new_class,
                                                     field.name, delete_rule)

            if (field.name and hasattr(Document, field.name) and
                    EmbeddedDocument not in new_class.mro()):
                msg = ("%s is a document method and not a valid "
                       "field name" % field.name)
                raise InvalidDocumentError(msg)

        return new_class

    def add_to_class(self, name, value):
        setattr(self, name, value)

    @classmethod
    def _get_bases(cls, bases):
        if isinstance(bases, BasesTuple):
            return bases
        seen = []
        bases = cls.__get_bases(bases)
        unique_bases = (b for b in bases if not (b in seen or seen.append(b)))
        return BasesTuple(unique_bases)

    @classmethod
    def __get_bases(cls, bases):
        for base in bases:
            if base is object:
                continue
            yield base
            for child_base in cls.__get_bases(base.__bases__):
                yield child_base

    @classmethod
    def _import_classes(cls):
        Document = _import_class('Document')
        EmbeddedDocument = _import_class('EmbeddedDocument')
        DictField = _import_class('DictField')
        return (Document, EmbeddedDocument, DictField)


class TopLevelDocumentMetaclass(DocumentMetaclass):
    """Metaclass for top-level documents (i.e. documents that have their own
    collection in the database).
    """

    def __new__(cls, name, bases, attrs):
        flattened_bases = cls._get_bases(bases)
        super_new = super(TopLevelDocumentMetaclass, cls).__new__

        # Set default _meta data if base class, otherwise get user defined meta
        if (attrs.get('my_metaclass') == TopLevelDocumentMetaclass):
            # defaults
            attrs['_meta'] = {
                'abstract': True,
                'max_documents': None,
                'max_size': None,
                'ordering': [],  # default ordering applied at runtime
                'indexes': [],  # indexes to be ensured at runtime
                'id_field': None,
                'index_background': False,
                'index_drop_dups': False,
                'index_opts': None,
                'delete_rules': None,
                'allow_inheritance': None,
            }
            attrs['_is_base_cls'] = True
            attrs['_meta'].update(attrs.get('meta', {}))
        else:
            attrs['_meta'] = attrs.get('meta', {})
            # Explicitly set abstract to false unless set
            attrs['_meta']['abstract'] = attrs['_meta'].get('abstract', False)
            attrs['_is_base_cls'] = False

        # Set flag marking as document class - as opposed to an object mixin
        attrs['_is_document'] = True

        # Ensure queryset_class is inherited
        if 'objects' in attrs:
            manager = attrs['objects']
            if hasattr(manager, 'queryset_class'):
                attrs['_meta']['queryset_class'] = manager.queryset_class

        # Clean up top level meta
        if 'meta' in attrs:
            del(attrs['meta'])

        # Find the parent document class
        parent_doc_cls = [b for b in flattened_bases
                          if b.__class__ == TopLevelDocumentMetaclass]
        parent_doc_cls = None if not parent_doc_cls else parent_doc_cls[0]

        # Prevent classes setting collection different to their parents
        # If parent wasn't an abstract class
        if (parent_doc_cls and 'collection' in attrs.get('_meta', {})
                and not parent_doc_cls._meta.get('abstract', True)):
            msg = "Trying to set a collection on a subclass (%s)" % name
            warnings.warn(msg, SyntaxWarning)
            del(attrs['_meta']['collection'])

        # Ensure abstract documents have abstract bases
        if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'):
            if (parent_doc_cls and
                    not parent_doc_cls._meta.get('abstract', False)):
                msg = "Abstract document cannot have non-abstract base"
                raise ValueError(msg)
            return super_new(cls, name, bases, attrs)

        # Merge base class metas.
        # Uses a special MetaDict that handles various merging rules
        meta = MetaDict()
        for base in flattened_bases[::-1]:
            # Add any mixin metadata from plain objects
            if hasattr(base, 'meta'):
                meta.merge(base.meta)
            elif hasattr(base, '_meta'):
                meta.merge(base._meta)

            # Set collection in the meta if its callable
            if (getattr(base, '_is_document', False) and
                    not base._meta.get('abstract')):
                collection = meta.get('collection', None)
                if callable(collection):
                    meta['collection'] = collection(base)

        meta.merge(attrs.get('_meta', {}))  # Top level meta

        # Only simple classes (direct subclasses of Document)
        # may set allow_inheritance to False
        simple_class = all([b._meta.get('abstract')
                            for b in flattened_bases if hasattr(b, '_meta')])
        if (not simple_class and meta['allow_inheritance'] is False and
                not meta['abstract']):
            raise ValueError('Only direct subclasses of Document may set '
                             '"allow_inheritance" to False')

        # Set default collection name
        if 'collection' not in meta:
            meta['collection'] = ''.join('_%s' % c if c.isupper() else c
                                         for c in name).strip('_').lower()
        attrs['_meta'] = meta

        # Call super and get the new class
        new_class = super_new(cls, name, bases, attrs)

        meta = new_class._meta

        # Set index specifications
        meta['index_specs'] = new_class._build_index_specs(meta['indexes'])

        # If collection is a callable - call it and set the value
        collection = meta.get('collection')
        if callable(collection):
            new_class._meta['collection'] = collection(new_class)

        # Provide a default queryset unless exists or one has been set
        if 'objects' not in dir(new_class):
            new_class.objects = QuerySetManager()

        # Validate the fields and set primary key if needed
        for field_name, field in new_class._fields.iteritems():
            if field.primary_key:
                # Ensure only one primary key is set
                current_pk = new_class._meta.get('id_field')
                if current_pk and current_pk != field_name:
                    raise ValueError('Cannot override primary key field')

                # Set primary key
                if not current_pk:
                    new_class._meta['id_field'] = field_name
                    new_class.id = field

        # Set primary key if not defined by the document
        if not new_class._meta.get('id_field'):
            new_class._meta['id_field'] = 'id'
            new_class._fields['id'] = ObjectIdField(db_field='_id')
            new_class._fields['id'].name = 'id'
            new_class.id = new_class._fields['id']

        # Merge in exceptions with parent hierarchy
        exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned)
        module = attrs.get('__module__')
        for exc in exceptions_to_merge:
            name = exc.__name__
            parents = tuple(getattr(base, name) for base in flattened_bases
                            if hasattr(base, name)) or (exc,)
            # Create new exception and set to new_class
            exception = type(name, parents, {'__module__': module})
            setattr(new_class, name, exception)

        return new_class
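
The default collection name is the class name converted to snake case.
A standalone sketch of the expression above:

    name = 'BlogPost'
    collection = ''.join('_%s' % c if c.isupper() else c
                         for c in name).strip('_').lower()
    print(collection)  # 'blog_post'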


class MetaDict(dict):
    """Custom dictionary for meta classes.
    Handles the merging of set indexes
    """
    _merge_options = ('indexes',)

    def merge(self, new_options):
        for k, v in new_options.iteritems():
            if k in self._merge_options:
                self[k] = self.get(k, []) + v
            else:
                self[k] = v


class BasesTuple(tuple):
    """Special class to handle introspection of bases tuple in __new__"""
    pass

mongoengine/common.py (new file, 37 lines)
@@ -0,0 +1,37 @@

_class_registry_cache = {}


def _import_class(cls_name):
    """Cached mechanism for imports"""
    if cls_name in _class_registry_cache:
        return _class_registry_cache.get(cls_name)

    doc_classes = ('Document', 'DynamicEmbeddedDocument', 'EmbeddedDocument',
                   'MapReduceDocument')
    field_classes = ('DictField', 'DynamicField', 'EmbeddedDocumentField',
                     'FileField', 'GenericReferenceField',
                     'GenericEmbeddedDocumentField', 'GeoPointField',
                     'PointField', 'LineStringField', 'PolygonField',
                     'ReferenceField', 'StringField', 'ComplexBaseField')
    queryset_classes = ('OperationError',)
    deref_classes = ('DeReference',)

    if cls_name in doc_classes:
        from mongoengine import document as module
        import_classes = doc_classes
    elif cls_name in field_classes:
        from mongoengine import fields as module
        import_classes = field_classes
    elif cls_name in queryset_classes:
        from mongoengine import queryset as module
        import_classes = queryset_classes
    elif cls_name in deref_classes:
        from mongoengine import dereference as module
        import_classes = deref_classes
    else:
        raise ValueError('No import set for: %s' % cls_name)

    for cls in import_classes:
        _class_registry_cache[cls] = getattr(module, cls)

    return _class_registry_cache.get(cls_name)
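
A sketch of the cache behaviour: the first call populates
_class_registry_cache for the whole class group, so repeated lookups
are plain dict hits returning the same object:

    from mongoengine.common import _import_class

    Document = _import_class('Document')
    print(_import_class('Document') is Document)  # True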

mongoengine/connection.py
@@ -1,46 +1,170 @@
-from pymongo import Connection
-
-
-__all__ = ['ConnectionError', 'connect']
-
-
-_connection_settings = {
-    'host': 'localhost',
-    'port': 27017,
-    'pool_size': 1,
-}
-_connection = None
-
-_db = None
-
-
-class ConnectionError(Exception):
-    pass
-
-
-def _get_connection():
-    global _connection
-    if _connection is None:
-        _connection = Connection(**_connection_settings)
-    return _connection
-
-
-def _get_db():
-    global _db
-    if _db is None:
-        raise ConnectionError('Not connected to database')
-    return _db
-
-
-def connect(db, username=None, password=None, **kwargs):
-    """Connect to the database specified by the 'db' argument. Connection
-    settings may be provided here as well if the database is not running on
-    the default port on localhost. If authentication is needed, provide
-    username and password arguments as well.
-    """
-    global _db
-    _connection_settings.update(kwargs)
-    connection = _get_connection()
-    # Get DB from connection and auth if necessary
-    _db = connection[db]
-    if username is not None and password is not None:
-        _db.authenticate(username, password)
+import pymongo
+from pymongo import MongoClient, MongoReplicaSetClient, uri_parser
+
+
+__all__ = ['ConnectionError', 'connect', 'register_connection',
+           'DEFAULT_CONNECTION_NAME']
+
+
+DEFAULT_CONNECTION_NAME = 'default'
+
+
+class ConnectionError(Exception):
+    pass
+
+
+_connection_settings = {}
+_connections = {}
+_dbs = {}
+
+
+def register_connection(alias, name, host='localhost', port=27017,
+                        is_slave=False, read_preference=False, slaves=None,
+                        username=None, password=None, **kwargs):
+    """Add a connection.
+
+    :param alias: the name that will be used to refer to this connection
+        throughout MongoEngine
+    :param name: the name of the specific database to use
+    :param host: the host name of the :program:`mongod` instance to connect to
+    :param port: the port that the :program:`mongod` instance is running on
+    :param is_slave: whether the connection can act as a slave
+        ** Deprecated pymongo 2.0.1+
+    :param read_preference: The read preference for the collection
+        ** Added pymongo 2.1
+    :param slaves: a list of aliases of slave connections; each of these must
+        be a registered connection that has :attr:`is_slave` set to ``True``
+    :param username: username to authenticate with
+    :param password: password to authenticate with
+    :param kwargs: allow ad-hoc parameters to be passed into the pymongo driver
+
+    """
+    global _connection_settings
+
+    conn_settings = {
+        'name': name,
+        'host': host,
+        'port': port,
+        'is_slave': is_slave,
+        'slaves': slaves or [],
+        'username': username,
+        'password': password,
+        'read_preference': read_preference
+    }
+
+    # Handle uri style connections
+    if "://" in host:
+        uri_dict = uri_parser.parse_uri(host)
+        if uri_dict.get('database') is None:
+            raise ConnectionError("If using URI style connection include "
+                                  "database name in string")
+        conn_settings.update({
+            'host': host,
+            'name': uri_dict.get('database'),
+            'username': uri_dict.get('username'),
+            'password': uri_dict.get('password'),
+            'read_preference': read_preference,
+        })
+        if "replicaSet" in host:
+            conn_settings['replicaSet'] = True
+
+    conn_settings.update(kwargs)
+    _connection_settings[alias] = conn_settings
+
+
+def disconnect(alias=DEFAULT_CONNECTION_NAME):
+    global _connections
+    global _dbs
+
+    if alias in _connections:
+        get_connection(alias=alias).disconnect()
+        del _connections[alias]
+    if alias in _dbs:
+        del _dbs[alias]
+
+
+def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
+    global _connections
+    # Connect to the database if not already connected
+    if reconnect:
+        disconnect(alias)
+
+    if alias not in _connections:
+        if alias not in _connection_settings:
+            msg = 'Connection with alias "%s" has not been defined' % alias
+            if alias == DEFAULT_CONNECTION_NAME:
+                msg = 'You have not defined a default connection'
+            raise ConnectionError(msg)
+        conn_settings = _connection_settings[alias].copy()
+
+        if hasattr(pymongo, 'version_tuple'):  # Support for 2.1+
+            conn_settings.pop('name', None)
+            conn_settings.pop('slaves', None)
+            conn_settings.pop('is_slave', None)
+            conn_settings.pop('username', None)
+            conn_settings.pop('password', None)
+        else:
+            # Get all the slave connections
+            if 'slaves' in conn_settings:
+                slaves = []
+                for slave_alias in conn_settings['slaves']:
+                    slaves.append(get_connection(slave_alias))
+                conn_settings['slaves'] = slaves
+                conn_settings.pop('read_preference', None)
+
+        connection_class = MongoClient
+        if 'replicaSet' in conn_settings:
+            conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
+            # Discard port since it can't be used on MongoReplicaSetClient
+            conn_settings.pop('port', None)
+            # Discard replicaSet if it is not a base string
+            if not isinstance(conn_settings['replicaSet'], basestring):
+                conn_settings.pop('replicaSet', None)
+            connection_class = MongoReplicaSetClient
+
+        try:
+            _connections[alias] = connection_class(**conn_settings)
+        except Exception, e:
+            raise ConnectionError("Cannot connect to database %s :\n%s"
+                                  % (alias, e))
+    return _connections[alias]
+
+
+def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
+    global _dbs
+    if reconnect:
+        disconnect(alias)
+
+    if alias not in _dbs:
+        conn = get_connection(alias)
+        conn_settings = _connection_settings[alias]
+        db = conn[conn_settings['name']]
+        # Authenticate if necessary
+        if conn_settings['username'] and conn_settings['password']:
+            db.authenticate(conn_settings['username'],
+                            conn_settings['password'])
+        _dbs[alias] = db
+    return _dbs[alias]
+
+
+def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs):
+    """Connect to the database specified by the 'db' argument.
+
+    Connection settings may be provided here as well if the database is not
+    running on the default port on localhost. If authentication is needed,
+    provide username and password arguments as well.
+
+    Multiple databases are supported by using aliases. Provide a separate
+    `alias` to connect to a different instance of :program:`mongod`.
+
+    .. versionchanged:: 0.6 - added multiple database support.
+    """
+    global _connections
+    if alias not in _connections:
+        register_connection(alias, db, **kwargs)
+
+    return get_connection(alias)
+
+
+# Support old naming convention
+_get_connection = get_connection
+_get_db = get_db
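
A sketch of the new registry-based API above (assumes a local mongod;
the database names are hypothetical):

    from mongoengine import connect
    from mongoengine.connection import register_connection, get_db

    connect('maindb')                             # registers and opens 'default'
    register_connection('reporting', 'reportdb')  # registered, opened lazily

    print(get_db().name)             # 'maindb'
    print(get_db('reporting').name)  # 'reportdb'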

mongoengine/context_managers.py (new file, 226 lines)
@@ -0,0 +1,226 @@
|
|||||||
|
from mongoengine.common import _import_class
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
from mongoengine.queryset import QuerySet


__all__ = ("switch_db", "switch_collection", "no_dereference",
           "no_sub_classes", "query_counter")


class switch_db(object):
    """switch_db alias context manager.

    Example ::

        # Register connections
        register_connection('default', 'mongoenginetest')
        register_connection('testdb-1', 'mongoenginetest2')

        class Group(Document):
            name = StringField()

        Group(name="test").save()  # Saves in the default db

        with switch_db(Group, 'testdb-1') as Group:
            Group(name="hello testdb!").save()  # Saves in testdb-1

    """

    def __init__(self, cls, db_alias):
        """Construct the switch_db context manager

        :param cls: the class to change the registered db
        :param db_alias: the name of the specific database to use
        """
        self.cls = cls
        self.collection = cls._get_collection()
        self.db_alias = db_alias
        self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME)

    def __enter__(self):
        """Change the db_alias and clear the cached collection."""
        self.cls._meta["db_alias"] = self.db_alias
        self.cls._collection = None
        return self.cls

    def __exit__(self, t, value, traceback):
        """Reset the db_alias and collection."""
        self.cls._meta["db_alias"] = self.ori_db_alias
        self.cls._collection = self.collection


class switch_collection(object):
    """switch_collection alias context manager.

    Example ::

        class Group(Document):
            name = StringField()

        Group(name="test").save()  # Saves in the default db

        with switch_collection(Group, 'group1') as Group:
            Group(name="hello testdb!").save()  # Saves in group1 collection

    """

    def __init__(self, cls, collection_name):
        """Construct the switch_collection context manager

        :param cls: the class to change the registered db
        :param collection_name: the name of the collection to use
        """
        self.cls = cls
        self.ori_collection = cls._get_collection()
        self.ori_get_collection_name = cls._get_collection_name
        self.collection_name = collection_name

    def __enter__(self):
        """Change the _get_collection_name and clear the cached collection."""

        @classmethod
        def _get_collection_name(cls):
            return self.collection_name

        self.cls._get_collection_name = _get_collection_name
        self.cls._collection = None
        return self.cls

    def __exit__(self, t, value, traceback):
        """Reset the collection."""
        self.cls._collection = self.ori_collection
        self.cls._get_collection_name = self.ori_get_collection_name


class no_dereference(object):
    """no_dereference context manager.

    Turns off all dereferencing in Documents for the duration of the context
    manager::

        with no_dereference(Group) as Group:
            Group.objects.find()

    """

    def __init__(self, cls):
        """Construct the no_dereference context manager.

        :param cls: the class to turn dereferencing off on
        """
        self.cls = cls

        ReferenceField = _import_class('ReferenceField')
        GenericReferenceField = _import_class('GenericReferenceField')
        ComplexBaseField = _import_class('ComplexBaseField')

        self.deref_fields = [k for k, v in self.cls._fields.iteritems()
                             if isinstance(v, (ReferenceField,
                                               GenericReferenceField,
                                               ComplexBaseField))]

    def __enter__(self):
        """Change the objects default and _auto_dereference values."""
        for field in self.deref_fields:
            self.cls._fields[field]._auto_dereference = False
        return self.cls

    def __exit__(self, t, value, traceback):
        """Reset the default and _auto_dereference values."""
        for field in self.deref_fields:
            self.cls._fields[field]._auto_dereference = True
        return self.cls


class no_sub_classes(object):
    """no_sub_classes context manager.

    Only returns instances of this class and no sub (inherited) classes::

        with no_sub_classes(Group) as Group:
            Group.objects.find()

    """

    def __init__(self, cls):
        """Construct the no_sub_classes context manager.

        :param cls: the class to turn querying sub classes on
        """
        self.cls = cls

    def __enter__(self):
        """Change the objects default and _auto_dereference values."""
        self.cls._all_subclasses = self.cls._subclasses
        self.cls._subclasses = (self.cls,)
        return self.cls

    def __exit__(self, t, value, traceback):
        """Reset the default and _auto_dereference values."""
        self.cls._subclasses = self.cls._all_subclasses
        delattr(self.cls, '_all_subclasses')
        return self.cls


class QuerySetNoDeRef(QuerySet):
    """Special no_dereference QuerySet"""
    def __dereference(items, max_depth=1, instance=None, name=None):
        return items


class query_counter(object):
    """query_counter context manager to get the number of queries."""

    def __init__(self):
        """Construct the query_counter."""
        self.counter = 0
        self.db = get_db()

    def __enter__(self):
        """On every with block we need to drop the profile collection."""
        self.db.set_profiling_level(0)
        self.db.system.profile.drop()
        self.db.set_profiling_level(2)
        return self

    def __exit__(self, t, value, traceback):
        """Reset the profiling level."""
        self.db.set_profiling_level(0)

    def __eq__(self, value):
        """== Compare querycounter."""
        return value == self._get_count()

    def __ne__(self, value):
        """!= Compare querycounter."""
        return not self.__eq__(value)

    def __lt__(self, value):
        """< Compare querycounter."""
        return self._get_count() < value

    def __le__(self, value):
        """<= Compare querycounter."""
        return self._get_count() <= value

    def __gt__(self, value):
        """> Compare querycounter."""
        return self._get_count() > value

    def __ge__(self, value):
        """>= Compare querycounter."""
        return self._get_count() >= value

    def __int__(self):
        """int representation."""
        return self._get_count()

    def __repr__(self):
        """repr query_counter as the number of queries."""
        return u"%s" % self._get_count()

    def _get_count(self):
        """Get the number of queries."""
        count = self.db.system.profile.find().count() - self.counter
        self.counter += 1
        return count
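query_counter works by switching the MongoDB profiler on (level 2) and counting entries in system.profile, so it only behaves sensibly against a running mongod. A hedged usage sketch (Group is illustrative); note each comparison itself re-reads the profile collection, which the class compensates for with its internal counter:

with query_counter() as q:
    assert q == 0              # nothing profiled yet
    Group.objects.first()      # issues one profiled query
    assert q == 1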
mongoengine/dereference.py  (new file, 215 lines)
@@ -0,0 +1,215 @@
from bson import DBRef, SON

from base import (BaseDict, BaseList, TopLevelDocumentMetaclass, get_document)
from fields import (ReferenceField, ListField, DictField, MapField)
from connection import get_db
from queryset import QuerySet
from document import Document


class DeReference(object):

    def __call__(self, items, max_depth=1, instance=None, name=None):
        """
        Cheaply dereferences the items to a set depth.
        Also handles the conversion of complex data types.

        :param items: The iterable (dict, list, queryset) to be dereferenced.
        :param max_depth: The maximum depth to recurse to
        :param instance: The owning instance used for tracking changes by
            :class:`~mongoengine.base.ComplexBaseField`
        :param name: The name of the field, used for tracking changes by
            :class:`~mongoengine.base.ComplexBaseField`
        :param get: A boolean determining if being called by __get__
        """
        if items is None or isinstance(items, basestring):
            return items

        # cheapest way to convert a queryset to a list
        # list(queryset) uses a count() query to determine length
        if isinstance(items, QuerySet):
            items = [i for i in items]

        self.max_depth = max_depth
        doc_type = None

        if instance and isinstance(instance, (Document, TopLevelDocumentMetaclass)):
            doc_type = instance._fields.get(name)
            if hasattr(doc_type, 'field'):
                doc_type = doc_type.field

            if isinstance(doc_type, ReferenceField):
                field = doc_type
                doc_type = doc_type.document_type
                is_list = not hasattr(items, 'items')

                if is_list and all([i.__class__ == doc_type for i in items]):
                    return items
                elif not is_list and all([i.__class__ == doc_type
                                          for i in items.values()]):
                    return items
                elif not field.dbref:
                    if not hasattr(items, 'items'):
                        items = [field.to_python(v)
                                 if not isinstance(v, (DBRef, Document)) else v
                                 for v in items]
                    else:
                        items = dict([
                            (k, field.to_python(v))
                            if not isinstance(v, (DBRef, Document)) else (k, v)
                            for k, v in items.iteritems()]
                        )

        self.reference_map = self._find_references(items)
        self.object_map = self._fetch_objects(doc_type=doc_type)
        return self._attach_objects(items, 0, instance, name)

    def _find_references(self, items, depth=0):
        """
        Recursively finds all db references to be dereferenced

        :param items: The iterable (dict, list, queryset)
        :param depth: The current depth of recursion
        """
        reference_map = {}
        if not items or depth >= self.max_depth:
            return reference_map

        # Determine the iterator to use
        if not hasattr(items, 'items'):
            iterator = enumerate(items)
        else:
            iterator = items.iteritems()

        # Recursively find dbreferences
        depth += 1
        for k, item in iterator:
            if isinstance(item, Document):
                for field_name, field in item._fields.iteritems():
                    v = item._data.get(field_name, None)
                    if isinstance(v, (DBRef)):
                        reference_map.setdefault(field.document_type, []).append(v.id)
                    elif isinstance(v, (dict, SON)) and '_ref' in v:
                        reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id)
                    elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
                        field_cls = getattr(getattr(field, 'field', None), 'document_type', None)
                        references = self._find_references(v, depth)
                        for key, refs in references.iteritems():
                            if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
                                key = field_cls
                            reference_map.setdefault(key, []).extend(refs)
            elif isinstance(item, (DBRef)):
                reference_map.setdefault(item.collection, []).append(item.id)
            elif isinstance(item, (dict, SON)) and '_ref' in item:
                reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id)
            elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
                references = self._find_references(item, depth - 1)
                for key, refs in references.iteritems():
                    reference_map.setdefault(key, []).extend(refs)

        return reference_map

    def _fetch_objects(self, doc_type=None):
        """Fetch all references and convert to their document objects
        """
        object_map = {}
        for col, dbrefs in self.reference_map.iteritems():
            keys = object_map.keys()
            refs = list(set([dbref for dbref in dbrefs
                             if unicode(dbref).encode('utf-8') not in keys]))
            if hasattr(col, 'objects'):  # We have a document class for the refs
                references = col.objects.in_bulk(refs)
                for key, doc in references.iteritems():
                    object_map[key] = doc
            else:  # Generic reference: use the refs data to convert to document
                if isinstance(doc_type, (ListField, DictField, MapField,)):
                    continue

                if doc_type:
                    references = doc_type._get_db()[col].find({'_id': {'$in': refs}})
                    for ref in references:
                        doc = doc_type._from_son(ref)
                        object_map[doc.id] = doc
                else:
                    references = get_db()[col].find({'_id': {'$in': refs}})
                    for ref in references:
                        if '_cls' in ref:
                            doc = get_document(ref["_cls"])._from_son(ref)
                        elif doc_type is None:
                            doc = get_document(
                                ''.join(x.capitalize()
                                        for x in col.split('_')))._from_son(ref)
                        else:
                            doc = doc_type._from_son(ref)
                        object_map[doc.id] = doc
        return object_map

    def _attach_objects(self, items, depth=0, instance=None, name=None):
        """
        Recursively finds all db references to be dereferenced

        :param items: The iterable (dict, list, queryset)
        :param depth: The current depth of recursion
        :param instance: The owning instance used for tracking changes by
            :class:`~mongoengine.base.ComplexBaseField`
        :param name: The name of the field, used for tracking changes by
            :class:`~mongoengine.base.ComplexBaseField`
        """
        if not items:
            if isinstance(items, (BaseDict, BaseList)):
                return items

            if instance:
                if isinstance(items, dict):
                    return BaseDict(items, instance, name)
                else:
                    return BaseList(items, instance, name)

        if isinstance(items, (dict, SON)):
            if '_ref' in items:
                return self.object_map.get(items['_ref'].id, items)
            elif '_cls' in items:
                doc = get_document(items['_cls'])._from_son(items)
                doc._data = self._attach_objects(doc._data, depth, doc, None)
                return doc

        if not hasattr(items, 'items'):
            is_list = True
            as_tuple = isinstance(items, tuple)
            iterator = enumerate(items)
            data = []
        else:
            is_list = False
            iterator = items.iteritems()
            data = {}

        depth += 1
        for k, v in iterator:
            if is_list:
                data.append(v)
            else:
                data[k] = v

            if k in self.object_map and not is_list:
                data[k] = self.object_map[k]
            elif isinstance(v, Document):
                for field_name, field in v._fields.iteritems():
                    v = data[k]._data.get(field_name, None)
                    if isinstance(v, (DBRef)):
                        data[k]._data[field_name] = self.object_map.get(v.id, v)
                    elif isinstance(v, (dict, SON)) and '_ref' in v:
                        data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v)
                    elif isinstance(v, dict) and depth <= self.max_depth:
                        data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
                    elif isinstance(v, (list, tuple)) and depth <= self.max_depth:
                        data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
            elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
                data[k] = self._attach_objects(v, depth - 1, instance=instance, name=name)
            elif hasattr(v, 'id'):
                data[k] = self.object_map.get(v.id, v)

        if instance and name:
            if is_list:
                return tuple(data) if as_tuple else BaseList(data, instance, name)
            return BaseDict(data, instance, name)
        depth += 1
        return data
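DeReference is usually driven from the query layer rather than called directly. A hedged sketch of the assumed public entry point in this era of MongoEngine (Post is an illustrative document class):

# Resolve DBRef fields up to two levels deep in a few batched queries
posts = Post.objects.select_related(max_depth=2)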
mongoengine/django/__init__.py  (new file, empty)

mongoengine/django/auth.py  (new file, 402 lines)
@@ -0,0 +1,402 @@
from mongoengine import *

from django.utils.encoding import smart_str
from django.contrib.auth.models import _user_has_perm, _user_get_all_permissions, _user_has_module_perms
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.contrib.contenttypes.models import ContentTypeManager
from django.contrib import auth
from django.contrib.auth.models import AnonymousUser
from django.utils.translation import ugettext_lazy as _

try:
    from django.contrib.auth.hashers import check_password, make_password
except ImportError:
    """Handle older versions of Django"""
    from django.utils.hashcompat import md5_constructor, sha_constructor

    def get_hexdigest(algorithm, salt, raw_password):
        raw_password, salt = smart_str(raw_password), smart_str(salt)
        if algorithm == 'md5':
            return md5_constructor(salt + raw_password).hexdigest()
        elif algorithm == 'sha1':
            return sha_constructor(salt + raw_password).hexdigest()
        raise ValueError('Got unknown password algorithm type in password')

    def check_password(raw_password, password):
        algo, salt, hash = password.split('$')
        return hash == get_hexdigest(algo, salt, raw_password)

    def make_password(raw_password):
        from random import random
        algo = 'sha1'
        salt = get_hexdigest(algo, str(random()), str(random()))[:5]
        hash = get_hexdigest(algo, salt, raw_password)
        return '%s$%s$%s' % (algo, salt, hash)

from .utils import datetime_now

REDIRECT_FIELD_NAME = 'next'


class ContentType(Document):
    name = StringField(max_length=100)
    app_label = StringField(max_length=100)
    model = StringField(max_length=100, verbose_name=_('python model class name'),
                        unique_with='app_label')
    objects = ContentTypeManager()

    class Meta:
        verbose_name = _('content type')
        verbose_name_plural = _('content types')
        # db_table = 'django_content_type'
        # ordering = ('name',)
        # unique_together = (('app_label', 'model'),)

    def __unicode__(self):
        return self.name

    def model_class(self):
        "Returns the Python model class for this type of content."
        from django.db import models
        return models.get_model(self.app_label, self.model)

    def get_object_for_this_type(self, **kwargs):
        """
        Returns an object of this type for the keyword arguments given.
        Basically, this is a proxy around this object_type's get_object() model
        method. The ObjectNotExist exception, if thrown, will not be caught,
        so code that calls this method should catch it.
        """
        return self.model_class()._default_manager.using(self._state.db).get(**kwargs)

    def natural_key(self):
        return (self.app_label, self.model)


class SiteProfileNotAvailable(Exception):
    pass


class PermissionManager(models.Manager):
    def get_by_natural_key(self, codename, app_label, model):
        return self.get(
            codename=codename,
            content_type=ContentType.objects.get_by_natural_key(app_label, model)
        )


class Permission(Document):
    """The permissions system provides a way to assign permissions to specific
    users and groups of users.

    The permission system is used by the Django admin site, but may also be
    useful in your own code. The Django admin site uses permissions as follows:

        - The "add" permission limits the user's ability to view the "add"
          form and add an object.
        - The "change" permission limits a user's ability to view the change
          list, view the "change" form and change an object.
        - The "delete" permission limits the ability to delete an object.

    Permissions are set globally per type of object, not per specific object
    instance. It is possible to say "Mary may change news stories," but it's
    not currently possible to say "Mary may change news stories, but only the
    ones she created herself" or "Mary may only change news stories that have
    a certain status or publication date."

    Three basic permissions -- add, change and delete -- are automatically
    created for each Django model.
    """
    name = StringField(max_length=50, verbose_name=_('username'))
    content_type = ReferenceField(ContentType)
    codename = StringField(max_length=100, verbose_name=_('codename'))
    # FIXME: don't access field of the other class
    # unique_with=['content_type__app_label', 'content_type__model'])

    objects = PermissionManager()

    class Meta:
        verbose_name = _('permission')
        verbose_name_plural = _('permissions')
        # unique_together = (('content_type', 'codename'),)
        # ordering = ('content_type__app_label', 'content_type__model', 'codename')

    def __unicode__(self):
        return u"%s | %s | %s" % (
            unicode(self.content_type.app_label),
            unicode(self.content_type),
            unicode(self.name))

    def natural_key(self):
        return (self.codename,) + self.content_type.natural_key()
    natural_key.dependencies = ['contenttypes.contenttype']


class Group(Document):
    """Groups are a generic way of categorizing users to apply permissions,
    or some other label, to those users. A user can belong to any number of
    groups.

    A user in a group automatically has all the permissions granted to that
    group. For example, if the group Site editors has the permission
    can_edit_home_page, any user in that group will have that permission.

    Beyond permissions, groups are a convenient way to categorize users to
    apply some label, or extended functionality, to them. For example, you
    could create a group 'Special users', and you could write code that would
    do special things to those users -- such as giving them access to a
    members-only portion of your site, or sending them members-only
    e-mail messages.
    """
    name = StringField(max_length=80, unique=True, verbose_name=_('name'))
    permissions = ListField(ReferenceField(Permission, verbose_name=_('permissions'), required=False))

    class Meta:
        verbose_name = _('group')
        verbose_name_plural = _('groups')

    def __unicode__(self):
        return self.name


class UserManager(models.Manager):
    def create_user(self, username, email, password=None):
        """
        Creates and saves a User with the given username, e-mail and password.
        """
        now = datetime_now()

        # Normalize the address by lowercasing the domain part of the email
        # address.
        try:
            email_name, domain_part = email.strip().split('@', 1)
        except ValueError:
            pass
        else:
            email = '@'.join([email_name, domain_part.lower()])

        user = self.model(username=username, email=email, is_staff=False,
                          is_active=True, is_superuser=False, last_login=now,
                          date_joined=now)

        user.set_password(password)
        user.save(using=self._db)
        return user

    def create_superuser(self, username, email, password):
        u = self.create_user(username, email, password)
        u.is_staff = True
        u.is_active = True
        u.is_superuser = True
        u.save(using=self._db)
        return u

    def make_random_password(self, length=10, allowed_chars='abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23456789'):
        "Generates a random password with the given length and given allowed_chars"
        # Note that default value of allowed_chars does not have "I" or letters
        # that look like it -- just to avoid confusion.
        from random import choice
        return ''.join([choice(allowed_chars) for i in range(length)])


class User(Document):
    """A User document that aims to mirror most of the API specified by Django
    at http://docs.djangoproject.com/en/dev/topics/auth/#users
    """
    username = StringField(max_length=30, required=True,
                           verbose_name=_('username'),
                           help_text=_("Required. 30 characters or fewer. Letters, numbers and @/./+/-/_ characters"))

    first_name = StringField(max_length=30,
                             verbose_name=_('first name'))

    last_name = StringField(max_length=30,
                            verbose_name=_('last name'))
    email = EmailField(verbose_name=_('e-mail address'))
    password = StringField(max_length=128,
                           verbose_name=_('password'),
                           help_text=_("Use '[algo]$[iterations]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>."))
    is_staff = BooleanField(default=False,
                            verbose_name=_('staff status'),
                            help_text=_("Designates whether the user can log into this admin site."))
    is_active = BooleanField(default=True,
                             verbose_name=_('active'),
                             help_text=_("Designates whether this user should be treated as active. Unselect this instead of deleting accounts."))
    is_superuser = BooleanField(default=False,
                                verbose_name=_('superuser status'),
                                help_text=_("Designates that this user has all permissions without explicitly assigning them."))
    last_login = DateTimeField(default=datetime_now,
                               verbose_name=_('last login'))
    date_joined = DateTimeField(default=datetime_now,
                                verbose_name=_('date joined'))

    USERNAME_FIELD = 'username'
    REQUIRED_FIELDS = ['email']

    meta = {
        'allow_inheritance': True,
        'indexes': [
            {'fields': ['username'], 'unique': True, 'sparse': True}
        ]
    }

    def __unicode__(self):
        return self.username

    def get_full_name(self):
        """Returns the users first and last names, separated by a space.
        """
        full_name = u'%s %s' % (self.first_name or '', self.last_name or '')
        return full_name.strip()

    def is_anonymous(self):
        return False

    def is_authenticated(self):
        return True

    def set_password(self, raw_password):
        """Sets the user's password - always use this rather than directly
        assigning to :attr:`~mongoengine.django.auth.User.password` as the
        password is hashed before storage.
        """
        self.password = make_password(raw_password)
        self.save()
        return self

    def check_password(self, raw_password):
        """Checks the user's password against a provided password - always use
        this rather than directly comparing to
        :attr:`~mongoengine.django.auth.User.password` as the password is
        hashed before storage.
        """
        return check_password(raw_password, self.password)

    @classmethod
    def create_user(cls, username, password, email=None):
        """Create (and save) a new user with the given username, password and
        email address.
        """
        now = datetime_now()

        # Normalize the address by lowercasing the domain part of the email
        # address.
        if email is not None:
            try:
                email_name, domain_part = email.strip().split('@', 1)
            except ValueError:
                pass
            else:
                email = '@'.join([email_name, domain_part.lower()])

        user = cls(username=username, email=email, date_joined=now)
        user.set_password(password)
        user.save()
        return user

    def get_group_permissions(self, obj=None):
        """
        Returns a list of permission strings that this user has through his/her
        groups. This method queries all available auth backends. If an object
        is passed in, only permissions matching this object are returned.
        """
        permissions = set()
        for backend in auth.get_backends():
            if hasattr(backend, "get_group_permissions"):
                permissions.update(backend.get_group_permissions(self, obj))
        return permissions

    def get_all_permissions(self, obj=None):
        return _user_get_all_permissions(self, obj)

    def has_perm(self, perm, obj=None):
        """
        Returns True if the user has the specified permission. This method
        queries all available auth backends, but returns immediately if any
        backend returns True. Thus, a user who has permission from a single
        auth backend is assumed to have permission in general. If an object is
        provided, permissions for this specific object are checked.
        """

        # Active superusers have all permissions.
        if self.is_active and self.is_superuser:
            return True

        # Otherwise we need to check the backends.
        return _user_has_perm(self, perm, obj)

    def has_module_perms(self, app_label):
        """
        Returns True if the user has any permissions in the given app label.
        Uses pretty much the same logic as has_perm, above.
        """
        # Active superusers have all permissions.
        if self.is_active and self.is_superuser:
            return True

        return _user_has_module_perms(self, app_label)

    def email_user(self, subject, message, from_email=None):
        "Sends an e-mail to this User."
        from django.core.mail import send_mail
        send_mail(subject, message, from_email, [self.email])

    def get_profile(self):
        """
        Returns site-specific profile for this user. Raises
        SiteProfileNotAvailable if this site does not allow profiles.
        """
        if not hasattr(self, '_profile_cache'):
            from django.conf import settings
            if not getattr(settings, 'AUTH_PROFILE_MODULE', False):
                raise SiteProfileNotAvailable('You need to set AUTH_PROFILE_MO'
                                              'DULE in your project settings')
            try:
                app_label, model_name = settings.AUTH_PROFILE_MODULE.split('.')
            except ValueError:
                raise SiteProfileNotAvailable('app_label and model_name should'
                        ' be separated by a dot in the AUTH_PROFILE_MODULE set'
                        'ting')

            try:
                model = models.get_model(app_label, model_name)
                if model is None:
                    raise SiteProfileNotAvailable('Unable to load the profile '
                        'model, check AUTH_PROFILE_MODULE in your project sett'
                        'ings')
                self._profile_cache = model._default_manager.using(self._state.db).get(user__id__exact=self.id)
                self._profile_cache.user = self
            except (ImportError, ImproperlyConfigured):
                raise SiteProfileNotAvailable
        return self._profile_cache


class MongoEngineBackend(object):
    """Authenticate using MongoEngine and mongoengine.django.auth.User.
    """

    supports_object_permissions = False
    supports_anonymous_user = False
    supports_inactive_user = False

    def authenticate(self, username=None, password=None):
        user = User.objects(username=username).first()
        if user:
            if password and user.check_password(password):
                backend = auth.get_backends()[0]
                user.backend = "%s.%s" % (backend.__module__, backend.__class__.__name__)
                return user
        return None

    def get_user(self, user_id):
        return User.objects.with_id(user_id)


def get_user(userid):
    """Returns a User object from an id (User.id). Django's equivalent takes
    request, but taking an id instead leaves it up to the developer to store
    the id in any way they want (session, signed cookie, etc.)
    """
    if not userid:
        return AnonymousUser()
    return MongoEngineBackend().get_user(userid) or AnonymousUser()
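Routing Django logins through this backend is the standard wiring for the module; a minimal assumed settings sketch (not part of the diff):

# settings.py
AUTHENTICATION_BACKENDS = (
    'mongoengine.django.auth.MongoEngineBackend',
)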
mongoengine/django/mongo_auth/__init__.py  (new file, empty)

mongoengine/django/mongo_auth/models.py  (new file, 89 lines)
@@ -0,0 +1,89 @@
from django.conf import settings
from django.contrib.auth.models import UserManager
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.utils.importlib import import_module
from django.utils.translation import ugettext_lazy as _


MONGOENGINE_USER_DOCUMENT = getattr(
    settings, 'MONGOENGINE_USER_DOCUMENT', 'mongoengine.django.auth.User')


class MongoUserManager(UserManager):
    """A User manager which allows the use of MongoEngine documents in Django.

    To use the manager, you must tell django.contrib.auth to use MongoUser as
    the user model. In your settings.py, you need:

        INSTALLED_APPS = (
            ...
            'django.contrib.auth',
            'mongoengine.django.mongo_auth',
            ...
        )
        AUTH_USER_MODEL = 'mongo_auth.MongoUser'

    Django will use the model object to access the custom Manager, which will
    replace the original queryset with MongoEngine querysets.

    By default, mongoengine.django.auth.User will be used to store users. You
    can specify another document class in MONGOENGINE_USER_DOCUMENT in your
    settings.py.

    The User Document class has the same requirements as a standard custom user
    model: https://docs.djangoproject.com/en/dev/topics/auth/customizing/

    In particular, the User Document class must define USERNAME_FIELD and
    REQUIRED_FIELDS.

    `AUTH_USER_MODEL` has been added in Django 1.5.
    """

    def contribute_to_class(self, model, name):
        super(MongoUserManager, self).contribute_to_class(model, name)
        self.dj_model = self.model
        self.model = self._get_user_document()

        self.dj_model.USERNAME_FIELD = self.model.USERNAME_FIELD
        username = models.CharField(_('username'), max_length=30, unique=True)
        username.contribute_to_class(self.dj_model, self.dj_model.USERNAME_FIELD)

        self.dj_model.REQUIRED_FIELDS = self.model.REQUIRED_FIELDS
        for name in self.dj_model.REQUIRED_FIELDS:
            field = models.CharField(_(name), max_length=30)
            field.contribute_to_class(self.dj_model, name)

    def _get_user_document(self):
        try:
            name = MONGOENGINE_USER_DOCUMENT
            dot = name.rindex('.')
            module = import_module(name[:dot])
            return getattr(module, name[dot + 1:])
        except ImportError:
            raise ImproperlyConfigured("Error importing %s, please check "
                                       "settings.MONGOENGINE_USER_DOCUMENT"
                                       % name)

    def get(self, *args, **kwargs):
        try:
            return self.get_query_set().get(*args, **kwargs)
        except self.model.DoesNotExist:
            # ModelBackend expects this exception
            raise self.dj_model.DoesNotExist

    @property
    def db(self):
        raise NotImplementedError

    def get_empty_query_set(self):
        return self.model.objects.none()

    def get_query_set(self):
        return self.model.objects


class MongoUser(models.Model):
    objects = MongoUserManager()
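When the default user document does not fit, the MONGOENGINE_USER_DOCUMENT hook shown above can point at any document defining USERNAME_FIELD and REQUIRED_FIELDS; for example (the module path is a placeholder):

# settings.py
MONGOENGINE_USER_DOCUMENT = 'myapp.documents.CustomUser'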
mongoengine/django/sessions.py  (new file, 102 lines)
@@ -0,0 +1,102 @@
from django.conf import settings
from django.contrib.sessions.backends.base import SessionBase, CreateError
from django.core.exceptions import SuspiciousOperation
from django.utils.encoding import force_unicode

from mongoengine.document import Document
from mongoengine import fields
from mongoengine.queryset import OperationError
from mongoengine.connection import DEFAULT_CONNECTION_NAME

from .utils import datetime_now


MONGOENGINE_SESSION_DB_ALIAS = getattr(
    settings, 'MONGOENGINE_SESSION_DB_ALIAS',
    DEFAULT_CONNECTION_NAME)

# a setting for the name of the collection used to store sessions
MONGOENGINE_SESSION_COLLECTION = getattr(
    settings, 'MONGOENGINE_SESSION_COLLECTION',
    'django_session')

# a setting for whether session data is stored encoded or not
MONGOENGINE_SESSION_DATA_ENCODE = getattr(
    settings, 'MONGOENGINE_SESSION_DATA_ENCODE',
    True)


class MongoSession(Document):
    session_key = fields.StringField(primary_key=True, max_length=40)
    session_data = fields.StringField() if MONGOENGINE_SESSION_DATA_ENCODE \
                                        else fields.DictField()
    expire_date = fields.DateTimeField()

    meta = {
        'collection': MONGOENGINE_SESSION_COLLECTION,
        'db_alias': MONGOENGINE_SESSION_DB_ALIAS,
        'allow_inheritance': False,
        'indexes': [
            {
                'fields': ['expire_date'],
                'expireAfterSeconds': settings.SESSION_COOKIE_AGE
            }
        ]
    }

    def get_decoded(self):
        return SessionStore().decode(self.session_data)


class SessionStore(SessionBase):
    """A MongoEngine-based session store for Django.
    """

    def load(self):
        try:
            # Call datetime_now() so we compare against a datetime,
            # not the function object itself
            s = MongoSession.objects(session_key=self.session_key,
                                     expire_date__gt=datetime_now())[0]
            if MONGOENGINE_SESSION_DATA_ENCODE:
                return self.decode(force_unicode(s.session_data))
            else:
                return s.session_data
        except (IndexError, SuspiciousOperation):
            self.create()
            return {}

    def exists(self, session_key):
        return bool(MongoSession.objects(session_key=session_key).first())

    def create(self):
        while True:
            self._session_key = self._get_new_session_key()
            try:
                self.save(must_create=True)
            except CreateError:
                continue
            self.modified = True
            self._session_cache = {}
            return

    def save(self, must_create=False):
        if self.session_key is None:
            self._session_key = self._get_new_session_key()
        s = MongoSession(session_key=self.session_key)
        if MONGOENGINE_SESSION_DATA_ENCODE:
            s.session_data = self.encode(self._get_session(no_load=must_create))
        else:
            s.session_data = self._get_session(no_load=must_create)
        s.expire_date = self.get_expiry_date()
        try:
            s.save(force_insert=must_create)
        except OperationError:
            if must_create:
                raise CreateError
            raise

    def delete(self, session_key=None):
        if session_key is None:
            if self.session_key is None:
                return
            session_key = self.session_key
        MongoSession.objects(session_key=session_key).delete()
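Enabling the store is a one-line settings change; the expireAfterSeconds index on expire_date then lets MongoDB expire stale sessions itself. An assumed example, not part of the diff:

# settings.py
SESSION_ENGINE = 'mongoengine.django.sessions'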
mongoengine/django/shortcuts.py  (new file, 47 lines)
@@ -0,0 +1,47 @@
from mongoengine.queryset import QuerySet
from mongoengine.base import BaseDocument
from mongoengine.errors import ValidationError

def _get_queryset(cls):
    """Inspired by django.shortcuts.*"""
    if isinstance(cls, QuerySet):
        return cls
    else:
        return cls.objects


def get_document_or_404(cls, *args, **kwargs):
    """
    Uses get() to return a document, or raises a Http404 exception if the
    document does not exist.

    cls may be a Document or QuerySet object. All other passed
    arguments and keyword arguments are used in the get() query.

    Note: Like with get(), a MultipleObjectsReturned will be raised if more
    than one object is found.

    Inspired by django.shortcuts.*
    """
    queryset = _get_queryset(cls)
    try:
        return queryset.get(*args, **kwargs)
    except (queryset._document.DoesNotExist, ValidationError):
        from django.http import Http404
        raise Http404('No %s matches the given query.' % queryset._document._class_name)


def get_list_or_404(cls, *args, **kwargs):
    """
    Uses filter() to return a list of documents, or raises a Http404 exception
    if the list is empty.

    cls may be a Document or QuerySet object. All other passed
    arguments and keyword arguments are used in the filter() query.

    Inspired by django.shortcuts.*
    """
    queryset = _get_queryset(cls)
    obj_list = list(queryset.filter(*args, **kwargs))
    if not obj_list:
        from django.http import Http404
        raise Http404('No %s matches the given query.' % queryset._document._class_name)
    return obj_list
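These mirror Django's get_object_or_404 / get_list_or_404 for documents. A view sketch (the Article document and template name are illustrative):

from django.shortcuts import render
from mongoengine.django.shortcuts import get_document_or_404

def article_detail(request, slug):
    # Raises Http404 when no Article matches, like Django's ORM shortcut
    article = get_document_or_404(Article, slug=slug)
    return render(request, 'article_detail.html', {'article': article})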
mongoengine/django/storage.py  (new file, 112 lines)
@@ -0,0 +1,112 @@
import os
import itertools
import urlparse

from mongoengine import *
from django.conf import settings
from django.core.files.storage import Storage
from django.core.exceptions import ImproperlyConfigured


class FileDocument(Document):
    """A document used to store a single file in GridFS.
    """
    file = FileField()


class GridFSStorage(Storage):
    """A custom storage backend to store files in GridFS
    """

    def __init__(self, base_url=None):

        if base_url is None:
            base_url = settings.MEDIA_URL
        self.base_url = base_url
        self.document = FileDocument
        self.field = 'file'

    def delete(self, name):
        """Deletes the specified file from the storage system.
        """
        if self.exists(name):
            doc = self.document.objects.first()
            field = getattr(doc, self.field)
            self._get_doc_with_name(name).delete()  # Delete the FileField
            field.delete()  # Delete the FileDocument

    def exists(self, name):
        """Returns True if a file referenced by the given name already exists in the
        storage system, or False if the name is available for a new file.
        """
        doc = self._get_doc_with_name(name)
        if doc:
            field = getattr(doc, self.field)
            return bool(field.name)
        else:
            return False

    def listdir(self, path=None):
        """Lists the contents of the specified path, returning a 2-tuple of lists;
        the first item being directories, the second item being files.
        """
        def name(doc):
            return getattr(doc, self.field).name
        docs = self.document.objects
        return [], [name(d) for d in docs if name(d)]

    def size(self, name):
        """Returns the total size, in bytes, of the file specified by name.
        """
        doc = self._get_doc_with_name(name)
        if doc:
            return getattr(doc, self.field).length
        else:
            raise ValueError("No such file or directory: '%s'" % name)

    def url(self, name):
        """Returns an absolute URL where the file's contents can be accessed
        directly by a web browser.
        """
        if self.base_url is None:
            raise ValueError("This file is not accessible via a URL.")
        return urlparse.urljoin(self.base_url, name).replace('\\', '/')

    def _get_doc_with_name(self, name):
        """Find the documents in the store with the given name
        """
        docs = self.document.objects
        doc = [d for d in docs if getattr(d, self.field).name == name]
        if doc:
            return doc[0]
        else:
            return None

    def _open(self, name, mode='rb'):
        doc = self._get_doc_with_name(name)
        if doc:
            return getattr(doc, self.field)
        else:
            raise ValueError("No file found with the name '%s'." % name)

    def get_available_name(self, name):
        """Returns a filename that's free on the target storage system, and
        available for new content to be written to.
        """
        file_root, file_ext = os.path.splitext(name)
        # If the filename already exists, add an underscore and a number (before
        # the file extension, if one exists) to the filename until the generated
        # filename doesn't exist.
        count = itertools.count(1)
        while self.exists(name):
            # file_ext includes the dot.
            name = os.path.join("%s_%s%s" % (file_root, count.next(), file_ext))

        return name

    def _save(self, name, content):
        doc = self.document()
        getattr(doc, self.field).put(content, filename=name)
        doc.save()

        return name
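A brief usage sketch of the backend, exercised directly rather than via Django's file-storage settings (the file name and content are illustrative; Storage.save() is Django's public wrapper around _save()):

from django.core.files.base import ContentFile
from mongoengine.django.storage import GridFSStorage

storage = GridFSStorage(base_url='/media/')
name = storage.save('hello.txt', ContentFile('hello gridfs'))
print storage.exists(name), storage.size(name), storage.url(name)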
mongoengine/django/tests.py  (new file, 39 lines)
@@ -0,0 +1,39 @@
#coding: utf-8
from nose.plugins.skip import SkipTest

from mongoengine.python_support import PY3
from mongoengine import connect

try:
    from django.test import TestCase
    from django.conf import settings
except Exception as err:
    if PY3:
        from unittest import TestCase
        # Dummy value so no error
        class settings:
            MONGO_DATABASE_NAME = 'dummy'
    else:
        raise err


class MongoTestCase(TestCase):
    """
    TestCase class that clears the collections between the tests
    """

    def setUp(self):
        if PY3:
            raise SkipTest('django does not have Python 3 support')

    db_name = 'test_%s' % settings.MONGO_DATABASE_NAME

    def __init__(self, methodName='runtest'):
        self.db = connect(self.db_name).get_db()
        super(MongoTestCase, self).__init__(methodName)

    def _post_teardown(self):
        super(MongoTestCase, self)._post_teardown()
        for collection in self.db.collection_names():
            if collection == 'system.indexes':
                continue
            self.db.drop_collection(collection)
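Test classes can then inherit the collection-clearing behaviour directly; a minimal assumed example (the Group document is illustrative):

from mongoengine import Document, StringField
from mongoengine.django.tests import MongoTestCase

class Group(Document):
    name = StringField()

class GroupTests(MongoTestCase):
    def test_create(self):
        Group(name='test').save()
        self.assertEqual(Group.objects.count(), 1)
        # _post_teardown drops every non-system collection afterwards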
mongoengine/django/utils.py  (new file, 6 lines)
@@ -0,0 +1,6 @@
try:
    # django >= 1.4
    from django.utils.timezone import now as datetime_now
except ImportError:
    from datetime import datetime
    datetime_now = datetime.now
mongoengine/document.py  (changed file)
@@ -1,9 +1,24 @@
-from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument,
-                  ValidationError)
-from connection import _get_db
+import warnings
+
+import pymongo
+import re
+
+from bson.dbref import DBRef
+from mongoengine import signals
+from mongoengine.base import (DocumentMetaclass, TopLevelDocumentMetaclass,
+                              BaseDocument, BaseDict, BaseList,
+                              ALLOW_INHERITANCE, get_document)
+from mongoengine.queryset import OperationError, NotUniqueError, QuerySet
+from mongoengine.connection import get_db, DEFAULT_CONNECTION_NAME
+from mongoengine.context_managers import switch_db, switch_collection
 
-__all__ = ['Document', 'EmbeddedDocument']
+__all__ = ('Document', 'EmbeddedDocument', 'DynamicDocument',
+           'DynamicEmbeddedDocument', 'OperationError',
+           'InvalidCollectionError', 'NotUniqueError', 'MapReduceDocument')
+
+
+class InvalidCollectionError(Exception):
+    pass
 
 
 class EmbeddedDocument(BaseDocument):
@@ -11,10 +26,32 @@ class EmbeddedDocument(BaseDocument):
     collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as
     fields on :class:`~mongoengine.Document`\ s through the
     :class:`~mongoengine.EmbeddedDocumentField` field type.
+
+    A :class:`~mongoengine.EmbeddedDocument` subclass may be itself subclassed,
+    to create a specialised version of the embedded document that will be
+    stored in the same collection. To facilitate this behaviour a `_cls`
+    field is added to documents (hidden though the MongoEngine interface).
+    To disable this behaviour and remove the dependence on the presence of
+    `_cls` set :attr:`allow_inheritance` to ``False`` in the :attr:`meta`
+    dictionary.
     """
+
+    # The __metaclass__ attribute is removed by 2to3 when running with Python3
+    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
+    my_metaclass = DocumentMetaclass
     __metaclass__ = DocumentMetaclass
+
+    _instance = None
+
+    def __init__(self, *args, **kwargs):
+        super(EmbeddedDocument, self).__init__(*args, **kwargs)
+        self._changed_fields = []
+
+    def __eq__(self, other):
+        if isinstance(other, self.__class__):
+            return self._data == other._data
+        return False
+
 
 class Document(BaseDocument):
     """The base class used for defining the structure and properties of
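The `_cls` behaviour described in the new docstring applies to both document types; a short assumed example of opting out via meta:

from mongoengine import EmbeddedDocument, StringField

class Comment(EmbeddedDocument):
    content = StringField()

    # No subclasses planned, so skip storing the _cls marker field
    meta = {'allow_inheritance': False}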
@@ -31,62 +68,580 @@ class Document(BaseDocument):
|
|||||||
|
|
||||||
A :class:`~mongoengine.Document` subclass may be itself subclassed, to
|
A :class:`~mongoengine.Document` subclass may be itself subclassed, to
|
||||||
create a specialised version of the document that will be stored in the
|
create a specialised version of the document that will be stored in the
|
||||||
same collection. To facilitate this behaviour, `_cls` and `_types`
|
same collection. To facilitate this behaviour a `_cls`
|
||||||
fields are added to documents (hidden though the MongoEngine interface
|
field is added to documents (hidden though the MongoEngine interface).
|
||||||
though). To disable this behaviour and remove the dependence on the
|
To disable this behaviour and remove the dependence on the presence of
|
||||||
presence of `_cls` and `_types`, set :attr:`allow_inheritance` to
|
`_cls` set :attr:`allow_inheritance` to ``False`` in the :attr:`meta`
|
||||||
``False`` in the :attr:`meta` dictionary.
|
dictionary.
|
||||||
|
|
||||||
A :class:`~mongoengine.Document` may use a **Capped Collection** by
|
A :class:`~mongoengine.Document` may use a **Capped Collection** by
|
||||||
specifying :attr:`max_documents` and :attr:`max_size` in the :attr:`meta`
|
specifying :attr:`max_documents` and :attr:`max_size` in the :attr:`meta`
|
||||||
dictionary. :attr:`max_documents` is the maximum number of documents that
|
dictionary. :attr:`max_documents` is the maximum number of documents that
|
||||||
is allowed to be stored in the collection, and :attr:`max_size` is the
|
is allowed to be stored in the collection, and :attr:`max_size` is the
|
||||||
maximum size of the collection in bytes. If :attr:`max_size` is not
|
maximum size of the collection in bytes. If :attr:`max_size` is not
|
||||||
specified and :attr:`max_documents` is, :attr:`max_size` defaults to
|
specified and :attr:`max_documents` is, :attr:`max_size` defaults to
|
||||||
10000000 bytes (10MB).
|
10000000 bytes (10MB).
|
||||||
|
|
||||||
|
Indexes may be created by specifying :attr:`indexes` in the :attr:`meta`
|
||||||
|
dictionary. The value should be a list of field names or tuples of field
|
||||||
|
names. Index direction may be specified by prefixing the field names with
|
||||||
|
a **+** or **-** sign.
|
||||||
|
|
||||||
|
Automatic index creation can be disabled by specifying
|
||||||
|
attr:`auto_create_index` in the :attr:`meta` dictionary. If this is set to
|
||||||
|
False then indexes will not be created by MongoEngine. This is useful in
|
||||||
|
production systems where index creation is performed as part of a
|
||||||
|
deployment system.
|
||||||
|
|
||||||
|
By default, _cls will be added to the start of every index (that
|
||||||
|
doesn't contain a list) if allow_inheritance is True. This can be
|
||||||
|
disabled by either setting cls to False on the specific index or
|
||||||
|
by setting index_cls to False on the meta dictionary for the document.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
# The __metaclass__ attribute is removed by 2to3 when running with Python3
|
||||||
|
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
|
||||||
|
my_metaclass = TopLevelDocumentMetaclass
|
||||||
__metaclass__ = TopLevelDocumentMetaclass
|
__metaclass__ = TopLevelDocumentMetaclass
|
||||||
|
|
||||||
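    # A minimal sketch (not part of the commit) of the meta options described
    # in the docstring above; the LogEntry class and its fields are
    # hypothetical:
    #
    #     class LogEntry(Document):
    #         level = IntField()
    #         message = StringField()
    #         meta = {
    #             'max_documents': 1000,       # capped: at most 1000 docs...
    #             'max_size': 2000000,         # ...and at most 2000000 bytes
    #             'indexes': ['message', ('level', '-message')],
    #             'auto_create_index': False,  # deployment tooling builds them
    #             'index_cls': False,          # no implicit leading _cls index
    #         }
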
    def pk():
        """Primary key alias
        """
        def fget(self):
            return getattr(self, self._meta['id_field'])

        def fset(self, value):
            return setattr(self, self._meta['id_field'], value)
        return property(fget, fset)
    pk = pk()

    @classmethod
    def _get_db(cls):
        """Return the database the document uses, honouring any custom
        ``db_alias`` set in :attr:`meta`."""
        return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME))

    @classmethod
    def _get_collection(cls):
        """Returns the collection for the document."""
        if not hasattr(cls, '_collection') or cls._collection is None:
            db = cls._get_db()
            collection_name = cls._get_collection_name()
            # Create collection as a capped collection if specified
            if cls._meta['max_size'] or cls._meta['max_documents']:
                # Get max document limit and max byte size from meta
                max_size = cls._meta['max_size'] or 10000000  # 10MB default
                max_documents = cls._meta['max_documents']

                if collection_name in db.collection_names():
                    cls._collection = db[collection_name]
                    # The collection already exists, check if its capped
                    # options match the specified capped options
                    options = cls._collection.options()
                    if options.get('max') != max_documents or \
                            options.get('size') != max_size:
                        msg = (('Cannot create collection "%s" as a capped '
                                'collection as it already exists')
                               % cls._collection)
                        raise InvalidCollectionError(msg)
                else:
                    # Create the collection as a capped collection
                    opts = {'capped': True, 'size': max_size}
                    if max_documents:
                        opts['max'] = max_documents
                    cls._collection = db.create_collection(
                        collection_name, **opts
                    )
            else:
                cls._collection = db[collection_name]
            if cls._meta.get('auto_create_index', True):
                cls.ensure_indexes()
        return cls._collection

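    # Sketch of the `pk` alias defined above (hypothetical User class): `pk`
    # simply proxies whichever field is registered as _meta['id_field'].
    #
    #     class User(Document):
    #         email = StringField(primary_key=True)
    #
    #     user = User(email='ross@example.com')
    #     user.pk                        # reads user.email
    #     user.pk = 'new@example.com'    # writes the same field
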
-    def save(self):
-        """Save the :class:`~mongoengine.Document` to the database. If the
-        document already exists, it will be updated, otherwise it will be
-        created.
-        """
-        self.validate()
-        object_id = self.__class__.objects._collection.save(self.to_mongo())
-        self.id = self._fields['id'].to_python(object_id)

    def save(self, force_insert=False, validate=True, clean=True,
             write_concern=None, cascade=None, cascade_kwargs=None,
             _refs=None, **kwargs):
        """Save the :class:`~mongoengine.Document` to the database. If the
        document already exists, it will be updated, otherwise it will be
        created.

        :param force_insert: only try to create a new document, don't allow
            updates of existing documents
        :param validate: validates the document; set to ``False`` to skip.
        :param clean: call the document clean method, requires `validate` to
            be True.
        :param write_concern: Extra keyword arguments are passed down to
            :meth:`~pymongo.collection.Collection.save` OR
            :meth:`~pymongo.collection.Collection.insert`
            which will be used as options for the resultant
            ``getLastError`` command.  For example,
            ``save(..., write_concern={w: 2, fsync: True}, ...)`` will
            wait until at least two servers have recorded the write and
            will force an fsync on the primary server.
        :param cascade: Sets the flag for cascading saves.  You can set a
            default by setting "cascade" in the document __meta__
        :param cascade_kwargs: optional kwargs dictionary to be passed through
            to cascading saves
        :param _refs: A list of processed references used in cascading saves

        .. versionchanged:: 0.5
            In existing documents it only saves changed fields using
            set / unset.  Saves are cascaded and any
            :class:`~bson.dbref.DBRef` objects that have changes are
            saved as well.
        .. versionchanged:: 0.6
            Cascade saves are optional and default to True. If you want
            fine-grained control you can turn them off using document
            meta['cascade'] = False. You can also pass different kwargs to
            the cascade save using cascade_kwargs, which overwrites the
            existing kwargs with custom values.
        """
        signals.pre_save.send(self.__class__, document=self)

        if validate:
            self.validate(clean=clean)

        if not write_concern:
            write_concern = {}

        doc = self.to_mongo()

        created = ('_id' not in doc or self._created or force_insert)

        try:
            collection = self._get_collection()
            if created:
                if force_insert:
                    object_id = collection.insert(doc, **write_concern)
                else:
                    object_id = collection.save(doc, **write_concern)
            else:
                object_id = doc['_id']
                updates, removals = self._delta()
                # Need to add shard key to query, or you get an error
                select_dict = {'_id': object_id}
                shard_key = self.__class__._meta.get('shard_key', tuple())
                for k in shard_key:
                    actual_key = self._db_field_map.get(k, k)
                    select_dict[actual_key] = doc[actual_key]

                def is_new_object(last_error):
                    if last_error is not None:
                        updated = last_error.get("updatedExisting")
                        if updated is not None:
                            return not updated
                    return created

                update_query = {}

                if updates:
                    update_query["$set"] = updates
                if removals:
                    update_query["$unset"] = removals
                if updates or removals:
                    last_error = collection.update(select_dict, update_query,
                                                   upsert=True,
                                                   **write_concern)
                    created = is_new_object(last_error)

            cascade = (self._meta.get('cascade', True)
                       if cascade is None else cascade)
            if cascade:
                kwargs = {
                    "force_insert": force_insert,
                    "validate": validate,
                    "write_concern": write_concern,
                    "cascade": cascade
                }
                if cascade_kwargs:  # Allow granular control over cascades
                    kwargs.update(cascade_kwargs)
                kwargs['_refs'] = _refs
                self.cascade_save(**kwargs)

        except pymongo.errors.OperationFailure, err:
            message = 'Could not save document (%s)'
            if re.match('^E1100[01] duplicate key', unicode(err)):
                # E11000 - duplicate key error index
                # E11001 - duplicate key on update
                message = u'Tried to save duplicate unique keys (%s)'
                raise NotUniqueError(message % unicode(err))
            raise OperationError(message % unicode(err))
        id_field = self._meta['id_field']
        if id_field not in self._meta.get('shard_key', []):
            self[id_field] = self._fields[id_field].to_python(object_id)

        self._clear_changed_fields()
        self._created = False
        signals.post_save.send(self.__class__, document=self, created=created)
        return self

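    # A usage sketch for save() above (hypothetical Person class; the
    # write_concern keys mirror MongoDB's getLastError options):
    #
    #     person = Person(name='Ross')
    #     person.save()                                   # insert
    #     person.name = 'Ross L'
    #     person.save()                                   # delta update ($set)
    #     person.save(write_concern={'w': 2, 'fsync': True})
    #     person.save(cascade=False)                      # skip reference saves
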
    def cascade_save(self, *args, **kwargs):
        """Recursively saves any references /
           generic references on an object"""
        import fields
        _refs = kwargs.get('_refs', []) or []

        for name, cls in self._fields.items():
            if not isinstance(cls, (fields.ReferenceField,
                                    fields.GenericReferenceField)):
                continue

            ref = getattr(self, name)
            if not ref or isinstance(ref, DBRef):
                continue

            if not getattr(ref, '_changed_fields', True):
                continue

            ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data))
            if ref and ref_id not in _refs:
                _refs.append(ref_id)
                kwargs["_refs"] = _refs
                ref.save(**kwargs)
                ref._changed_fields = []

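    # Sketch of what cascade_save() walks (hypothetical classes, assuming
    # cascading is enabled): saving the referencing document also saves each
    # changed referenced document once, with _refs guarding against cycles.
    #
    #     class Employer(Document):
    #         name = StringField()
    #
    #     class Person(Document):
    #         employer = ReferenceField(Employer)
    #
    #     acme = Employer(name='ACME'); acme.save()
    #     acme.name = 'ACME Corp'          # acme now has changed fields
    #     Person(employer=acme).save()     # cascades: acme is saved too
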
    @property
    def _qs(self):
        """
        Returns the queryset to use for updating / reloading / deletions
        """
        if not hasattr(self, '__objects'):
            self.__objects = QuerySet(self, self._get_collection())
        return self.__objects

    @property
    def _object_key(self):
        """Dict to identify object in collection
        """
        select_dict = {'pk': self.pk}
        shard_key = self.__class__._meta.get('shard_key', tuple())
        for k in shard_key:
            select_dict[k] = getattr(self, k)
        return select_dict

    def update(self, **kwargs):
        """Performs an update on the :class:`~mongoengine.Document`
        A convenience wrapper to :meth:`~mongoengine.QuerySet.update`.

        Raises :class:`OperationError` if called on an object that has not
        yet been saved.
        """
        if not self.pk:
            raise OperationError('attempt to update a document not yet saved')

        # Need to add shard key to query, or you get an error
        return self._qs.filter(**self._object_key).update_one(**kwargs)

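    # Sketch of the atomic wrapper above (hypothetical BlogPost class):
    # update() routes through the queryset's update_one using the
    # shard-aware _object_key.
    #
    #     post = BlogPost.objects.get(id=post_id)
    #     post.update(set__title='New title', inc__view_count=1)
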
-    def delete(self):
-        """Delete the :class:`~mongoengine.Document` from the database. This
-        will only take effect if the document has been previously saved.
-        """
-        object_id = self._fields['id'].to_mongo(self.id)
-        self.__class__.objects(id=object_id).delete()
-
-    def validate(self):
-        """Ensure that all fields' values are valid and that required fields
-        are present.
-        """
-        # Get a list of tuples of field names and their current values
-        fields = [(field, getattr(self, name))
-                  for name, field in self._fields.items()]
-
-        # Ensure that each field is matched to a valid value
-        for field, value in fields:
-            if value is not None:
-                try:
-                    field.validate(value)
-                except (ValueError, AttributeError, AssertionError), e:
-                    raise ValidationError('Invalid value for field of type "' +
-                                          field.__class__.__name__ + '"')
-            elif field.required:
-                raise ValidationError('Field "%s" is required' % field.name)

    def delete(self, **write_concern):
        """Delete the :class:`~mongoengine.Document` from the database. This
        will only take effect if the document has been previously saved.

        :param write_concern: Extra keyword arguments are passed down which
            will be used as options for the resultant
            ``getLastError`` command.  For example,
            ``save(..., write_concern={w: 2, fsync: True}, ...)`` will
            wait until at least two servers have recorded the write and
            will force an fsync on the primary server.
        """
        signals.pre_delete.send(self.__class__, document=self)

        try:
            self._qs.filter(
                **self._object_key).delete(write_concern=write_concern)
        except pymongo.errors.OperationFailure, err:
            message = u'Could not delete document (%s)' % err.message
            raise OperationError(message)

        signals.post_delete.send(self.__class__, document=self)

    def switch_db(self, db_alias):
        """
        Temporarily switch the database for a document instance.

        Only really useful for archiving off data and calling `save()`::

            user = User.objects.get(id=user_id)
            user.switch_db('archive-db')
            user.save()

        If you need to read from another database see
        :class:`~mongoengine.context_managers.switch_db`

        :param db_alias: The database alias to use for saving the document
        """
        with switch_db(self.__class__, db_alias) as cls:
            collection = cls._get_collection()
            db = cls._get_db
        self._get_collection = lambda: collection
        self._get_db = lambda: db
        self._collection = collection
        self._created = True
        self.__objects = self._qs
        self.__objects._collection_obj = collection
        return self

    def switch_collection(self, collection_name):
        """
        Temporarily switch the collection for a document instance.

        Only really useful for archiving off data and calling `save()`::

            user = User.objects.get(id=user_id)
            user.switch_collection('old-users')
            user.save()

        If you need to read from another collection see
        :class:`~mongoengine.context_managers.switch_collection`

        :param collection_name: The collection name to use for saving the
            document
        """
        with switch_collection(self.__class__, collection_name) as cls:
            collection = cls._get_collection()
        self._get_collection = lambda: collection
        self._collection = collection
        self._created = True
        self.__objects = self._qs
        self.__objects._collection_obj = collection
        return self

    def select_related(self, max_depth=1):
        """Handles dereferencing of :class:`~bson.dbref.DBRef` objects to
        a maximum depth in order to cut down the number of queries to mongodb.

        .. versionadded:: 0.5
        """
        import dereference
        self._data = dereference.DeReference()(self._data, max_depth)
        return self

    def reload(self, max_depth=1):
        """Reloads all attributes from the database.

        .. versionadded:: 0.1.2
        .. versionchanged:: 0.6  Now chainable
        """
        id_field = self._meta['id_field']
        obj = self._qs.filter(**{id_field: self[id_field]}
                              ).limit(1).select_related(max_depth=max_depth)
        if obj:
            obj = obj[0]
        else:
            msg = "Reloaded document has been deleted"
            raise OperationError(msg)
        for field in self._fields:
            setattr(self, field, self._reload(field, obj[field]))
        if self._dynamic:
            for name in self._dynamic_fields.keys():
                setattr(self, name, self._reload(name, obj._data[name]))
        self._changed_fields = obj._changed_fields
        self._created = False
        return obj

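    # Reload sketch: reload() re-fetches every field (dereferencing DBRefs up
    # to max_depth) and resets change tracking; it raises OperationError if
    # the document has vanished server-side. Hypothetical usage:
    #
    #     post = BlogPost.objects.first()
    #     # ... another process modifies the same document ...
    #     post.reload(max_depth=2)       # chainable since 0.6
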
    def _reload(self, key, value):
        """Used by :meth:`~mongoengine.Document.reload` to ensure the
        correct instance is linked to self.
        """
        if isinstance(value, BaseDict):
            value = [(k, self._reload(k, v)) for k, v in value.items()]
            value = BaseDict(value, self, key)
        elif isinstance(value, BaseList):
            value = [self._reload(key, v) for v in value]
            value = BaseList(value, self, key)
        elif isinstance(value, (EmbeddedDocument, DynamicEmbeddedDocument)):
            value._changed_fields = []
        return value

    def to_dbref(self):
        """Returns an instance of :class:`~bson.dbref.DBRef` useful in
        `__raw__` queries."""
        if not self.pk:
            msg = "Only saved documents can have a valid dbref"
            raise OperationError(msg)
        return DBRef(self.__class__._get_collection_name(), self.pk)

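    # to_dbref() sketch (hypothetical classes): handy for raw queries against
    # reference fields.
    #
    #     ref = author.to_dbref()   # DBRef(<author collection name>, author.pk)
    #     BlogPost.objects(__raw__={'author': ref})
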
    @classmethod
    def register_delete_rule(cls, document_cls, field_name, rule):
        """This method registers the delete rules to apply when removing this
        object.
        """
        classes = [get_document(class_name)
                   for class_name in cls._subclasses
                   if class_name != cls.__name__] + [cls]
        documents = [get_document(class_name)
                     for class_name in document_cls._subclasses
                     if class_name != document_cls.__name__] + [document_cls]

        for cls in classes:
            for document_cls in documents:
                delete_rules = cls._meta.get('delete_rules') or {}
                delete_rules[(document_cls, field_name)] = rule
                cls._meta['delete_rules'] = delete_rules

    @classmethod
    def drop_collection(cls):
        """Drops the entire collection associated with this
        :class:`~mongoengine.Document` type from the database.
        """
-        db = _get_db()
-        db.drop_collection(cls._meta['collection'])
        cls._collection = None
        db = cls._get_db()
        db.drop_collection(cls._get_collection_name())

    @classmethod
    def ensure_index(cls, key_or_list, drop_dups=False, background=False,
                     **kwargs):
        """Ensure that the given indexes are in place.

        :param key_or_list: a single index key or a list of index keys (to
            construct a multi-field index); keys may be prefixed with a **+**
            or a **-** to determine the index ordering
        """
        index_spec = cls._build_index_spec(key_or_list)
        index_spec = index_spec.copy()
        fields = index_spec.pop('fields')
        index_spec['drop_dups'] = drop_dups
        index_spec['background'] = background
        index_spec.update(kwargs)

        return cls._get_collection().ensure_index(fields, **index_spec)

    @classmethod
    def ensure_indexes(cls):
        """Checks the document meta data and ensures all the indexes exist.

        .. note:: You can disable automatic index creation by setting
                  `auto_create_index` to False in the documents meta data
        """
        background = cls._meta.get('index_background', False)
        drop_dups = cls._meta.get('index_drop_dups', False)
        index_opts = cls._meta.get('index_opts') or {}
        index_cls = cls._meta.get('index_cls', True)

        collection = cls._get_collection()

        # determine if an index which we are creating includes
        # _cls as its first field; if so, we can avoid creating
        # an extra index on _cls, as mongodb will use the existing
        # index to service queries against _cls
        cls_indexed = False

        def includes_cls(fields):
            first_field = None
            if len(fields):
                if isinstance(fields[0], basestring):
                    first_field = fields[0]
                elif isinstance(fields[0], (list, tuple)) and len(fields[0]):
                    first_field = fields[0][0]
            return first_field == '_cls'

        # Ensure document-defined indexes are created
        if cls._meta['index_specs']:
            index_spec = cls._meta['index_specs']
            for spec in index_spec:
                spec = spec.copy()
                fields = spec.pop('fields')
                cls_indexed = cls_indexed or includes_cls(fields)
                opts = index_opts.copy()
                opts.update(spec)
                collection.ensure_index(fields, background=background,
                                        drop_dups=drop_dups, **opts)

        # If _cls is being used (for polymorphism), it needs an index,
        # only if another index doesn't begin with _cls
        if (index_cls and not cls_indexed and
                cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True):
            collection.ensure_index('_cls', background=background,
                                    **index_opts)

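# Index maintenance sketch (hypothetical BlogPost class, not part of the
# commit): ensure_indexes() is what _get_collection() calls when
# auto_create_index is enabled; ensure_index() builds a single ad-hoc index,
# honouring the +/- direction prefixes described above.
#
#     BlogPost.ensure_index(['+author', '-published'])  # compound index
#     BlogPost.ensure_indexes()                         # from meta['indexes']
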
class DynamicDocument(Document):
    """A Dynamic Document class allowing flexible, expandable and uncontrolled
    schemas.  As a :class:`~mongoengine.Document` subclass, acts in the same
    way as an ordinary document but has expando style properties.  Any data
    passed or set against the :class:`~mongoengine.DynamicDocument` that is
    not a field is automatically converted into a
    :class:`~mongoengine.fields.DynamicField` and data can be attributed to
    that field.

    .. note::

        There is one caveat on Dynamic Documents: fields cannot start with `_`
    """

    # The __metaclass__ attribute is removed by 2to3 when running with Python3
    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
    my_metaclass = TopLevelDocumentMetaclass
    __metaclass__ = TopLevelDocumentMetaclass

    _dynamic = True

    def __delattr__(self, *args, **kwargs):
        """Deletes the attribute by setting to None and allowing _delta to
        unset it"""
        field_name = args[0]
        if field_name in self._dynamic_fields:
            setattr(self, field_name, None)
        else:
            super(DynamicDocument, self).__delattr__(*args, **kwargs)

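# Expando behaviour sketch (hypothetical Page class, not part of the commit):
# attributes that are not declared fields become DynamicFields transparently.
#
#     class Page(DynamicDocument):
#         title = StringField()
#
#     page = Page(title='Using MongoEngine')
#     page.tags = ['mongodb', 'mongoengine']   # undeclared -> DynamicField
#     page.save()
#     del page.tags                            # unset via _delta on next save
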
class DynamicEmbeddedDocument(EmbeddedDocument):
    """A Dynamic Embedded Document class allowing flexible, expandable and
    uncontrolled schemas.  See :class:`~mongoengine.DynamicDocument` for more
    information about dynamic documents.
    """

    # The __metaclass__ attribute is removed by 2to3 when running with Python3
    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
    my_metaclass = DocumentMetaclass
    __metaclass__ = DocumentMetaclass

    _dynamic = True

    def __delattr__(self, *args, **kwargs):
        """Deletes the attribute by setting to None and allowing _delta to
        unset it"""
        field_name = args[0]
        if field_name in self._fields:
            default = self._fields[field_name].default
            if callable(default):
                default = default()
            setattr(self, field_name, default)
        else:
            setattr(self, field_name, None)

class MapReduceDocument(object):
    """A document returned from a map/reduce query.

    :param collection: An instance of :class:`~pymongo.Collection`
    :param key: Document/result key, often an instance of
                :class:`~bson.objectid.ObjectId`. If supplied as
                an ``ObjectId`` found in the given ``collection``,
                the object can be accessed via the ``object`` property.
    :param value: The result(s) for this key.

    .. versionadded:: 0.3
    """

    def __init__(self, document, collection, key, value):
        self._document = document
        self._collection = collection
        self.key = key
        self.value = value

    @property
    def object(self):
        """Lazy-load the object referenced by ``self.key``. ``self.key``
        should be the ``primary_key``.
        """
        id_field = self._document()._meta['id_field']
        id_field_type = type(id_field)

        if not isinstance(self.key, id_field_type):
            try:
                self.key = id_field_type(self.key)
            except:
                raise Exception("Could not cast key as %s" %
                                id_field_type.__name__)

        if not hasattr(self, "_key_object"):
            self._key_object = self._document.objects.with_id(self.key)
            return self._key_object
        return self._key_object
mongoengine/errors.py (new file, 126 lines)
@@ -0,0 +1,126 @@
from collections import defaultdict

from mongoengine.python_support import txt_type


__all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError',
           'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError',
           'OperationError', 'NotUniqueError', 'ValidationError')


class NotRegistered(Exception):
    pass


class InvalidDocumentError(Exception):
    pass


class LookUpError(AttributeError):
    pass


class DoesNotExist(Exception):
    pass


class MultipleObjectsReturned(Exception):
    pass


class InvalidQueryError(Exception):
    pass


class OperationError(Exception):
    pass


class NotUniqueError(OperationError):
    pass


class ValidationError(AssertionError):
    """Validation exception.

    May represent an error validating a field or a
    document containing fields with validation errors.

    :ivar errors: A dictionary of errors for fields within this
        document or list, or None if the error is for an
        individual field.
    """

    errors = {}
    field_name = None
    _message = None

    def __init__(self, message="", **kwargs):
        self.errors = kwargs.get('errors', {})
        self.field_name = kwargs.get('field_name')
        self.message = message

    def __str__(self):
        return txt_type(self.message)

    def __repr__(self):
        return '%s(%s,)' % (self.__class__.__name__, self.message)

    def __getattribute__(self, name):
        message = super(ValidationError, self).__getattribute__(name)
        if name == 'message':
            if self.field_name:
                message = '%s' % message
            if self.errors:
                message = '%s(%s)' % (message, self._format_errors())
        return message

    def _get_message(self):
        return self._message

    def _set_message(self, message):
        self._message = message

    message = property(_get_message, _set_message)

    def to_dict(self):
        """Returns a dictionary of all errors within a document

        Keys are field names or list indices and values are the
        validation error messages, or a nested dictionary of
        errors for an embedded document or list.
        """

        def build_dict(source):
            errors_dict = {}
            if not source:
                return errors_dict
            if isinstance(source, dict):
                for field_name, error in source.iteritems():
                    errors_dict[field_name] = build_dict(error)
            elif isinstance(source, ValidationError) and source.errors:
                return build_dict(source.errors)
            else:
                return unicode(source)
            return errors_dict
        if not self.errors:
            return {}
        return build_dict(self.errors)

    def _format_errors(self):
        """Returns a string listing all errors within a document"""

        def generate_key(value, prefix=''):
            if isinstance(value, list):
                value = ' '.join([generate_key(k) for k in value])
            if isinstance(value, dict):
                value = ' '.join(
                    [generate_key(v, k) for k, v in value.iteritems()])

            results = "%s.%s" % (prefix, value) if prefix else value
            return results

        error_dict = defaultdict(list)
        for k, v in self.to_dict().iteritems():
            error_dict[generate_key(v)].append(k)
        return ' '.join(["%s: %s" % (k, v) for k, v in error_dict.iteritems()])
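# ValidationError handling sketch (hypothetical document, not part of the
# file above): to_dict() nests per-field messages, while _format_errors()
# flattens them into the str() message.
#
#     try:
#         user.save()
#     except ValidationError, e:
#         e.to_dict()   # e.g. {'email': u'...per-field message...'}
#         str(e)        # base message plus '(field: error ...)' appended
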
(file diff suppressed because it is too large)

mongoengine/python_support.py (new file, 61 lines)
@@ -0,0 +1,61 @@
"""Helper functions and types to aid with Python 2.5 - 3 support."""

import sys

PY3 = sys.version_info[0] == 3
PY25 = sys.version_info[:2] == (2, 5)
UNICODE_KWARGS = int(''.join([str(x) for x in sys.version_info[:3]])) > 264

if PY3:
    import codecs
    from io import BytesIO as StringIO

    # return s converted to binary.  b('test') should be equivalent to b'test'
    def b(s):
        return codecs.latin_1_encode(s)[0]

    bin_type = bytes
    txt_type = str
else:
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO

    # Conversion to binary only necessary in Python 3
    def b(s):
        return s

    bin_type = str
    txt_type = unicode

str_types = (bin_type, txt_type)

if PY25:
    def product(*args, **kwds):
        pools = map(tuple, args) * kwds.get('repeat', 1)
        result = [[]]
        for pool in pools:
            result = [x + [y] for x in result for y in pool]
        for prod in result:
            yield tuple(prod)
    reduce = reduce
else:
    from itertools import product
    from functools import reduce


# For use with Python 2.5
# converts all keys from unicode to str for d and all nested dictionaries
def to_str_keys_recursive(d):
    if isinstance(d, list):
        for val in d:
            if isinstance(val, (dict, list)):
                to_str_keys_recursive(val)
    elif isinstance(d, dict):
        for key, val in d.items():
            if isinstance(val, (dict, list)):
                to_str_keys_recursive(val)
            if isinstance(key, unicode):
                d[str(key)] = d.pop(key)
    else:
        raise ValueError("non list/dict parameter not allowed")
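# Compatibility sketch (not part of the file above): b() and the type aliases
# let the same source run before and after 2to3. Values shown are for
# Python 2:
#
#     b('GridFS data')    # -> 'GridFS data' (str); bytes under Python 3
#     txt_type(err)       # -> unicode(err); str under Python 3
#     isinstance(name, str_types)
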
(deleted file, 343 lines)
@@ -1,343 +0,0 @@
from connection import _get_db

import pymongo


__all__ = ['queryset_manager', 'InvalidQueryError', 'InvalidCollectionError']


class InvalidQueryError(Exception):
    pass


class QuerySet(object):
    """A set of results returned from a query. Wraps a MongoDB cursor,
    providing :class:`~mongoengine.Document` objects as the results.
    """

    def __init__(self, document, collection):
        self._document = document
        self._collection = collection
        self._query = {}
        # If inheritance is allowed, only return instances and instances of
        # subclasses of the class being used
        if document._meta.get('allow_inheritance'):
            self._query = {'_types': self._document._class_name}
        self._cursor_obj = None

    def ensure_index(self, key_or_list, direction=None):
        """Ensure that the given indexes are in place.
        """
        if isinstance(key_or_list, basestring):
            # single-field indexes needn't specify a direction
            if key_or_list.startswith("-"):
                key_or_list = key_or_list[1:]
            self._collection.ensure_index(key_or_list)
        elif isinstance(key_or_list, (list, tuple)):
            print key_or_list
            self._collection.ensure_index(key_or_list)
        return self

    def __call__(self, **query):
        """Filter the selected documents by calling the
        :class:`~mongoengine.QuerySet` with a query.
        """
        query = QuerySet._transform_query(_doc_cls=self._document, **query)
        self._query.update(query)
        return self

    @property
    def _cursor(self):
        if not self._cursor_obj:
            self._cursor_obj = self._collection.find(self._query)
        return self._cursor_obj

    @classmethod
    def _translate_field_name(cls, document, parts):
        """Translate a field attribute name to a database field name.
        """
        if not isinstance(parts, (list, tuple)):
            parts = [parts]
        field_names = []
        field = None
        for field_name in parts:
            if field is None:
                # Look up first field from the document
                field = document._fields[field_name]
            else:
                # Look up subfield on the previous field
                field = field.lookup_member(field_name)
                if field is None:
                    raise InvalidQueryError('Cannot resolve field "%s"'
                                            % field_name)
            field_names.append(field.name)
        return field_names

    @classmethod
    def _transform_query(cls, _doc_cls=None, **query):
        """Transform a query from Django-style format to Mongo format.
        """
        operators = ['neq', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
                     'all', 'size', 'exists']

        mongo_query = {}
        for key, value in query.items():
            parts = key.split('__')
            # Check for an operator and transform to mongo-style if there is
            op = None
            if parts[-1] in operators:
                op = parts.pop()
                value = {'$' + op: value}

            # Switch field names to proper names [set in Field(name='foo')]
            if _doc_cls:
                parts = QuerySet._translate_field_name(_doc_cls, parts)

            key = '.'.join(parts)
            if op is None or key not in mongo_query:
                mongo_query[key] = value
            elif key in mongo_query and isinstance(mongo_query[key], dict):
                mongo_query[key].update(value)

        return mongo_query

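    # Transform sketch (hypothetical Person document, assuming its db field
    # names match the attribute names): double underscores separate field
    # parts and a trailing operator.
    #
    #     QuerySet._transform_query(_doc_cls=Person,
    #                               name='Ross', age__gte=21)
    #     # -> {'name': 'Ross', 'age': {'$gte': 21}}
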
    def first(self):
        """Retrieve the first object matching the query.
        """
        try:
            result = self[0]
        except IndexError:
            result = None
        return result

    def with_id(self, object_id):
        """Retrieve the object matching the id provided.
        """
        if not isinstance(object_id, pymongo.objectid.ObjectId):
            object_id = pymongo.objectid.ObjectId(object_id)

        result = self._collection.find_one(object_id)
        if result is not None:
            result = self._document._from_son(result)
        return result

    def next(self):
        """Wrap the result in a :class:`~mongoengine.Document` object.
        """
        return self._document._from_son(self._cursor.next())

    def count(self):
        """Count the selected elements in the query.
        """
        return self._cursor.count()

    def __len__(self):
        return self.count()

    def limit(self, n):
        """Limit the number of returned documents to `n`. This may also be
        achieved using array-slicing syntax (e.g. ``User.objects[:5]``).
        """
        self._cursor.limit(n)
        # Return self to allow chaining
        return self

    def skip(self, n):
        """Skip `n` documents before returning the results. This may also be
        achieved using array-slicing syntax (e.g. ``User.objects[5:]``).
        """
        self._cursor.skip(n)
        return self

    def __getitem__(self, key):
        """Support skip and limit using getitem and slicing syntax.
        """
        # Slice provided
        if isinstance(key, slice):
            self._cursor_obj = self._cursor[key]
            # Allow further QuerySet modifications to be performed
            return self
        # Integer index provided
        elif isinstance(key, int):
            return self._document._from_son(self._cursor[key])

    def order_by(self, *keys):
        """Order the :class:`~mongoengine.queryset.QuerySet` by the keys. The
        order may be specified by prepending each of the keys by a + or a -.
        Ascending order is assumed.
        """
        key_list = []
        for key in keys:
            direction = pymongo.ASCENDING
            if key[0] == '-':
                direction = pymongo.DESCENDING
            if key[0] in ('-', '+'):
                key = key[1:]
            key_list.append((key, direction))

        self._cursor.sort(key_list)
        return self

    def explain(self, format=False):
        """Return an explain plan record for the
        :class:`~mongoengine.queryset.QuerySet`\ 's cursor.
        """
        plan = self._cursor.explain()
        if format:
            import pprint
            plan = pprint.pformat(plan)
        return plan

    def delete(self):
        """Delete the documents matched by the query.
        """
        self._collection.remove(self._query)

    def __iter__(self):
        return self

    def exec_js(self, code, *fields, **options):
        """Execute a Javascript function on the server. A list of fields may
        be provided, which will be translated to their correct names and
        supplied as the arguments to the function. A few extra variables are
        added to the function's scope: ``collection``, which is the name of
        the collection in use; ``query``, which is an object representing the
        current query; and ``options``, which is an object containing any
        options specified as keyword arguments.
        """
        fields = [QuerySet._translate_field_name(self._document, f)
                  for f in fields]
        collection = self._document._meta['collection']
        scope = {
            'collection': collection,
            'query': self._query,
            'options': options or {},
        }
        code = pymongo.code.Code(code, scope=scope)

        db = _get_db()
        return db.eval(code, *fields)

    def sum(self, field):
        """Sum over the values of the specified field.
        """
        sum_func = """
            function(sumField) {
                var total = 0.0;
                db[collection].find(query).forEach(function(doc) {
                    total += (doc[sumField] || 0.0);
                });
                return total;
            }
        """
        return self.exec_js(sum_func, field)

    def average(self, field):
        """Average over the values of the specified field.
        """
        average_func = """
            function(averageField) {
                var total = 0.0;
                var num = 0;
                db[collection].find(query).forEach(function(doc) {
                    if (doc[averageField]) {
                        total += doc[averageField];
                        num += 1;
                    }
                });
                return total / num;
            }
        """
        return self.exec_js(average_func, field)

    def item_frequencies(self, list_field, normalize=False):
        """Returns a dictionary of all items present in a list field across
        the whole queried set of documents, and their corresponding frequency.
        This is useful for generating tag clouds, or searching documents.
        """
        freq_func = """
            function(listField) {
                if (options.normalize) {
                    var total = 0.0;
                    db[collection].find(query).forEach(function(doc) {
                        total += doc[listField].length;
                    });
                }

                var frequencies = {};
                var inc = 1.0;
                if (options.normalize) {
                    inc /= total;
                }
                db[collection].find(query).forEach(function(doc) {
                    doc[listField].forEach(function(item) {
                        frequencies[item] = inc + (frequencies[item] || 0);
                    });
                });
                return frequencies;
            }
        """
        return self.exec_js(freq_func, list_field, normalize=normalize)

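    # Server-side JS sketch (hypothetical Page document with a list field;
    # not part of the file above):
    #
    #     Page.objects.sum('hits')        # total of doc['hits'] across docs
    #     Page.objects.average('hits')
    #     Page.objects.item_frequencies('tags', normalize=True)
    #     # -> e.g. {u'mongodb': 0.5, u'python': 0.5}
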
class InvalidCollectionError(Exception):
    pass


class QuerySetManager(object):

    def __init__(self, manager_func=None):
        self._manager_func = manager_func
        self._collection = None

    def __get__(self, instance, owner):
        """Descriptor for instantiating a new QuerySet object when
        Document.objects is accessed.
        """
        if instance is not None:
            # Document class being used rather than a document object
            return self

        if self._collection is None:
            db = _get_db()
            collection = owner._meta['collection']

            # Create collection as a capped collection if specified
            if owner._meta['max_size'] or owner._meta['max_documents']:
                # Get max document limit and max byte size from meta
                max_size = owner._meta['max_size'] or 10000000  # 10MB default
                max_documents = owner._meta['max_documents']

                if collection in db.collection_names():
                    self._collection = db[collection]
                    # The collection already exists, check if its capped
                    # options match the specified capped options
                    options = self._collection.options()
                    if options.get('max') != max_documents or \
                            options.get('size') != max_size:
                        msg = ('Cannot create collection "%s" as a capped '
                               'collection as it already exists') % collection
                        raise InvalidCollectionError(msg)
                else:
                    # Create the collection as a capped collection
                    opts = {'capped': True, 'size': max_size}
                    if max_documents:
                        opts['max'] = max_documents
                    self._collection = db.create_collection(collection, opts)
            else:
                self._collection = db[collection]

        # owner is the document that contains the QuerySetManager
        queryset = QuerySet(owner, self._collection)
        if self._manager_func:
            queryset = self._manager_func(queryset)
        return queryset


def queryset_manager(func):
    """Decorator that allows you to define custom QuerySet managers on
    :class:`~mongoengine.Document` classes. The manager must be a function
    that accepts a :class:`~mongoengine.queryset.QuerySet` as its only
    argument, and returns a :class:`~mongoengine.queryset.QuerySet`, probably
    the same one but modified in some way.
    """
    return QuerySetManager(func)
mongoengine/queryset/__init__.py (new file, 11 lines)
@@ -0,0 +1,11 @@
from mongoengine.errors import (DoesNotExist, MultipleObjectsReturned,
                                InvalidQueryError, OperationError,
                                NotUniqueError)
from mongoengine.queryset.field_list import *
from mongoengine.queryset.manager import *
from mongoengine.queryset.queryset import *
from mongoengine.queryset.transform import *
from mongoengine.queryset.visitor import *

__all__ = (field_list.__all__ + manager.__all__ + queryset.__all__ +
           transform.__all__ + visitor.__all__)
mongoengine/queryset/field_list.py (new file, 85 lines)
@@ -0,0 +1,85 @@
__all__ = ('QueryFieldList',)


class QueryFieldList(object):
    """Object that handles combinations of .only() and .exclude() calls"""
    ONLY = 1
    EXCLUDE = 0

    def __init__(self, fields=None, value=ONLY, always_include=None,
                 _only_called=False):
        """The QueryFieldList builder

        :param fields: A list of fields used in `.only()` or `.exclude()`
        :param value: How to handle the fields; either `ONLY` or `EXCLUDE`
        :param always_include: Any fields to always include eg `_cls`
        :param _only_called: Has `.only()` been called?  If so it is a set of
            fields, otherwise it performs a union.
        """
        self.value = value
        self.fields = set(fields or [])
        self.always_include = set(always_include or [])
        self._id = None
        self._only_called = _only_called
        self.slice = {}

    def __add__(self, f):
        if isinstance(f.value, dict):
            for field in f.fields:
                self.slice[field] = f.value
            if not self.fields:
                self.fields = f.fields
        elif not self.fields:
            self.fields = f.fields
            self.value = f.value
            self.slice = {}
        elif self.value is self.ONLY and f.value is self.ONLY:
            self._clean_slice()
            if self._only_called:
                self.fields = self.fields.union(f.fields)
            else:
                self.fields = f.fields
        elif self.value is self.EXCLUDE and f.value is self.EXCLUDE:
            self.fields = self.fields.union(f.fields)
            self._clean_slice()
        elif self.value is self.ONLY and f.value is self.EXCLUDE:
            self.fields -= f.fields
            self._clean_slice()
        elif self.value is self.EXCLUDE and f.value is self.ONLY:
            self.value = self.ONLY
            self.fields = f.fields - self.fields
            self._clean_slice()

        if '_id' in f.fields:
            self._id = f.value

        if self.always_include:
            if self.value is self.ONLY and self.fields:
                self.fields = self.fields.union(self.always_include)
            else:
                self.fields -= self.always_include

        if getattr(f, '_only_called', False):
            self._only_called = True
        return self

    def __nonzero__(self):
        return bool(self.fields)

    def as_dict(self):
        field_list = dict((field, self.value) for field in self.fields)
        if self.slice:
            field_list.update(self.slice)
        if self._id is not None:
            field_list['_id'] = self._id
        return field_list

    def reset(self):
        self.fields = set([])
        self.slice = {}
        self.value = self.ONLY

    def _clean_slice(self):
        if self.slice:
            for field in set(self.slice.keys()) - self.fields:
                del self.slice[field]
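# Combination sketch (not part of the file above): an only() projection
# followed by an exclude() intersects down to the remaining fields. Field
# names are hypothetical; QueryFieldList is the class defined above.
#
#     fl = QueryFieldList(['name', 'age'], value=QueryFieldList.ONLY,
#                         _only_called=True)
#     fl += QueryFieldList(['age'], value=QueryFieldList.EXCLUDE)
#     fl.as_dict()   # -> {'name': 1}
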
mongoengine/queryset/manager.py (new file, 57 lines)
@@ -0,0 +1,57 @@
from functools import partial
from mongoengine.queryset.queryset import QuerySet

__all__ = ('queryset_manager', 'QuerySetManager')


class QuerySetManager(object):
    """
    The default QuerySet Manager.

    Custom QuerySet Manager functions can extend this class and users can
    add extra queryset functionality.  Any custom manager methods must accept
    a :class:`~mongoengine.Document` class as its first argument, and a
    :class:`~mongoengine.queryset.QuerySet` as its second argument.

    The method function should return a
    :class:`~mongoengine.queryset.QuerySet`, probably the same one that was
    passed in, but modified in some way.
    """

    get_queryset = None
    default = QuerySet

    def __init__(self, queryset_func=None):
        if queryset_func:
            self.get_queryset = queryset_func

    def __get__(self, instance, owner):
        """Descriptor for instantiating a new QuerySet object when
        Document.objects is accessed.
        """
        if instance is not None:
            # Document class being used rather than a document object
            return self

        # owner is the document that contains the QuerySetManager
        queryset_class = owner._meta.get('queryset_class', self.default)
        queryset = queryset_class(owner, owner._get_collection())
        if self.get_queryset:
            arg_count = self.get_queryset.func_code.co_argcount
            if arg_count == 1:
                queryset = self.get_queryset(queryset)
            elif arg_count == 2:
                queryset = self.get_queryset(owner, queryset)
            else:
                queryset = partial(self.get_queryset, owner, queryset)
        return queryset


def queryset_manager(func):
    """Decorator that allows you to define custom QuerySet managers on
    :class:`~mongoengine.Document` classes. The manager must be a function
    that accepts a :class:`~mongoengine.Document` class as its first argument,
    and a :class:`~mongoengine.queryset.QuerySet` as its second argument. The
    method function should return a :class:`~mongoengine.queryset.QuerySet`,
    probably the same one that was passed in, but modified in some way.
    """
    return QuerySetManager(func)
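# Custom manager sketch (hypothetical BlogPost class, not part of the file
# above): the decorated function takes the document class and a queryset and
# returns a (possibly filtered) queryset; it is then accessed as a class
# attribute, like `objects`.
#
#     class BlogPost(Document):
#         published = BooleanField(default=False)
#
#         @queryset_manager
#         def live_posts(doc_cls, queryset):
#             return queryset.filter(published=True)
#
#     BlogPost.live_posts.count()
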
mongoengine/queryset/queryset.py (new file, 1494 lines)
(file diff suppressed because it is too large)

mongoengine/queryset/transform.py (new file, 315 lines)
@@ -0,0 +1,315 @@
from collections import defaultdict

import pymongo
from bson import SON

from mongoengine.common import _import_class
from mongoengine.errors import InvalidQueryError, LookUpError

__all__ = ('query', 'update')


COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
                        'all', 'size', 'exists', 'not')
GEO_OPERATORS = ('within_distance', 'within_spherical_distance',
                 'within_box', 'within_polygon', 'near', 'near_sphere',
                 'max_distance', 'geo_within', 'geo_within_box',
                 'geo_within_polygon', 'geo_within_center',
                 'geo_within_sphere', 'geo_intersects')
STRING_OPERATORS = ('contains', 'icontains', 'startswith',
                    'istartswith', 'endswith', 'iendswith',
                    'exact', 'iexact')
CUSTOM_OPERATORS = ('match',)
MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS +
                   STRING_OPERATORS + CUSTOM_OPERATORS)

UPDATE_OPERATORS = ('set', 'unset', 'inc', 'dec', 'pop', 'push',
                    'push_all', 'pull', 'pull_all', 'add_to_set',
                    'set_on_insert')


def query(_doc_cls=None, _field_operation=False, **query):
    """Transform a query from Django-style format to Mongo format.
    """
    mongo_query = {}
    merge_query = defaultdict(list)
    for key, value in sorted(query.items()):
        if key == "__raw__":
            mongo_query.update(value)
            continue

        parts = key.split('__')
        indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()]
        parts = [part for part in parts if not part.isdigit()]
        # Check for an operator and transform to mongo-style if there is
        op = None
        if parts[-1] in MATCH_OPERATORS:
            op = parts.pop()

        negate = False
        if parts[-1] == 'not':
            parts.pop()
            negate = True

        if _doc_cls:
            # Switch field names to proper names [set in Field(name='foo')]
            try:
                fields = _doc_cls._lookup_field(parts)
            except Exception, e:
                raise InvalidQueryError(e)
            parts = []

            cleaned_fields = []
            for field in fields:
                append_field = True
                if isinstance(field, basestring):
                    parts.append(field)
                    append_field = False
                else:
                    parts.append(field.db_field)
                if append_field:
                    cleaned_fields.append(field)

            # Convert value to proper value
            field = cleaned_fields[-1]

            singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not']
            singular_ops += STRING_OPERATORS
            if op in singular_ops:
                if isinstance(field, basestring):
                    if (op in STRING_OPERATORS and
                            isinstance(value, basestring)):
                        StringField = _import_class('StringField')
                        value = StringField.prepare_query_value(op, value)
                    else:
                        value = field
                else:
                    value = field.prepare_query_value(op, value)
            elif op in ('in', 'nin', 'all', 'near') and \
                    not isinstance(value, dict):
                # 'in', 'nin' and 'all' require a list of values
                value = [field.prepare_query_value(op, v) for v in value]

        # if op and op not in COMPARISON_OPERATORS:
        if op:
            if op in GEO_OPERATORS:
                value = _geo_operator(field, op, value)
            elif op in CUSTOM_OPERATORS:
                if op == 'match':
                    value = {"$elemMatch": value}
                else:
                    raise NotImplementedError("Custom method '%s' has not "
                                              "been implemented" % op)
            elif op not in STRING_OPERATORS:
                value = {'$' + op: value}

        if negate:
            value = {'$not': value}

        for i, part in indices:
            parts.insert(i, part)
        key = '.'.join(parts)
        if op is None or key not in mongo_query:
            mongo_query[key] = value
        elif key in mongo_query:
            if key in mongo_query and isinstance(mongo_query[key], dict):
                mongo_query[key].update(value)
                # $maxDistance needs to come last - convert to SON
                if '$maxDistance' in mongo_query[key]:
                    value_dict = mongo_query[key]
                    value_son = SON()
                    for k, v in value_dict.iteritems():
                        if k == '$maxDistance':
                            continue
                        value_son[k] = v
                    value_son['$maxDistance'] = value_dict['$maxDistance']
                    mongo_query[key] = value_son
            else:
                # Store for manually merging later
                merge_query[key].append(value)

    # The queryset has been filtered in such a way we must manually merge
    for k, v in merge_query.items():
        merge_query[k].append(mongo_query[k])
        del mongo_query[k]
        if isinstance(v, list):
            value = [{k: val} for val in v]
            if '$and' in mongo_query.keys():
                mongo_query['$and'].append(value)
            else:
                mongo_query['$and'] = value

    return mongo_query

|
|
||||||
|
|
||||||
|
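A minimal usage sketch of the query transform above (an editor's illustration, not part of this changeset); the `Person` document and its fields are hypothetical, and the import path follows the module layout introduced in this diff:

    from mongoengine import Document, IntField, StringField
    from mongoengine.queryset import transform

    class Person(Document):
        name = StringField()
        age = IntField()

    # Django-style keyword arguments become a PyMongo filter document:
    transform.query(Person, age__gte=18)      # {'age': {'$gte': 18}}
    transform.query(Person, age__not__gt=50)  # {'age': {'$not': {'$gt': 50}}}

Note how `not` is popped off the parts list and re-applied last, wrapping the already-transformed operator in `$not`.
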
def update(_doc_cls=None, **update):
    """Transform an update spec from Django-style format to Mongo format.
    """
    mongo_update = {}
    for key, value in update.items():
        if key == "__raw__":
            mongo_update.update(value)
            continue
        parts = key.split('__')
        # Check for an operator and transform to mongo-style if there is
        op = None
        if parts[0] in UPDATE_OPERATORS:
            op = parts.pop(0)
            # Convert Pythonic names to Mongo equivalents
            if op in ('push_all', 'pull_all'):
                op = op.replace('_all', 'All')
            elif op == 'dec':
                # Support decrement by flipping a positive value's sign
                # and using 'inc'
                op = 'inc'
                if value > 0:
                    value = -value
            elif op == 'add_to_set':
                op = 'addToSet'
            elif op == 'set_on_insert':
                op = "setOnInsert"

        match = None
        if parts[-1] in COMPARISON_OPERATORS:
            match = parts.pop()

        if _doc_cls:
            # Switch field names to proper names [set in Field(name='foo')]
            try:
                fields = _doc_cls._lookup_field(parts)
            except Exception, e:
                raise InvalidQueryError(e)
            parts = []

            cleaned_fields = []
            for field in fields:
                append_field = True
                if isinstance(field, basestring):
                    # Convert the S operator to $
                    if field == 'S':
                        field = '$'
                    parts.append(field)
                    append_field = False
                else:
                    parts.append(field.db_field)
                if append_field:
                    cleaned_fields.append(field)

            # Convert value to proper value
            field = cleaned_fields[-1]

            if op in (None, 'set', 'push', 'pull'):
                if field.required or value is not None:
                    value = field.prepare_query_value(op, value)
            elif op in ('pushAll', 'pullAll'):
                value = [field.prepare_query_value(op, v) for v in value]
            elif op == 'addToSet':
                if isinstance(value, (list, tuple, set)):
                    value = [field.prepare_query_value(op, v) for v in value]
                elif field.required or value is not None:
                    value = field.prepare_query_value(op, value)

        if match:
            match = '$' + match
            value = {match: value}

        key = '.'.join(parts)

        if not op:
            raise InvalidQueryError("Updates must supply an operation "
                                    "eg: set__FIELD=value")

        if 'pull' in op and '.' in key:
            # Dot syntax doesn't work with pull operations - they use
            # nested dict syntax instead
            if op == 'pullAll':
                raise InvalidQueryError("pullAll operations only support "
                                        "a single field depth")

            parts.reverse()
            for key in parts:
                value = {key: value}
        elif op == 'addToSet' and isinstance(value, list):
            value = {key: {"$each": value}}
        else:
            value = {key: value}
        key = '$' + op

        if key not in mongo_update:
            mongo_update[key] = value
        elif isinstance(mongo_update[key], dict):
            mongo_update[key].update(value)

    return mongo_update

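A companion sketch for the update transform (again an editor's illustration, not part of the diff; `BlogPost` is hypothetical):

    from mongoengine import Document, IntField, ListField, StringField
    from mongoengine.queryset import transform

    class BlogPost(Document):
        title = StringField()
        view_count = IntField()
        tags = ListField(StringField())

    transform.update(BlogPost, inc__view_count=1, push__tags='mongodb')
    # {'$inc': {'view_count': 1}, '$push': {'tags': 'mongodb'}}
    transform.update(BlogPost, dec__view_count=2)
    # {'$inc': {'view_count': -2}}  -- 'dec' is rewritten to a negated '$inc'
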
def _geo_operator(field, op, value):
    """Helper to return the query for a given geo query"""
    if field._geo_index == pymongo.GEO2D:
        if op == "within_distance":
            value = {'$within': {'$center': value}}
        elif op == "within_spherical_distance":
            value = {'$within': {'$centerSphere': value}}
        elif op == "within_polygon":
            value = {'$within': {'$polygon': value}}
        elif op == "near":
            value = {'$near': value}
        elif op == "near_sphere":
            value = {'$nearSphere': value}
        elif op == 'within_box':
            value = {'$within': {'$box': value}}
        elif op == "max_distance":
            value = {'$maxDistance': value}
        else:
            raise NotImplementedError("Geo method '%s' has not "
                                      "been implemented for a GeoPointField" % op)
    else:
        if op == "geo_within":
            value = {"$geoWithin": _infer_geometry(value)}
        elif op == "geo_within_box":
            value = {"$geoWithin": {"$box": value}}
        elif op == "geo_within_polygon":
            value = {"$geoWithin": {"$polygon": value}}
        elif op == "geo_within_center":
            value = {"$geoWithin": {"$center": value}}
        elif op == "geo_within_sphere":
            value = {"$geoWithin": {"$centerSphere": value}}
        elif op == "geo_intersects":
            value = {"$geoIntersects": _infer_geometry(value)}
        elif op == "near":
            value = {'$near': _infer_geometry(value)}
        elif op == "max_distance":
            value = {'$maxDistance': value}
        else:
            raise NotImplementedError("Geo method '%s' has not "
                                      "been implemented for a %s " % (op, field._name))
    return value

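An editor's sketch of the two branches above; the `FakeGeoPointField` is a hypothetical duck-typed stand-in (the helper only reads `_geo_index` and `_name`), not a real mongoengine field:

    import pymongo

    class FakeGeoPointField(object):
        # hypothetical stand-in for a GeoPointField with a legacy 2D index
        _geo_index = pymongo.GEO2D
        _name = 'location'

    _geo_operator(FakeGeoPointField(), 'within_distance', [[40, 5], 10])
    # {'$within': {'$center': [[40, 5], 10]}}
    _geo_operator(FakeGeoPointField(), 'near', [40, 5])
    # {'$near': [40, 5]}
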
def _infer_geometry(value):
    """Helper method that tries to infer the $geometry shape for a given value"""
    if isinstance(value, dict):
        if "$geometry" in value:
            return value
        elif 'coordinates' in value and 'type' in value:
            return {"$geometry": value}
        raise InvalidQueryError("Invalid $geometry dictionary should have "
                                "type and coordinates keys")
    elif isinstance(value, (list, set)):
        try:
            value[0][0][0]
            return {"$geometry": {"type": "Polygon", "coordinates": value}}
        except (TypeError, IndexError):
            pass
        try:
            value[0][0]
            return {"$geometry": {"type": "LineString", "coordinates": value}}
        except (TypeError, IndexError):
            pass
        try:
            value[0]
            return {"$geometry": {"type": "Point", "coordinates": value}}
        except (TypeError, IndexError):
            pass

    raise InvalidQueryError("Invalid $geometry data. Can be either a dictionary "
                            "or (nested) lists of coordinate(s)")
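The nesting-depth probes above map list depth onto GeoJSON types; an editor's illustration of the resulting shapes:

    _infer_geometry([40, 5])
    # {'$geometry': {'type': 'Point', 'coordinates': [40, 5]}}
    _infer_geometry([[40, 5], [41, 6]])
    # {'$geometry': {'type': 'LineString', 'coordinates': [[40, 5], [41, 6]]}}
    _infer_geometry([[[40, 5], [41, 6], [41, 5], [40, 5]]])
    # {'$geometry': {'type': 'Polygon',
    #                'coordinates': [[[40, 5], [41, 6], [41, 5], [40, 5]]]}}
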
155 mongoengine/queryset/visitor.py Normal file
@@ -0,0 +1,155 @@
import copy

from mongoengine.errors import InvalidQueryError
from mongoengine.python_support import product, reduce

from mongoengine.queryset import transform

__all__ = ('Q',)


class QNodeVisitor(object):
    """Base visitor class for visiting Q-object nodes in a query tree.
    """

    def visit_combination(self, combination):
        """Called by QCombination objects.
        """
        return combination

    def visit_query(self, query):
        """Called by (New)Q objects.
        """
        return query


class SimplificationVisitor(QNodeVisitor):
    """Simplifies query trees by combining unnecessary 'and' connection nodes
    into a single Q-object.
    """

    def visit_combination(self, combination):
        if combination.operation == combination.AND:
            # The simplification only applies to 'simple' queries
            if all(isinstance(node, Q) for node in combination.children):
                queries = [n.query for n in combination.children]
                return Q(**self._query_conjunction(queries))
        return combination

    def _query_conjunction(self, queries):
        """Merges query dicts - effectively &ing them together.
        """
        query_ops = set()
        combined_query = {}
        for query in queries:
            ops = set(query.keys())
            # Make sure that the same operation isn't applied more than once
            # to a single field
            intersection = ops.intersection(query_ops)
            if intersection:
                msg = 'Duplicate query conditions: '
                raise InvalidQueryError(msg + ', '.join(intersection))

            query_ops.update(ops)
            combined_query.update(copy.deepcopy(query))
        return combined_query

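An editor's sketch of the simplification step (not part of the changeset): AND-ed simple Q objects collapse into a single Q, and duplicate conditions on the same field are rejected:

    q = Q(name='Ross') & Q(age=30)              # a QCombination of two Qs
    simplified = q.accept(SimplificationVisitor())
    simplified.query                            # {'name': 'Ross', 'age': 30}

    (Q(age=30) & Q(age=40)).accept(SimplificationVisitor())
    # raises InvalidQueryError: Duplicate query conditions: age
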
class QueryCompilerVisitor(QNodeVisitor):
    """Compiles the nodes in a query tree to a PyMongo-compatible query
    dictionary.
    """

    def __init__(self, document):
        self.document = document

    def visit_combination(self, combination):
        operator = "$and"
        if combination.operation == combination.OR:
            operator = "$or"
        return {operator: combination.children}

    def visit_query(self, query):
        return transform.query(self.document, **query.query)


class QNode(object):
    """Base class for nodes in query trees.
    """

    AND = 0
    OR = 1

    def to_query(self, document):
        query = self.accept(SimplificationVisitor())
        query = query.accept(QueryCompilerVisitor(document))
        return query

    def accept(self, visitor):
        raise NotImplementedError

    def _combine(self, other, operation):
        """Combine this node with another node into a QCombination object.
        """
        if getattr(other, 'empty', True):
            return self

        if self.empty:
            return other

        return QCombination(operation, [self, other])

    @property
    def empty(self):
        return False

    def __or__(self, other):
        return self._combine(other, self.OR)

    def __and__(self, other):
        return self._combine(other, self.AND)

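Note the short-circuit in `_combine`: an empty Q is the identity for both `&` and `|`, so no QCombination node is built for it (editor's illustration):

    (Q() & Q(name='Ross')).query   # {'name': 'Ross'} -- the empty node is dropped
    (Q(name='Ross') | Q()).query   # {'name': 'Ross'}
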
class QCombination(QNode):
    """Represents the combination of several conditions by a given logical
    operator.
    """

    def __init__(self, operation, children):
        self.operation = operation
        self.children = []
        for node in children:
            # If the child is a combination of the same type, we can merge its
            # children directly into this combination's children
            if isinstance(node, QCombination) and node.operation == operation:
                self.children += node.children
            else:
                self.children.append(node)

    def accept(self, visitor):
        for i in range(len(self.children)):
            if isinstance(self.children[i], QNode):
                self.children[i] = self.children[i].accept(visitor)

        return visitor.visit_combination(self)

    @property
    def empty(self):
        return not bool(self.children)

class Q(QNode):
    """A simple query object, used in a query tree to build up more complex
    query structures.
    """

    def __init__(self, **query):
        self.query = query

    def accept(self, visitor):
        return visitor.visit_query(self)

    @property
    def empty(self):
        return not bool(self.query)
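Putting the tree together, `to_query` runs simplification and then compiles against a document class (an editor's sketch; `User` is hypothetical):

    from mongoengine import Document, IntField, StringField

    class User(Document):
        name = StringField()
        age = IntField()

    (Q(name='Ross') | Q(age__gte=30)).to_query(User)
    # {'$or': [{'name': 'Ross'}, {'age': {'$gte': 30}}]}
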
46 mongoengine/signals.py Normal file
@@ -0,0 +1,46 @@
# -*- coding: utf-8 -*-

__all__ = ['pre_init', 'post_init', 'pre_save', 'post_save',
           'pre_delete', 'post_delete',
           'pre_bulk_insert', 'post_bulk_insert']

signals_available = False
try:
    from blinker import Namespace
    signals_available = True
except ImportError:
    class Namespace(object):
        def signal(self, name, doc=None):
            return _FakeSignal(name, doc)

    class _FakeSignal(object):
        """If blinker is unavailable, create a fake class with the same
        interface that allows sending of signals but will fail with an
        error on anything else. Instead of doing anything on send, it
        will just ignore the arguments and do nothing instead.
        """

        def __init__(self, name, doc=None):
            self.name = name
            self.__doc__ = doc

        def _fail(self, *args, **kwargs):
            raise RuntimeError('signalling support is unavailable '
                               'because the blinker library is '
                               'not installed.')
        send = lambda *a, **kw: None
        connect = disconnect = has_receivers_for = receivers_for = \
            temporarily_connected_to = _fail
        del _fail

# the namespace for code signals. If you are not mongoengine code, do
# not put signals in here. Create your own namespace instead.
_signals = Namespace()

pre_init = _signals.signal('pre_init')
post_init = _signals.signal('post_init')
pre_save = _signals.signal('pre_save')
post_save = _signals.signal('post_save')
pre_delete = _signals.signal('pre_delete')
post_delete = _signals.signal('post_delete')
pre_bulk_insert = _signals.signal('pre_bulk_insert')
post_bulk_insert = _signals.signal('post_bulk_insert')
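A minimal sketch of connecting to one of these signals (an editor's illustration assuming blinker is installed; `Author` and the handler are hypothetical):

    from mongoengine import Document, StringField, signals

    class Author(Document):
        name = StringField()

    def on_post_save(sender, document, **kwargs):
        # receives the document class as sender and the instance as a kwarg
        print("saved %s" % document.name)

    signals.post_save.connect(on_post_save, sender=Author)
    Author(name="Ross").save()   # triggers on_post_save
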
54 python-mongoengine.spec Normal file
@@ -0,0 +1,54 @@
# sitelib for noarch packages, sitearch for others (remove the unneeded one)
%{!?python_sitelib: %global python_sitelib %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())")}
%{!?python_sitearch: %global python_sitearch %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib(1))")}

%define srcname mongoengine

Name:           python-%{srcname}
Version:        0.8.1
Release:        1%{?dist}
Summary:        A Python Document-Object Mapper for working with MongoDB

Group:          Development/Libraries
License:        MIT
URL:            https://github.com/MongoEngine/mongoengine
Source0:        %{srcname}-%{version}.tar.bz2

BuildRequires:  python-devel
BuildRequires:  python-setuptools

Requires:       mongodb
Requires:       pymongo
Requires:       python-blinker
Requires:       python-imaging


%description
MongoEngine is an ORM-like layer on top of PyMongo.

%prep
%setup -q -n %{srcname}-%{version}


%build
# Remove CFLAGS=... for noarch packages (unneeded)
CFLAGS="$RPM_OPT_FLAGS" %{__python} setup.py build


%install
rm -rf $RPM_BUILD_ROOT
%{__python} setup.py install -O1 --skip-build --root $RPM_BUILD_ROOT

%clean
rm -rf $RPM_BUILD_ROOT

%files
%defattr(-,root,root,-)
%doc docs AUTHORS LICENSE README.rst
# For noarch packages: sitelib
%{python_sitelib}/*
# For arch-specific packages: sitearch
# %{python_sitearch}/*

%changelog
* See: http://docs.mongoengine.org/en/latest/changelog.html
1 requirements.txt Normal file
@@ -0,0 +1 @@
pymongo
11 setup.cfg Normal file
@@ -0,0 +1,11 @@
[nosetests]
verbosity = 3
detailed-errors = 1
#with-coverage = 1
#cover-erase = 1
#cover-html = 1
#cover-html-dir = ../htmlcov
#cover-package = mongoengine
py3where = build
where = tests
#tests = document/__init__.py
60 setup.py
@@ -1,36 +1,80 @@
-from setuptools import setup
+import os
+import sys
+from setuptools import setup, find_packages

-VERSION = '0.1.1'
+# Hack to silence atexit traceback in newer python versions
+try:
+    import multiprocessing
+except ImportError:
+    pass

-DESCRIPTION = "A Python Document-Object Mapper for working with MongoDB"
+DESCRIPTION = 'MongoEngine is a Python Object-Document ' + \
+'Mapper for working with MongoDB.'

 LONG_DESCRIPTION = None
 try:
     LONG_DESCRIPTION = open('README.rst').read()
 except:
     pass
+
+
+def get_version(version_tuple):
+    if not isinstance(version_tuple[-1], int):
+        return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1]
+    return '.'.join(map(str, version_tuple))
+
+# Dirty hack to get version number from monogengine/__init__.py - we can't
+# import it as it depends on PyMongo and PyMongo isn't installed until this
+# file is read
+init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py')
+version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0]
+
+VERSION = get_version(eval(version_line.split('=')[-1]))
+print(VERSION)

 CLASSIFIERS = [
     'Development Status :: 4 - Beta',
     'Intended Audience :: Developers',
     'License :: OSI Approved :: MIT License',
     'Operating System :: OS Independent',
     'Programming Language :: Python',
+    "Programming Language :: Python :: 2",
+    "Programming Language :: Python :: 2.6",
+    "Programming Language :: Python :: 2.7",
+    "Programming Language :: Python :: 3",
+    "Programming Language :: Python :: 3.1",
+    "Programming Language :: Python :: 3.2",
+    "Programming Language :: Python :: Implementation :: CPython",
     'Topic :: Database',
     'Topic :: Software Development :: Libraries :: Python Modules',
 ]

+extra_opts = {}
+if sys.version_info[0] == 3:
+    extra_opts['use_2to3'] = True
+    extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2==2.6']
+    extra_opts['packages'] = find_packages(exclude=('tests',))
+    if "test" in sys.argv or "nosetests" in sys.argv:
+        extra_opts['packages'].append("tests")
+        extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]}
+else:
+    extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'PIL', 'jinja2==2.6']
+    extra_opts['packages'] = find_packages(exclude=('tests',))
+
 setup(name='mongoengine',
       version=VERSION,
-      packages=['mongoengine'],
       author='Harry Marr',
       author_email='harry.marr@{nospam}gmail.com',
-      url='http://hmarr.com/mongoengine/',
+      maintainer="Ross Lawley",
+      maintainer_email="ross.lawley@{nospam}gmail.com",
+      url='http://mongoengine.org/',
+      download_url='https://github.com/MongoEngine/mongoengine/tarball/master',
       license='MIT',
+      include_package_data=True,
       description=DESCRIPTION,
       long_description=LONG_DESCRIPTION,
       platforms=['any'],
       classifiers=CLASSIFIERS,
-      install_requires=['pymongo'],
-      test_suite='tests',
+      install_requires=['pymongo>=2.5'],
+      test_suite='nose.collector',
+      **extra_opts
 )
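The new `get_version` helper stringifies the VERSION tuple without importing the package; an editor's illustration of its two branches:

    get_version((0, 8, 1))       # '0.8.1'
    get_version((0, 8, 0, '+'))  # '0.8.0+' -- a non-int suffix is appended verbatim
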
@@ -0,0 +1,5 @@
from all_warnings import AllWarnings
from document import *
from queryset import *
from fields import *
from migration import *
44 tests/all_warnings/__init__.py Normal file
@@ -0,0 +1,44 @@
"""
This test has been put into a module. This is because it tests warnings that
only get triggered on first hit. This way we can ensure it's imported into the
top level and called first by the test suite.
"""
import sys
sys.path[0:0] = [""]
import unittest
import warnings

from mongoengine import *


__all__ = ('AllWarnings', )


class AllWarnings(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.warning_list = []
        self.showwarning_default = warnings.showwarning
        warnings.showwarning = self.append_to_warning_list

    def append_to_warning_list(self, message, category, *args):
        self.warning_list.append({"message": message,
                                  "category": category})

    def tearDown(self):
        # restore default handling of warnings
        warnings.showwarning = self.showwarning_default

    def test_document_collection_syntax_warning(self):

        class NonAbstractBase(Document):
            meta = {'allow_inheritance': True}

        class InheritedDocumentFailTest(NonAbstractBase):
            meta = {'collection': 'fail'}

        warning = self.warning_list[0]
        self.assertEqual(SyntaxWarning, warning["category"])
        self.assertEqual('non_abstract_base',
                         InheritedDocumentFailTest._get_collection_name())
@@ -1,385 +0,0 @@
import unittest
import datetime
import pymongo

from mongoengine import *
from mongoengine.connection import _get_db


class DocumentTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = _get_db()

        class Person(Document):
            name = StringField()
            age = IntField()
        self.Person = Person

    def test_drop_collection(self):
        """Ensure that the collection may be dropped from the database.
        """
        self.Person(name='Test').save()

        collection = self.Person._meta['collection']
        self.assertTrue(collection in self.db.collection_names())

        self.Person.drop_collection()
        self.assertFalse(collection in self.db.collection_names())

    def test_definition(self):
        """Ensure that document may be defined using fields.
        """
        name_field = StringField()
        age_field = IntField()

        class Person(Document):
            name = name_field
            age = age_field
            non_field = True

        self.assertEqual(Person._fields['name'], name_field)
        self.assertEqual(Person._fields['age'], age_field)
        self.assertFalse('non_field' in Person._fields)
        self.assertTrue('id' in Person._fields)
        # Test iteration over fields
        fields = list(Person())
        self.assertTrue('name' in fields and 'age' in fields)
        # Ensure Document isn't treated like an actual document
        self.assertFalse(hasattr(Document, '_fields'))

    def test_get_superclasses(self):
        """Ensure that the correct list of superclasses is assembled.
        """
        class Animal(Document): pass
        class Fish(Animal): pass
        class Mammal(Animal): pass
        class Human(Mammal): pass
        class Dog(Mammal): pass

        mammal_superclasses = {'Animal': Animal}
        self.assertEqual(Mammal._superclasses, mammal_superclasses)

        dog_superclasses = {
            'Animal': Animal,
            'Animal.Mammal': Mammal,
        }
        self.assertEqual(Dog._superclasses, dog_superclasses)

    def test_get_subclasses(self):
        """Ensure that the correct list of subclasses is retrieved by the
        _get_subclasses method.
        """
        class Animal(Document): pass
        class Fish(Animal): pass
        class Mammal(Animal): pass
        class Human(Mammal): pass
        class Dog(Mammal): pass

        mammal_subclasses = {
            'Animal.Mammal.Dog': Dog,
            'Animal.Mammal.Human': Human
        }
        self.assertEqual(Mammal._get_subclasses(), mammal_subclasses)

        animal_subclasses = {
            'Animal.Fish': Fish,
            'Animal.Mammal': Mammal,
            'Animal.Mammal.Dog': Dog,
            'Animal.Mammal.Human': Human
        }
        self.assertEqual(Animal._get_subclasses(), animal_subclasses)

    def test_polymorphic_queries(self):
        """Ensure that the correct subclasses are returned from a query"""
        class Animal(Document): pass
        class Fish(Animal): pass
        class Mammal(Animal): pass
        class Human(Mammal): pass
        class Dog(Mammal): pass

        Animal().save()
        Fish().save()
        Mammal().save()
        Human().save()
        Dog().save()

        classes = [obj.__class__ for obj in Animal.objects]
        self.assertEqual(classes, [Animal, Fish, Mammal, Human, Dog])

        classes = [obj.__class__ for obj in Mammal.objects]
        self.assertEqual(classes, [Mammal, Human, Dog])

        classes = [obj.__class__ for obj in Human.objects]
        self.assertEqual(classes, [Human])

        Animal.drop_collection()

    def test_inheritance(self):
        """Ensure that document may inherit fields from a superclass document.
        """
        class Employee(self.Person):
            salary = IntField()

        self.assertTrue('name' in Employee._fields)
        self.assertTrue('salary' in Employee._fields)
        self.assertEqual(Employee._meta['collection'],
                         self.Person._meta['collection'])

    def test_allow_inheritance(self):
        """Ensure that inheritance may be disabled on simple classes and that
        _cls and _types will not be used.
        """
        class Animal(Document):
            meta = {'allow_inheritance': False}
            name = StringField()

        Animal.drop_collection()

        def create_dog_class():
            class Dog(Animal):
                pass
        self.assertRaises(ValueError, create_dog_class)

        # Check that _cls etc aren't present on simple documents
        dog = Animal(name='dog')
        dog.save()
        collection = self.db[Animal._meta['collection']]
        obj = collection.find_one()
        self.assertFalse('_cls' in obj)
        self.assertFalse('_types' in obj)

        Animal.drop_collection()

        def create_employee_class():
            class Employee(self.Person):
                meta = {'allow_inheritance': False}
        self.assertRaises(ValueError, create_employee_class)

    def test_collection_name(self):
        """Ensure that a collection with a specified name may be used.
        """
        collection = 'personCollTest'
        if collection in self.db.collection_names():
            self.db.drop_collection(collection)

        class Person(Document):
            name = StringField()
            meta = {'collection': collection}

        user = Person(name="Test User")
        user.save()
        self.assertTrue(collection in self.db.collection_names())

        user_obj = self.db[collection].find_one()
        self.assertEqual(user_obj['name'], "Test User")

        user_obj = Person.objects[0]
        self.assertEqual(user_obj.name, "Test User")

        Person.drop_collection()
        self.assertFalse(collection in self.db.collection_names())

    def test_capped_collection(self):
        """Ensure that capped collections work properly.
        """
        class Log(Document):
            date = DateTimeField(default=datetime.datetime.now)
            meta = {
                'max_documents': 10,
                'max_size': 90000,
            }

        Log.drop_collection()

        # Ensure that the collection handles up to its maximum
        for i in range(10):
            Log().save()

        self.assertEqual(len(Log.objects), 10)

        # Check that extra documents don't increase the size
        Log().save()
        self.assertEqual(len(Log.objects), 10)

        options = Log.objects._collection.options()
        self.assertEqual(options['capped'], True)
        self.assertEqual(options['max'], 10)
        self.assertEqual(options['size'], 90000)

        # Check that the document cannot be redefined with different options
        def recreate_log_document():
            class Log(Document):
                date = DateTimeField(default=datetime.datetime.now)
                meta = {
                    'max_documents': 11,
                }
            # Create the collection by accessing Document.objects
            Log.objects
        self.assertRaises(InvalidCollectionError, recreate_log_document)

        Log.drop_collection()

    def test_creation(self):
        """Ensure that document may be created using keyword arguments.
        """
        person = self.Person(name="Test User", age=30)
        self.assertEqual(person.name, "Test User")
        self.assertEqual(person.age, 30)

    def test_dictionary_access(self):
        """Ensure that dictionary-style field access works properly.
        """
        person = self.Person(name='Test User', age=30)
        self.assertEquals(person['name'], 'Test User')

        self.assertRaises(KeyError, person.__getitem__, 'salary')
        self.assertRaises(KeyError, person.__setitem__, 'salary', 50)

        person['name'] = 'Another User'
        self.assertEquals(person['name'], 'Another User')

        # Length = length(assigned fields + id)
        self.assertEquals(len(person), 3)

        self.assertTrue('age' in person)
        person.age = None
        self.assertFalse('age' in person)
        self.assertFalse('nationality' in person)

    def test_embedded_document(self):
        """Ensure that embedded documents are set up correctly.
        """
        class Comment(EmbeddedDocument):
            content = StringField()

        self.assertTrue('content' in Comment._fields)
        self.assertFalse('id' in Comment._fields)
        self.assertFalse(hasattr(Comment, '_meta'))

    def test_save(self):
        """Ensure that a document may be saved in the database.
        """
        # Create person object and save it to the database
        person = self.Person(name='Test User', age=30)
        person.save()
        # Ensure that the object is in the database
        collection = self.db[self.Person._meta['collection']]
        person_obj = collection.find_one({'name': 'Test User'})
        self.assertEqual(person_obj['name'], 'Test User')
        self.assertEqual(person_obj['age'], 30)
        self.assertEqual(str(person_obj['_id']), person.id)

    def test_delete(self):
        """Ensure that document may be deleted using the delete method.
        """
        person = self.Person(name="Test User", age=30)
        person.save()
        self.assertEqual(len(self.Person.objects), 1)
        person.delete()
        self.assertEqual(len(self.Person.objects), 0)

    def test_save_custom_id(self):
        """Ensure that a document may be saved with a custom _id.
        """
        # Create person object and save it to the database
        person = self.Person(name='Test User', age=30,
                             id='497ce96f395f2f052a494fd4')
        person.save()
        # Ensure that the object is in the database with the correct _id
        collection = self.db[self.Person._meta['collection']]
        person_obj = collection.find_one({'name': 'Test User'})
        self.assertEqual(str(person_obj['_id']), '497ce96f395f2f052a494fd4')

    def test_save_list(self):
        """Ensure that a list field may be properly saved.
        """
        class Comment(EmbeddedDocument):
            content = StringField()

        class BlogPost(Document):
            content = StringField()
            comments = ListField(EmbeddedDocumentField(Comment))
            tags = ListField(StringField())

        post = BlogPost(content='Went for a walk today...')
        post.tags = tags = ['fun', 'leisure']
        comments = [Comment(content='Good for you'), Comment(content='Yay.')]
        post.comments = comments
        post.save()

        collection = self.db[BlogPost._meta['collection']]
        post_obj = collection.find_one()
        self.assertEqual(post_obj['tags'], tags)
        for comment_obj, comment in zip(post_obj['comments'], comments):
            self.assertEqual(comment_obj['content'], comment['content'])

        BlogPost.drop_collection()

    def test_save_embedded_document(self):
        """Ensure that a document with an embedded document field may be
        saved in the database.
        """
        class EmployeeDetails(EmbeddedDocument):
            position = StringField()

        class Employee(self.Person):
            salary = IntField()
            details = EmbeddedDocumentField(EmployeeDetails)

        # Create employee object and save it to the database
        employee = Employee(name='Test Employee', age=50, salary=20000)
        employee.details = EmployeeDetails(position='Developer')
        employee.save()

        # Ensure that the object is in the database
        collection = self.db[self.Person._meta['collection']]
        employee_obj = collection.find_one({'name': 'Test Employee'})
        self.assertEqual(employee_obj['name'], 'Test Employee')
        self.assertEqual(employee_obj['age'], 50)
        # Ensure that the 'details' embedded object saved correctly
        self.assertEqual(employee_obj['details']['position'], 'Developer')

    def test_save_reference(self):
        """Ensure that a document reference field may be saved in the database.
        """

        class BlogPost(Document):
            meta = {'collection': 'blogpost_1'}
            content = StringField()
            author = ReferenceField(self.Person)

        BlogPost.drop_collection()

        author = self.Person(name='Test User')
        author.save()

        post = BlogPost(content='Watched some TV today... how exciting.')
        # Should only reference author when saving
        post.author = author
        post.save()

        post_obj = BlogPost.objects.first()

        # Test laziness
        self.assertTrue(isinstance(post_obj._data['author'],
                                   pymongo.dbref.DBRef))
        self.assertTrue(isinstance(post_obj.author, self.Person))
        self.assertEqual(post_obj.author.name, 'Test User')

        # Ensure that the dereferenced object may be changed and saved
        post_obj.author.age = 25
        post_obj.author.save()

        author = list(self.Person.objects(name='Test User'))[-1]
        self.assertEqual(author.age, 25)

        BlogPost.drop_collection()

    def tearDown(self):
        self.Person.drop_collection()


if __name__ == '__main__':
    unittest.main()
15 tests/document/__init__.py Normal file
@@ -0,0 +1,15 @@
import sys
sys.path[0:0] = [""]
import unittest

from class_methods import *
from delta import *
from dynamic import *
from indexes import *
from inheritance import *
from instance import *
from json_serialisation import *
from validation import *

if __name__ == '__main__':
    unittest.main()
205 tests/document/class_methods.py Normal file
@@ -0,0 +1,205 @@
# -*- coding: utf-8 -*-
import sys
sys.path[0:0] = [""]
import unittest

from mongoengine import *

from mongoengine.queryset import NULLIFY, PULL
from mongoengine.connection import get_db

__all__ = ("ClassMethodsTest", )


class ClassMethodsTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

        class Person(Document):
            name = StringField()
            age = IntField()

            non_field = True

            meta = {"allow_inheritance": True}

        self.Person = Person

    def tearDown(self):
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
            self.db.drop_collection(collection)

    def test_definition(self):
        """Ensure that document may be defined using fields.
        """
        self.assertEqual(['age', 'id', 'name'],
                         sorted(self.Person._fields.keys()))
        self.assertEqual(["IntField", "ObjectIdField", "StringField"],
                         sorted([x.__class__.__name__ for x in
                                 self.Person._fields.values()]))

    def test_get_db(self):
        """Ensure that get_db returns the expected db.
        """
        db = self.Person._get_db()
        self.assertEqual(self.db, db)

    def test_get_collection_name(self):
        """Ensure that get_collection_name returns the expected collection
        name.
        """
        collection_name = 'person'
        self.assertEqual(collection_name, self.Person._get_collection_name())

    def test_get_collection(self):
        """Ensure that get_collection returns the expected collection.
        """
        collection_name = 'person'
        collection = self.Person._get_collection()
        self.assertEqual(self.db[collection_name], collection)

    def test_drop_collection(self):
        """Ensure that the collection may be dropped from the database.
        """
        collection_name = 'person'
        self.Person(name='Test').save()
        self.assertTrue(collection_name in self.db.collection_names())

        self.Person.drop_collection()
        self.assertFalse(collection_name in self.db.collection_names())

    def test_register_delete_rule(self):
        """Ensure that register delete rule adds a delete rule to the document
        meta.
        """
        class Job(Document):
            employee = ReferenceField(self.Person)

        self.assertEqual(self.Person._meta.get('delete_rules'), None)

        self.Person.register_delete_rule(Job, 'employee', NULLIFY)
        self.assertEqual(self.Person._meta['delete_rules'],
                         {(Job, 'employee'): NULLIFY})

    def test_register_delete_rule_inherited(self):

        class Vaccine(Document):
            name = StringField(required=True)

            meta = {"indexes": ["name"]}

        class Animal(Document):
            family = StringField(required=True)
            vaccine_made = ListField(ReferenceField("Vaccine", reverse_delete_rule=PULL))

            meta = {"allow_inheritance": True, "indexes": ["family"]}

        class Cat(Animal):
            name = StringField(required=True)

        self.assertEqual(Vaccine._meta['delete_rules'][(Animal, 'vaccine_made')], PULL)
        self.assertEqual(Vaccine._meta['delete_rules'][(Cat, 'vaccine_made')], PULL)

    def test_collection_naming(self):
        """Ensure that a collection with a specified name may be used.
        """

        class DefaultNamingTest(Document):
            pass
        self.assertEqual('default_naming_test',
                         DefaultNamingTest._get_collection_name())

        class CustomNamingTest(Document):
            meta = {'collection': 'pimp_my_collection'}

        self.assertEqual('pimp_my_collection',
                         CustomNamingTest._get_collection_name())

        class DynamicNamingTest(Document):
            meta = {'collection': lambda c: "DYNAMO"}
        self.assertEqual('DYNAMO', DynamicNamingTest._get_collection_name())

        # Use Abstract class to handle backwards compatibility
        class BaseDocument(Document):
            meta = {
                'abstract': True,
                'collection': lambda c: c.__name__.lower()
            }

        class OldNamingConvention(BaseDocument):
            pass
        self.assertEqual('oldnamingconvention',
                         OldNamingConvention._get_collection_name())

        class InheritedAbstractNamingTest(BaseDocument):
            meta = {'collection': 'wibble'}
        self.assertEqual('wibble',
                         InheritedAbstractNamingTest._get_collection_name())

        # Mixin tests
        class BaseMixin(object):
            meta = {
                'collection': lambda c: c.__name__.lower()
            }

        class OldMixinNamingConvention(Document, BaseMixin):
            pass
        self.assertEqual('oldmixinnamingconvention',
                         OldMixinNamingConvention._get_collection_name())

        class BaseMixin(object):
            meta = {
                'collection': lambda c: c.__name__.lower()
            }

        class BaseDocument(Document, BaseMixin):
            meta = {'allow_inheritance': True}

        class MyDocument(BaseDocument):
            pass

        self.assertEqual('basedocument', MyDocument._get_collection_name())

    def test_custom_collection_name_operations(self):
        """Ensure that a collection with a specified name is used as expected.
        """
        collection_name = 'personCollTest'

        class Person(Document):
            name = StringField()
            meta = {'collection': collection_name}

        Person(name="Test User").save()
        self.assertTrue(collection_name in self.db.collection_names())

        user_obj = self.db[collection_name].find_one()
        self.assertEqual(user_obj['name'], "Test User")

        user_obj = Person.objects[0]
        self.assertEqual(user_obj.name, "Test User")

        Person.drop_collection()
        self.assertFalse(collection_name in self.db.collection_names())

    def test_collection_name_and_primary(self):
        """Ensure that a collection with a specified name may be used.
        """

        class Person(Document):
            name = StringField(primary_key=True)
            meta = {'collection': 'app'}

        Person(name="Test User").save()

        user_obj = Person.objects.first()
        self.assertEqual(user_obj.name, "Test User")

        Person.drop_collection()


if __name__ == '__main__':
    unittest.main()
690 tests/document/delta.py Normal file
@@ -0,0 +1,690 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import sys
|
||||||
|
sys.path[0:0] = [""]
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from mongoengine import *
|
||||||
|
from mongoengine.connection import get_db
|
||||||
|
|
||||||
|
__all__ = ("DeltaTest",)
|
||||||
|
|
||||||
|
|
||||||
|
class DeltaTest(unittest.TestCase):
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
connect(db='mongoenginetest')
|
||||||
|
self.db = get_db()
|
||||||
|
|
||||||
|
class Person(Document):
|
||||||
|
name = StringField()
|
||||||
|
age = IntField()
|
||||||
|
|
||||||
|
non_field = True
|
||||||
|
|
||||||
|
meta = {"allow_inheritance": True}
|
||||||
|
|
||||||
|
self.Person = Person
|
||||||
|
|
||||||
|
def tearDown(self):
|
||||||
|
for collection in self.db.collection_names():
|
||||||
|
if 'system.' in collection:
|
||||||
|
continue
|
||||||
|
self.db.drop_collection(collection)
|
||||||
|
|
||||||
|
def test_delta(self):
|
||||||
|
self.delta(Document)
|
||||||
|
self.delta(DynamicDocument)
|
||||||
|
|
||||||
|
def delta(self, DocClass):
|
||||||
|
|
||||||
|
class Doc(DocClass):
|
||||||
|
string_field = StringField()
|
||||||
|
int_field = IntField()
|
||||||
|
dict_field = DictField()
|
||||||
|
list_field = ListField()
|
||||||
|
|
||||||
|
Doc.drop_collection()
|
||||||
|
doc = Doc()
|
||||||
|
doc.save()
|
||||||
|
|
||||||
|
doc = Doc.objects.first()
|
||||||
|
self.assertEqual(doc._get_changed_fields(), [])
|
||||||
|
self.assertEqual(doc._delta(), ({}, {}))
|
||||||
|
|
||||||
|
doc.string_field = 'hello'
|
||||||
|
self.assertEqual(doc._get_changed_fields(), ['string_field'])
|
||||||
|
self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {}))
|
||||||
|
|
||||||
|
doc._changed_fields = []
|
||||||
|
doc.int_field = 1
|
||||||
|
self.assertEqual(doc._get_changed_fields(), ['int_field'])
|
||||||
|
self.assertEqual(doc._delta(), ({'int_field': 1}, {}))
|
||||||
|
|
||||||
|
doc._changed_fields = []
|
||||||
|
dict_value = {'hello': 'world', 'ping': 'pong'}
|
||||||
|
doc.dict_field = dict_value
|
||||||
|
self.assertEqual(doc._get_changed_fields(), ['dict_field'])
|
||||||
|
self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {}))
|
||||||
|
|
||||||
|
doc._changed_fields = []
|
||||||
|
list_value = ['1', 2, {'hello': 'world'}]
|
||||||
|
doc.list_field = list_value
|
||||||
|
self.assertEqual(doc._get_changed_fields(), ['list_field'])
|
||||||
|
self.assertEqual(doc._delta(), ({'list_field': list_value}, {}))
|
||||||
|
|
||||||
|
# Test unsetting
|
||||||
|
doc._changed_fields = []
|
||||||
|
doc.dict_field = {}
|
||||||
|
self.assertEqual(doc._get_changed_fields(), ['dict_field'])
|
||||||
|
self.assertEqual(doc._delta(), ({}, {'dict_field': 1}))
|
||||||
|
|
||||||
|
doc._changed_fields = []
|
||||||
|
doc.list_field = []
|
||||||
|
self.assertEqual(doc._get_changed_fields(), ['list_field'])
|
||||||
|
self.assertEqual(doc._delta(), ({}, {'list_field': 1}))
|
||||||
|
|
||||||
|
def test_delta_recursive(self):
|
||||||
|
self.delta_recursive(Document, EmbeddedDocument)
|
||||||
|
self.delta_recursive(DynamicDocument, EmbeddedDocument)
|
||||||
|
self.delta_recursive(Document, DynamicEmbeddedDocument)
|
||||||
|
self.delta_recursive(DynamicDocument, DynamicEmbeddedDocument)
|
||||||
|
|
||||||
|
def delta_recursive(self, DocClass, EmbeddedClass):
|
||||||
|
|
||||||
|
class Embedded(EmbeddedClass):
|
||||||
|
string_field = StringField()
|
||||||
|
int_field = IntField()
|
||||||
|
dict_field = DictField()
|
||||||
|
list_field = ListField()
|
||||||
|
|
||||||
|
class Doc(DocClass):
|
||||||
|
string_field = StringField()
|
||||||
|
int_field = IntField()
|
||||||
|
dict_field = DictField()
|
||||||
|
list_field = ListField()
|
||||||
|
embedded_field = EmbeddedDocumentField(Embedded)
|
||||||
|
|
||||||
|
Doc.drop_collection()
|
||||||
|
doc = Doc()
|
||||||
|
doc.save()
|
||||||
|
|
||||||
|
doc = Doc.objects.first()
|
||||||
|
self.assertEqual(doc._get_changed_fields(), [])
|
||||||
|
self.assertEqual(doc._delta(), ({}, {}))
|
||||||
|
|
||||||
|
embedded_1 = Embedded()
|
||||||
|
embedded_1.string_field = 'hello'
|
||||||
|
embedded_1.int_field = 1
|
||||||
|
embedded_1.dict_field = {'hello': 'world'}
|
||||||
|
embedded_1.list_field = ['1', 2, {'hello': 'world'}]
|
||||||
|
doc.embedded_field = embedded_1
|
||||||
|
|
||||||
|
self.assertEqual(doc._get_changed_fields(), ['embedded_field'])
|
||||||
|
|
||||||
|
embedded_delta = {
|
||||||
|
'string_field': 'hello',
|
||||||
|
'int_field': 1,
|
||||||
|
'dict_field': {'hello': 'world'},
|
||||||
|
'list_field': ['1', 2, {'hello': 'world'}]
|
||||||
|
}
|
||||||
|
self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
|
||||||
|
self.assertEqual(doc._delta(),
|
||||||
|
({'embedded_field': embedded_delta}, {}))
|
||||||
|
|
||||||
|
doc.save()
|
||||||
|
doc = doc.reload(10)
|
||||||
|
|
||||||
|
doc.embedded_field.dict_field = {}
|
||||||
|
self.assertEqual(doc._get_changed_fields(),
|
||||||
|
['embedded_field.dict_field'])
|
||||||
|
self.assertEqual(doc.embedded_field._delta(), ({}, {'dict_field': 1}))
|
||||||
|
self.assertEqual(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
|
||||||
|
doc.save()
|
||||||
|
doc = doc.reload(10)
|
||||||
|
self.assertEqual(doc.embedded_field.dict_field, {})
|
||||||
|
|
||||||
|
doc.embedded_field.list_field = []
|
||||||
|
self.assertEqual(doc._get_changed_fields(),
|
||||||
|
['embedded_field.list_field'])
|
||||||
|
self.assertEqual(doc.embedded_field._delta(), ({}, {'list_field': 1}))
|
||||||
|
self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field': 1}))
|
||||||
|
doc.save()
|
||||||
|
doc = doc.reload(10)
|
||||||
|
self.assertEqual(doc.embedded_field.list_field, [])
|
||||||
|
|
||||||
|
embedded_2 = Embedded()
|
||||||
|
embedded_2.string_field = 'hello'
|
||||||
|
embedded_2.int_field = 1
|
||||||
|
embedded_2.dict_field = {'hello': 'world'}
|
||||||
|
embedded_2.list_field = ['1', 2, {'hello': 'world'}]
|
||||||
|
|
||||||
|
doc.embedded_field.list_field = ['1', 2, embedded_2]
|
||||||
|
self.assertEqual(doc._get_changed_fields(),
|
||||||
|
['embedded_field.list_field'])
|
||||||
|
|
||||||
|
self.assertEqual(doc.embedded_field._delta(), ({
|
||||||
|
'list_field': ['1', 2, {
|
||||||
|
'_cls': 'Embedded',
|
||||||
|
'string_field': 'hello',
|
||||||
|
'dict_field': {'hello': 'world'},
|
||||||
|
'int_field': 1,
|
||||||
|
'list_field': ['1', 2, {'hello': 'world'}],
|
||||||
|
}]
|
||||||
|
}, {}))
|
||||||
|
|
||||||
|
self.assertEqual(doc._delta(), ({
|
||||||
|
'embedded_field.list_field': ['1', 2, {
|
||||||
|
'_cls': 'Embedded',
|
||||||
|
'string_field': 'hello',
|
||||||
|
'dict_field': {'hello': 'world'},
|
||||||
|
'int_field': 1,
|
||||||
|
'list_field': ['1', 2, {'hello': 'world'}],
|
||||||
|
}]
|
||||||
|
}, {}))
|
||||||
|
doc.save()
|
||||||
|
doc = doc.reload(10)
|
||||||
|
|
||||||
|
self.assertEqual(doc.embedded_field.list_field[0], '1')
|
||||||
|
self.assertEqual(doc.embedded_field.list_field[1], 2)
|
||||||
|
for k in doc.embedded_field.list_field[2]._fields:
|
||||||
|
self.assertEqual(doc.embedded_field.list_field[2][k],
|
||||||
|
embedded_2[k])
|
||||||
|
|
||||||
|
doc.embedded_field.list_field[2].string_field = 'world'
|
||||||
|
self.assertEqual(doc._get_changed_fields(),
|
||||||
|
['embedded_field.list_field.2.string_field'])
|
||||||
|
self.assertEqual(doc.embedded_field._delta(),
|
||||||
|
({'list_field.2.string_field': 'world'}, {}))
|
||||||
|
self.assertEqual(doc._delta(),
|
||||||
|
({'embedded_field.list_field.2.string_field': 'world'}, {}))
|
||||||
|
doc.save()
|
||||||
|
doc = doc.reload(10)
|
||||||
|
self.assertEqual(doc.embedded_field.list_field[2].string_field,
|
||||||
|
'world')
|
||||||
|
|
||||||
|
# Test multiple assignments
|
||||||
|
doc.embedded_field.list_field[2].string_field = 'hello world'
|
||||||
|
doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
|
||||||
|
self.assertEqual(doc._get_changed_fields(),
|
||||||
|
['embedded_field.list_field'])
|
||||||
|
self.assertEqual(doc.embedded_field._delta(), ({
|
||||||
|
'list_field': ['1', 2, {
|
||||||
|
'_cls': 'Embedded',
|
||||||
|
'string_field': 'hello world',
|
||||||
|
'int_field': 1,
|
||||||
|
'list_field': ['1', 2, {'hello': 'world'}],
|
||||||
|
'dict_field': {'hello': 'world'}}]}, {}))
|
||||||
|
self.assertEqual(doc._delta(), ({
|
||||||
|
'embedded_field.list_field': ['1', 2, {
|
||||||
|
'_cls': 'Embedded',
|
||||||
|
'string_field': 'hello world',
|
||||||
|
'int_field': 1,
|
||||||
|
'list_field': ['1', 2, {'hello': 'world'}],
|
||||||
|
'dict_field': {'hello': 'world'}}
|
||||||
|
]}, {}))
|
||||||
|
doc.save()
|
||||||
|
doc = doc.reload(10)
|
||||||
|
self.assertEqual(doc.embedded_field.list_field[2].string_field,
|
||||||
|
'hello world')
|
||||||
|
|
||||||
|
# Test list native methods
|
||||||
|
doc.embedded_field.list_field[2].list_field.pop(0)
|
||||||
|
self.assertEqual(doc._delta(),
|
||||||
|
({'embedded_field.list_field.2.list_field':
|
||||||
|
[2, {'hello': 'world'}]}, {}))
|
||||||
|
doc.save()
|
||||||
|
doc = doc.reload(10)
|
||||||
|
|
||||||
|
doc.embedded_field.list_field[2].list_field.append(1)
|
||||||
|
self.assertEqual(doc._delta(),
|
||||||
|
({'embedded_field.list_field.2.list_field':
|
||||||
|
[2, {'hello': 'world'}, 1]}, {}))
|
||||||
|
doc.save()
|
||||||
|
doc = doc.reload(10)
|
||||||
|
self.assertEqual(doc.embedded_field.list_field[2].list_field,
|
||||||
|
[2, {'hello': 'world'}, 1])
|
||||||
|
|
||||||
|
doc.embedded_field.list_field[2].list_field.sort(key=str)
|
||||||
|
doc.save()
|
||||||
|
doc = doc.reload(10)
|
||||||
|
self.assertEqual(doc.embedded_field.list_field[2].list_field,
|
||||||
|
[1, 2, {'hello': 'world'}])
|
||||||
|
|
||||||
|
del(doc.embedded_field.list_field[2].list_field[2]['hello'])
|
||||||
|
self.assertEqual(doc._delta(),
|
||||||
|
({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {}))
|
||||||
|
doc.save()
|
||||||
|
doc = doc.reload(10)
|
||||||
|
|
||||||
|
del(doc.embedded_field.list_field[2].list_field)
|
||||||
|
self.assertEqual(doc._delta(),
|
||||||
|
({}, {'embedded_field.list_field.2.list_field': 1}))
|
||||||
|
|
||||||
|
doc.save()
|
||||||
|
doc = doc.reload(10)
|
||||||
|
|
||||||
|
doc.dict_field['Embedded'] = embedded_1
|
||||||
|
doc.save()
|
||||||
|
doc = doc.reload(10)
|
||||||
|
|
||||||
|
doc.dict_field['Embedded'].string_field = 'Hello World'
|
||||||
|
self.assertEqual(doc._get_changed_fields(),
|
||||||
|
['dict_field.Embedded.string_field'])
|
||||||
|
self.assertEqual(doc._delta(),
|
||||||
|
({'dict_field.Embedded.string_field': 'Hello World'}, {}))
|
||||||
|
|
||||||
|
    def test_circular_reference_deltas(self):
        self.circular_reference_deltas(Document, Document)
        self.circular_reference_deltas(Document, DynamicDocument)
        self.circular_reference_deltas(DynamicDocument, Document)
        self.circular_reference_deltas(DynamicDocument, DynamicDocument)

    def circular_reference_deltas(self, DocClass1, DocClass2):

        class Person(DocClass1):
            name = StringField()
            owns = ListField(ReferenceField('Organization'))

        class Organization(DocClass2):
            name = StringField()
            owner = ReferenceField('Person')

        Person.drop_collection()
        Organization.drop_collection()

        person = Person(name="owner").save()
        organization = Organization(name="company").save()

        person.owns.append(organization)
        organization.owner = person

        person.save()
        organization.save()

        p = Person.objects[0].select_related()
        o = Organization.objects.first()
        self.assertEqual(p.owns[0], o)
        self.assertEqual(o.owner, p)

    def test_circular_reference_deltas_2(self):
        self.circular_reference_deltas_2(Document, Document)
        self.circular_reference_deltas_2(Document, DynamicDocument)
        self.circular_reference_deltas_2(DynamicDocument, Document)
        self.circular_reference_deltas_2(DynamicDocument, DynamicDocument)

    def circular_reference_deltas_2(self, DocClass1, DocClass2):

        class Person(DocClass1):
            name = StringField()
            owns = ListField(ReferenceField('Organization'))
            employer = ReferenceField('Organization')

        class Organization(DocClass2):
            name = StringField()
            owner = ReferenceField('Person')
            employees = ListField(ReferenceField('Person'))

        Person.drop_collection()
        Organization.drop_collection()

        person = Person(name="owner")
        person.save()

        employee = Person(name="employee")
        employee.save()

        organization = Organization(name="company")
        organization.save()

        person.owns.append(organization)
        organization.owner = person

        organization.employees.append(employee)
        employee.employer = organization

        person.save()
        organization.save()
        employee.save()

        p = Person.objects.get(name="owner")
        e = Person.objects.get(name="employee")
        o = Organization.objects.first()

        self.assertEqual(p.owns[0], o)
        self.assertEqual(o.owner, p)
        self.assertEqual(e.employer, o)

    def test_delta_db_field(self):
        self.delta_db_field(Document)
        self.delta_db_field(DynamicDocument)

    def delta_db_field(self, DocClass):

        class Doc(DocClass):
            string_field = StringField(db_field='db_string_field')
            int_field = IntField(db_field='db_int_field')
            dict_field = DictField(db_field='db_dict_field')
            list_field = ListField(db_field='db_list_field')

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEqual(doc._get_changed_fields(), ['db_string_field'])
        self.assertEqual(doc._delta(), ({'db_string_field': 'hello'}, {}))

        doc._changed_fields = []
        doc.int_field = 1
        self.assertEqual(doc._get_changed_fields(), ['db_int_field'])
        self.assertEqual(doc._delta(), ({'db_int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEqual(doc._get_changed_fields(), ['db_dict_field'])
        self.assertEqual(doc._delta(), ({'db_dict_field': dict_value}, {}))

        doc._changed_fields = []
        list_value = ['1', 2, {'hello': 'world'}]
        doc.list_field = list_value
        self.assertEqual(doc._get_changed_fields(), ['db_list_field'])
        self.assertEqual(doc._delta(), ({'db_list_field': list_value}, {}))

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        self.assertEqual(doc._get_changed_fields(), ['db_dict_field'])
        self.assertEqual(doc._delta(), ({}, {'db_dict_field': 1}))

        doc._changed_fields = []
        doc.list_field = []
        self.assertEqual(doc._get_changed_fields(), ['db_list_field'])
        self.assertEqual(doc._delta(), ({}, {'db_list_field': 1}))

        # Test it saves that data
        doc = Doc()
        doc.save()

        doc.string_field = 'hello'
        doc.int_field = 1
        doc.dict_field = {'hello': 'world'}
        doc.list_field = ['1', 2, {'hello': 'world'}]
        doc.save()
        doc = doc.reload(10)

        self.assertEqual(doc.string_field, 'hello')
        self.assertEqual(doc.int_field, 1)
        self.assertEqual(doc.dict_field, {'hello': 'world'})
        self.assertEqual(doc.list_field, ['1', 2, {'hello': 'world'}])

    def test_delta_recursive_db_field(self):
        self.delta_recursive_db_field(Document, EmbeddedDocument)
        self.delta_recursive_db_field(Document, DynamicEmbeddedDocument)
        self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument)
        self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument)

    def delta_recursive_db_field(self, DocClass, EmbeddedClass):

        class Embedded(EmbeddedClass):
            string_field = StringField(db_field='db_string_field')
            int_field = IntField(db_field='db_int_field')
            dict_field = DictField(db_field='db_dict_field')
            list_field = ListField(db_field='db_list_field')

        class Doc(DocClass):
            string_field = StringField(db_field='db_string_field')
            int_field = IntField(db_field='db_int_field')
            dict_field = DictField(db_field='db_dict_field')
            list_field = ListField(db_field='db_list_field')
            embedded_field = EmbeddedDocumentField(Embedded,
                                                   db_field='db_embedded_field')

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEqual(doc._get_changed_fields(), ['db_embedded_field'])

        embedded_delta = {
            'db_string_field': 'hello',
            'db_int_field': 1,
            'db_dict_field': {'hello': 'world'},
            'db_list_field': ['1', 2, {'hello': 'world'}]
        }
        self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
        self.assertEqual(doc._delta(),
                         ({'db_embedded_field': embedded_delta}, {}))

        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.dict_field = {}
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_dict_field'])
        self.assertEqual(doc.embedded_field._delta(),
                         ({}, {'db_dict_field': 1}))
        self.assertEqual(doc._delta(),
                         ({}, {'db_embedded_field.db_dict_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.dict_field, {})

        doc.embedded_field.list_field = []
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_list_field'])
        self.assertEqual(doc.embedded_field._delta(),
                         ({}, {'db_list_field': 1}))
        self.assertEqual(doc._delta(),
                         ({}, {'db_embedded_field.db_list_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field, [])

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        doc.embedded_field.list_field = ['1', 2, embedded_2]
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_list_field'])
        self.assertEqual(doc.embedded_field._delta(), ({
            'db_list_field': ['1', 2, {
                '_cls': 'Embedded',
                'db_string_field': 'hello',
                'db_dict_field': {'hello': 'world'},
                'db_int_field': 1,
                'db_list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))

        self.assertEqual(doc._delta(), ({
            'db_embedded_field.db_list_field': ['1', 2, {
                '_cls': 'Embedded',
                'db_string_field': 'hello',
                'db_dict_field': {'hello': 'world'},
                'db_int_field': 1,
                'db_list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))
        doc.save()
        doc = doc.reload(10)

        self.assertEqual(doc.embedded_field.list_field[0], '1')
        self.assertEqual(doc.embedded_field.list_field[1], 2)
        for k in doc.embedded_field.list_field[2]._fields:
            self.assertEqual(doc.embedded_field.list_field[2][k],
                             embedded_2[k])

        doc.embedded_field.list_field[2].string_field = 'world'
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_list_field.2.db_string_field'])
        self.assertEqual(doc.embedded_field._delta(),
                         ({'db_list_field.2.db_string_field': 'world'}, {}))
        self.assertEqual(doc._delta(),
                         ({'db_embedded_field.db_list_field.2.db_string_field': 'world'},
                          {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
                         'world')

        # Test multiple assignments
        doc.embedded_field.list_field[2].string_field = 'hello world'
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_list_field'])
        self.assertEqual(doc.embedded_field._delta(), ({
            'db_list_field': ['1', 2, {
                '_cls': 'Embedded',
                'db_string_field': 'hello world',
                'db_int_field': 1,
                'db_list_field': ['1', 2, {'hello': 'world'}],
                'db_dict_field': {'hello': 'world'}}]}, {}))
        self.assertEqual(doc._delta(), ({
            'db_embedded_field.db_list_field': ['1', 2, {
                '_cls': 'Embedded',
                'db_string_field': 'hello world',
                'db_int_field': 1,
                'db_list_field': ['1', 2, {'hello': 'world'}],
                'db_dict_field': {'hello': 'world'}}
            ]}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
                         'hello world')

        # Test list native methods
        doc.embedded_field.list_field[2].list_field.pop(0)
        self.assertEqual(doc._delta(),
                         ({'db_embedded_field.db_list_field.2.db_list_field':
                           [2, {'hello': 'world'}]}, {}))
        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        self.assertEqual(doc._delta(),
                         ({'db_embedded_field.db_list_field.2.db_list_field':
                           [2, {'hello': 'world'}, 1]}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].list_field,
                         [2, {'hello': 'world'}, 1])

        doc.embedded_field.list_field[2].list_field.sort(key=str)
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].list_field,
                         [1, 2, {'hello': 'world'}])

        del(doc.embedded_field.list_field[2].list_field[2]['hello'])
        self.assertEqual(doc._delta(),
                         ({'db_embedded_field.db_list_field.2.db_list_field':
                           [1, 2, {}]}, {}))
        doc.save()
        doc = doc.reload(10)

        del(doc.embedded_field.list_field[2].list_field)
        self.assertEqual(doc._delta(), ({},
                         {'db_embedded_field.db_list_field.2.db_list_field': 1}))

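Worth noting from the assertions above: deltas are keyed by the database-level names given via `db_field`, not the Python attribute names. A small illustrative sketch mirroring the test classes (assumed setup, not part of the diff):

    from mongoengine import Document, StringField

    class Doc(Document):
        # the attribute is 'string_field'; MongoDB stores 'db_string_field'
        string_field = StringField(db_field='db_string_field')

    # after doc.string_field = 'hello', the delta uses the DB name:
    # doc._delta() == ({'db_string_field': 'hello'}, {})
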
    def test_delta_for_dynamic_documents(self):
        class Person(DynamicDocument):
            name = StringField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()

        p = Person(name="James", age=34)
        self.assertEqual(p._delta(), ({'age': 34, 'name': 'James',
                                       '_cls': 'Person'}, {}))

        p.doc = 123
        del(p.doc)
        self.assertEqual(p._delta(), ({'age': 34, 'name': 'James',
                                       '_cls': 'Person'}, {'doc': 1}))

        p = Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        p.age = 24
        self.assertEqual(p.age, 24)
        self.assertEqual(p._get_changed_fields(), ['age'])
        self.assertEqual(p._delta(), ({'age': 24}, {}))

        p = self.Person.objects(age=22).get()
        p.age = 24
        self.assertEqual(p.age, 24)
        self.assertEqual(p._get_changed_fields(), ['age'])
        self.assertEqual(p._delta(), ({'age': 24}, {}))

        p.save()
        self.assertEqual(1, self.Person.objects(age=24).count())

    def test_dynamic_delta(self):

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEqual(doc._get_changed_fields(), ['string_field'])
        self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {}))

        doc._changed_fields = []
        doc.int_field = 1
        self.assertEqual(doc._get_changed_fields(), ['int_field'])
        self.assertEqual(doc._delta(), ({'int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
        self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {}))

        doc._changed_fields = []
        list_value = ['1', 2, {'hello': 'world'}]
        doc.list_field = list_value
        self.assertEqual(doc._get_changed_fields(), ['list_field'])
        self.assertEqual(doc._delta(), ({'list_field': list_value}, {}))

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
        self.assertEqual(doc._delta(), ({}, {'dict_field': 1}))

        doc._changed_fields = []
        doc.list_field = []
        self.assertEqual(doc._get_changed_fields(), ['list_field'])
        self.assertEqual(doc._delta(), ({}, {'list_field': 1}))

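One behaviour these delta tests pin down: assigning an empty dict or list marks the field for unsetting rather than storing `{}` or `[]`. A compact usage sketch (hypothetical, reusing the dynamic `Doc` class defined just above):

    doc = Doc.objects.first()
    doc._changed_fields = []   # reset change tracking, as the tests do
    doc.dict_field = {}        # an empty container...
    assert doc._delta() == ({}, {'dict_field': 1})   # ...becomes an unset
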
if __name__ == '__main__':
    unittest.main()
297	tests/document/dynamic.py	Normal file
@@ -0,0 +1,297 @@
import unittest
import sys
sys.path[0:0] = [""]

from mongoengine import *
from mongoengine.connection import get_db

__all__ = ("DynamicTest", )


class DynamicTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

        class Person(DynamicDocument):
            name = StringField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()

        self.Person = Person

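As the tests below demonstrate, a `DynamicDocument` persists attributes that were never declared as fields. A minimal usage sketch (assuming the `Person` class from `setUp`):

    p = Person(name="James")
    p.age = 34    # no 'age' field is declared on the class
    p.save()      # the undeclared attribute is persisted alongside 'name'
    assert Person.objects.first().age == 34
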
    def test_simple_dynamic_document(self):
        """Ensures simple dynamic documents are saved correctly"""

        p = self.Person()
        p.name = "James"
        p.age = 34

        self.assertEqual(p.to_mongo(), {"_cls": "Person", "name": "James",
                                        "age": 34})
        self.assertEqual(p.to_mongo().keys(), ["_cls", "name", "age"])
        p.save()
        self.assertEqual(p.to_mongo().keys(), ["_id", "_cls", "name", "age"])

        self.assertEqual(self.Person.objects.first().age, 34)

        # Confirm no changes to self.Person
        self.assertFalse(hasattr(self.Person, 'age'))

    def test_change_scope_of_variable(self):
        """Test that changing the scope of a dynamic field has no adverse effects"""
        p = self.Person()
        p.name = "Dean"
        p.misc = 22
        p.save()

        p = self.Person.objects.get()
        p.misc = {'hello': 'world'}
        p.save()

        p = self.Person.objects.get()
        self.assertEqual(p.misc, {'hello': 'world'})

    def test_delete_dynamic_field(self):
        """Test deleting a dynamic field works"""
        self.Person.drop_collection()
        p = self.Person()
        p.name = "Dean"
        p.misc = 22
        p.save()

        p = self.Person.objects.get()
        p.misc = {'hello': 'world'}
        p.save()

        p = self.Person.objects.get()
        self.assertEqual(p.misc, {'hello': 'world'})
        collection = self.db[self.Person._get_collection_name()]
        obj = collection.find_one()
        self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name'])

        del(p.misc)
        p.save()

        p = self.Person.objects.get()
        self.assertFalse(hasattr(p, 'misc'))

        obj = collection.find_one()
        self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'name'])

    def test_dynamic_document_queries(self):
        """Ensure we can query dynamic fields"""
        p = self.Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        self.assertEqual(1, self.Person.objects(age=22).count())
        p = self.Person.objects(age=22)
        p = p.get()
        self.assertEqual(22, p.age)

    def test_complex_dynamic_document_queries(self):
        class Person(DynamicDocument):
            name = StringField()

        Person.drop_collection()

        p = Person(name="test")
        p.age = "ten"
        p.save()

        p1 = Person(name="test1")
        p1.age = "less than ten and a half"
        p1.save()

        p2 = Person(name="test2")
        p2.age = 10
        p2.save()

        self.assertEqual(Person.objects(age__icontains='ten').count(), 2)
        self.assertEqual(Person.objects(age__gte=10).count(), 1)

    def test_complex_data_lookups(self):
        """Ensure you can query dynamic document dynamic fields"""
        p = self.Person()
        p.misc = {'hello': 'world'}
        p.save()

        self.assertEqual(1, self.Person.objects(misc__hello='world').count())

    def test_complex_embedded_document_validation(self):
        """Ensure embedded dynamic documents may be validated"""
        class Embedded(DynamicEmbeddedDocument):
            content = URLField()

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_doc_1 = Embedded(content='http://mongoengine.org')
        embedded_doc_1.validate()

        embedded_doc_2 = Embedded(content='this is not a url')
        self.assertRaises(ValidationError, embedded_doc_2.validate)

        doc.embedded_field_1 = embedded_doc_1
        doc.embedded_field_2 = embedded_doc_2
        self.assertRaises(ValidationError, doc.validate)

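The test above relies on validation cascading from a document into its embedded documents, even ones attached dynamically. A sketch of the failure mode using the same classes (illustrative only):

    doc = Doc()
    doc.embedded_field_2 = Embedded(content='this is not a url')
    try:
        doc.validate()    # cascades into the embedded URLField
    except ValidationError:
        pass              # invalid embedded content fails the parent document
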
    def test_inheritance(self):
        """Ensure that dynamic document plays nice with inheritance"""
        class Employee(self.Person):
            salary = IntField()

        Employee.drop_collection()

        self.assertTrue('name' in Employee._fields)
        self.assertTrue('salary' in Employee._fields)
        self.assertEqual(Employee._get_collection_name(),
                         self.Person._get_collection_name())

        joe_bloggs = Employee()
        joe_bloggs.name = "Joe Bloggs"
        joe_bloggs.salary = 10
        joe_bloggs.age = 20
        joe_bloggs.save()

        self.assertEqual(1, self.Person.objects(age=20).count())
        self.assertEqual(1, Employee.objects(age=20).count())

        joe_bloggs = self.Person.objects.first()
        self.assertTrue(isinstance(joe_bloggs, Employee))

    def test_embedded_dynamic_document(self):
        """Test dynamic embedded documents"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEqual(doc.to_mongo(), {
            "embedded_field": {
                "_cls": "Embedded",
                "string_field": "hello",
                "int_field": 1,
                "dict_field": {"hello": "world"},
                "list_field": ['1', 2, {'hello': 'world'}]
            }
        })
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc.embedded_field.__class__, Embedded)
        self.assertEqual(doc.embedded_field.string_field, "hello")
        self.assertEqual(doc.embedded_field.int_field, 1)
        self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
        self.assertEqual(doc.embedded_field.list_field,
                         ['1', 2, {'hello': 'world'}])

    def test_complex_embedded_documents(self):
        """Test complex dynamic embedded documents setups"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        embedded_1.list_field = ['1', 2, embedded_2]
        doc.embedded_field = embedded_1

        self.assertEqual(doc.to_mongo(), {
            "embedded_field": {
                "_cls": "Embedded",
                "string_field": "hello",
                "int_field": 1,
                "dict_field": {"hello": "world"},
                "list_field": ['1', 2,
                               {"_cls": "Embedded",
                                "string_field": "hello",
                                "int_field": 1,
                                "dict_field": {"hello": "world"},
                                "list_field": ['1', 2, {'hello': 'world'}]}
                               ]
            }
        })
        doc.save()
        doc = Doc.objects.first()
        self.assertEqual(doc.embedded_field.__class__, Embedded)
        self.assertEqual(doc.embedded_field.string_field, "hello")
        self.assertEqual(doc.embedded_field.int_field, 1)
        self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
        self.assertEqual(doc.embedded_field.list_field[0], '1')
        self.assertEqual(doc.embedded_field.list_field[1], 2)

        embedded_field = doc.embedded_field.list_field[2]

        self.assertEqual(embedded_field.__class__, Embedded)
        self.assertEqual(embedded_field.string_field, "hello")
        self.assertEqual(embedded_field.int_field, 1)
        self.assertEqual(embedded_field.dict_field, {'hello': 'world'})
        self.assertEqual(embedded_field.list_field, ['1', 2,
                                                     {'hello': 'world'}])

    def test_dynamic_and_embedded(self):
        """Ensure embedded documents play nicely"""

        class Address(EmbeddedDocument):
            city = StringField()

        class Person(DynamicDocument):
            name = StringField()

        Person.drop_collection()

        Person(name="Ross", address=Address(city="London")).save()

        person = Person.objects.first()
        person.address.city = "Lundenne"
        person.save()

        self.assertEqual(Person.objects.first().address.city, "Lundenne")

        person = Person.objects.first()
        person.address = Address(city="Londinium")
        person.save()

        self.assertEqual(Person.objects.first().address.city, "Londinium")

        person = Person.objects.first()
        person.age = 35
        person.save()
        self.assertEqual(Person.objects.first().age, 35)


if __name__ == '__main__':
    unittest.main()
712	tests/document/indexes.py	Normal file
@@ -0,0 +1,712 @@
# -*- coding: utf-8 -*-
import unittest
import sys
sys.path[0:0] = [""]

import os
import pymongo

from nose.plugins.skip import SkipTest
from datetime import datetime

from mongoengine import *
from mongoengine.connection import get_db, get_connection

__all__ = ("IndexesTest", )


class IndexesTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

        class Person(Document):
            name = StringField()
            age = IntField()

            non_field = True

            meta = {"allow_inheritance": True}

        self.Person = Person

    def tearDown(self):
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
            self.db.drop_collection(collection)

    def test_indexes_document(self):
        """Ensure that indexes are used when meta[indexes] is specified for
        Documents
        """
        self._index_test(Document)

    def test_indexes_dynamic_document(self):
        """Ensure that indexes are used when meta[indexes] is specified for
        Dynamic Documents
        """
        self._index_test(DynamicDocument)

    def _index_test(self, InheritFrom):

        class BlogPost(InheritFrom):
            date = DateTimeField(db_field='addDate', default=datetime.now)
            category = StringField()
            tags = ListField(StringField())
            meta = {
                'indexes': [
                    '-date',
                    'tags',
                    ('category', '-date')
                ]
            }

        expected_specs = [{'fields': [('addDate', -1)]},
                          {'fields': [('tags', 1)]},
                          {'fields': [('category', 1), ('addDate', -1)]}]
        self.assertEqual(expected_specs, BlogPost._meta['index_specs'])

        BlogPost.ensure_indexes()
        info = BlogPost.objects._collection.index_information()
        # _id, '-date', 'tags', ('cat', 'date')
        self.assertEqual(len(info), 4)
        info = [value['key'] for key, value in info.iteritems()]
        for expected in expected_specs:
            self.assertTrue(expected['fields'] in info)

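The expected specs above document the normalization rule: shorthand entries such as `'-date'` expand to `(db_field, direction)` pairs, so `date` (stored as `addDate`) becomes `('addDate', -1)`. A hypothetical sketch of that expansion (helper name and signature are my own, not MongoEngine API):

    def expand_shorthand(spec, db_field_map):
        """e.g. expand_shorthand('-date', {'date': 'addDate'}) -> ('addDate', -1)
        A leading '-' means descending; '+' or no prefix means ascending."""
        direction = -1 if spec.startswith('-') else 1
        name = spec.lstrip('+-')
        return (db_field_map.get(name, name), direction)
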
    def _index_test_inheritance(self, InheritFrom):

        class BlogPost(InheritFrom):
            date = DateTimeField(db_field='addDate', default=datetime.now)
            category = StringField()
            tags = ListField(StringField())
            meta = {
                'indexes': [
                    '-date',
                    'tags',
                    ('category', '-date')
                ],
                'allow_inheritance': True
            }

        expected_specs = [{'fields': [('_cls', 1), ('addDate', -1)]},
                          {'fields': [('_cls', 1), ('tags', 1)]},
                          {'fields': [('_cls', 1), ('category', 1),
                                      ('addDate', -1)]}]
        self.assertEqual(expected_specs, BlogPost._meta['index_specs'])

        BlogPost.ensure_indexes()
        info = BlogPost.objects._collection.index_information()
        # _id, '-date', 'tags', ('cat', 'date')
        # NB: there is no index on _cls by itself, since
        # the indices on -date and tags will both contain
        # _cls as first element in the key
        self.assertEqual(len(info), 4)
        info = [value['key'] for key, value in info.iteritems()]
        for expected in expected_specs:
            self.assertTrue(expected['fields'] in info)

        class ExtendedBlogPost(BlogPost):
            title = StringField()
            meta = {'indexes': ['title']}

        expected_specs.append({'fields': [('_cls', 1), ('title', 1)]})
        self.assertEqual(expected_specs, ExtendedBlogPost._meta['index_specs'])

        BlogPost.drop_collection()

        ExtendedBlogPost.ensure_indexes()
        info = ExtendedBlogPost.objects._collection.index_information()
        info = [value['key'] for key, value in info.iteritems()]
        for expected in expected_specs:
            self.assertTrue(expected['fields'] in info)

    def test_indexes_document_inheritance(self):
        """Ensure that indexes are used when meta[indexes] is specified for
        Documents
        """
        self._index_test_inheritance(Document)

    def test_indexes_dynamic_document_inheritance(self):
        """Ensure that indexes are used when meta[indexes] is specified for
        Dynamic Documents
        """
        self._index_test_inheritance(DynamicDocument)

    def test_inherited_index(self):
        """Ensure index specs are inherited correctly"""

        class A(Document):
            title = StringField()
            meta = {
                'indexes': [
                    {
                        'fields': ('title',),
                    },
                ],
                'allow_inheritance': True,
            }

        class B(A):
            description = StringField()

        self.assertEqual(A._meta['index_specs'], B._meta['index_specs'])
        self.assertEqual([{'fields': [('_cls', 1), ('title', 1)]}],
                         A._meta['index_specs'])

    def test_build_index_spec_is_not_destructive(self):

        class MyDoc(Document):
            keywords = StringField()

            meta = {
                'indexes': ['keywords'],
                'allow_inheritance': False
            }

        self.assertEqual(MyDoc._meta['index_specs'],
                         [{'fields': [('keywords', 1)]}])

        # Force index creation
        MyDoc.ensure_indexes()

        self.assertEqual(MyDoc._meta['index_specs'],
                         [{'fields': [('keywords', 1)]}])

    def test_embedded_document_index_meta(self):
        """Ensure that embedded document indexes are created explicitly
        """
        class Rank(EmbeddedDocument):
            title = StringField(required=True)

        class Person(Document):
            name = StringField(required=True)
            rank = EmbeddedDocumentField(Rank, required=False)

            meta = {
                'indexes': [
                    'rank.title',
                ],
                'allow_inheritance': False
            }

        self.assertEqual([{'fields': [('rank.title', 1)]}],
                         Person._meta['index_specs'])

        Person.drop_collection()

        # Indexes are lazy so use list() to perform query
        list(Person.objects)
        info = Person.objects._collection.index_information()
        info = [value['key'] for key, value in info.iteritems()]
        self.assertTrue([('rank.title', 1)] in info)

    def test_explicit_geo2d_index(self):
        """Ensure that geo2d indexes work when created via meta[indexes]
        """
        class Place(Document):
            location = DictField()
            meta = {
                'allow_inheritance': True,
                'indexes': [
                    '*location.point',
                ]
            }

        self.assertEqual([{'fields': [('location.point', '2d')]}],
                         Place._meta['index_specs'])

        Place.ensure_indexes()
        info = Place._get_collection().index_information()
        info = [value['key'] for key, value in info.iteritems()]
        self.assertTrue([('location.point', '2d')] in info)

    def test_explicit_geo2d_index_embedded(self):
        """Ensure that geo2d indexes work when created via meta[indexes]
        """
        class EmbeddedLocation(EmbeddedDocument):
            location = DictField()

        class Place(Document):
            current = DictField(field=EmbeddedDocumentField('EmbeddedLocation'))
            meta = {
                'allow_inheritance': True,
                'indexes': [
                    '*current.location.point',
                ]
            }

        self.assertEqual([{'fields': [('current.location.point', '2d')]}],
                         Place._meta['index_specs'])

        Place.ensure_indexes()
        info = Place._get_collection().index_information()
        info = [value['key'] for key, value in info.iteritems()]
        self.assertTrue([('current.location.point', '2d')] in info)

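Both geo tests use the `'*'` prefix, which evidently requests a `'2d'` geospatial index on the named (possibly dotted) path instead of an ordinary ascending index. A declaration sketch mirroring the tests:

    from mongoengine import Document, DictField

    class Place(Document):
        location = DictField()
        meta = {'indexes': ['*location.point']}   # -> [('location.point', '2d')]
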
    def test_dictionary_indexes(self):
        """Ensure that indexes are used when meta[indexes] contains
        dictionaries instead of lists.
        """
        class BlogPost(Document):
            date = DateTimeField(db_field='addDate', default=datetime.now)
            category = StringField()
            tags = ListField(StringField())
            meta = {
                'indexes': [
                    {'fields': ['-date'], 'unique': True, 'sparse': True},
                ],
            }

        self.assertEqual([{'fields': [('addDate', -1)], 'unique': True,
                           'sparse': True}],
                         BlogPost._meta['index_specs'])

        BlogPost.drop_collection()

        info = BlogPost.objects._collection.index_information()
        # _id, '-date'
        self.assertEqual(len(info), 2)

        # Indexes are lazy so use list() to perform query
        list(BlogPost.objects)
        info = BlogPost.objects._collection.index_information()
        info = [(value['key'],
                 value.get('unique', False),
                 value.get('sparse', False))
                for key, value in info.iteritems()]
        self.assertTrue(([('addDate', -1)], True, True) in info)

        BlogPost.drop_collection()

    def test_abstract_index_inheritance(self):

        class UserBase(Document):
            user_guid = StringField(required=True)
            meta = {
                'abstract': True,
                'indexes': ['user_guid'],
                'allow_inheritance': True
            }

        class Person(UserBase):
            name = StringField()

            meta = {
                'indexes': ['name'],
            }
        Person.drop_collection()

        Person(name="test", user_guid='123').save()

        self.assertEqual(1, Person.objects.count())
        info = Person.objects._collection.index_information()
        self.assertEqual(sorted(info.keys()),
                         ['_cls_1_name_1', '_cls_1_user_guid_1', '_id_'])

    def test_disable_index_creation(self):
        """Tests that setting auto_create_index to False on the connection
        disables any index generation.
        """
        class User(Document):
            meta = {
                'allow_inheritance': True,
                'indexes': ['user_guid'],
                'auto_create_index': False
            }
            user_guid = StringField(required=True)

        class MongoUser(User):
            pass

        User.drop_collection()

        User(user_guid='123').save()
        MongoUser(user_guid='123').save()

        self.assertEqual(2, User.objects.count())
        info = User.objects._collection.index_information()
        self.assertEqual(info.keys(), ['_id_'])

        User.ensure_indexes()
        info = User.objects._collection.index_information()
        self.assertEqual(sorted(info.keys()), ['_cls_1_user_guid_1', '_id_'])
        User.drop_collection()

    def test_embedded_document_index(self):
        """Tests setting an index on an embedded document
        """
        class Date(EmbeddedDocument):
            year = IntField(db_field='yr')

        class BlogPost(Document):
            title = StringField()
            date = EmbeddedDocumentField(Date)

            meta = {
                'indexes': [
                    '-date.year'
                ],
            }

        BlogPost.drop_collection()

        info = BlogPost.objects._collection.index_information()
        self.assertEqual(sorted(info.keys()), ['_id_', 'date.yr_-1'])
        BlogPost.drop_collection()

    def test_list_embedded_document_index(self):
        """Ensure list embedded documents can be indexed
        """
        class Tag(EmbeddedDocument):
            name = StringField(db_field='tag')

        class BlogPost(Document):
            title = StringField()
            tags = ListField(EmbeddedDocumentField(Tag))

            meta = {
                'indexes': [
                    'tags.name'
                ]
            }

        BlogPost.drop_collection()

        info = BlogPost.objects._collection.index_information()
        # we don't use _cls with list fields by default
        self.assertEqual(sorted(info.keys()), ['_id_', 'tags.tag_1'])

        post1 = BlogPost(title="Embedded Indexes tests in place",
                         tags=[Tag(name="about"), Tag(name="time")])
        post1.save()
        BlogPost.drop_collection()

    def test_recursive_embedded_objects_dont_break_indexes(self):

        class RecursiveObject(EmbeddedDocument):
            obj = EmbeddedDocumentField('self')

        class RecursiveDocument(Document):
            recursive_obj = EmbeddedDocumentField(RecursiveObject)
            meta = {'allow_inheritance': True}

        RecursiveDocument.ensure_indexes()
        info = RecursiveDocument._get_collection().index_information()
        self.assertEqual(sorted(info.keys()), ['_cls_1', '_id_'])

    def test_covered_index(self):
        """Ensure that covered indexes can be used
        """

        class Test(Document):
            a = IntField()

            meta = {
                'indexes': ['a'],
                'allow_inheritance': False
            }

        Test.drop_collection()

        obj = Test(a=1)
        obj.save()

        # Need to be explicit about covered indexes, as MongoDB doesn't know
        # whether the documents returned might have more keys than requested.
        query_plan = Test.objects(id=obj.id).exclude('a').explain()
        self.assertFalse(query_plan['indexOnly'])

        query_plan = Test.objects(id=obj.id).only('id').explain()
        self.assertTrue(query_plan['indexOnly'])

        query_plan = Test.objects(a=1).only('a').exclude('id').explain()
        self.assertTrue(query_plan['indexOnly'])

    def test_index_on_id(self):

        class BlogPost(Document):
            meta = {
                'indexes': [
                    ['categories', 'id']
                ]
            }

            title = StringField(required=True)
            description = StringField(required=True)
            categories = ListField()

        BlogPost.drop_collection()

        indexes = BlogPost.objects._collection.index_information()
        self.assertEqual(indexes['categories_1__id_1']['key'],
                         [('categories', 1), ('_id', 1)])

    def test_hint(self):

        class BlogPost(Document):
            tags = ListField(StringField())
            meta = {
                'indexes': [
                    'tags',
                ],
            }

        BlogPost.drop_collection()

        for i in xrange(0, 10):
            tags = [("tag %i" % n) for n in xrange(0, i % 2)]
            BlogPost(tags=tags).save()

        self.assertEqual(BlogPost.objects.count(), 10)
        self.assertEqual(BlogPost.objects.hint().count(), 10)
        self.assertEqual(BlogPost.objects.hint([('tags', 1)]).count(), 10)

        self.assertEqual(BlogPost.objects.hint([('ZZ', 1)]).count(), 10)

        def invalid_index():
            BlogPost.objects.hint('tags')
        self.assertRaises(TypeError, invalid_index)

        def invalid_index_2():
            return BlogPost.objects.hint(('tags', 1))
        self.assertRaises(TypeError, invalid_index_2)

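The hint test pins down the accepted argument type: `hint()` wants a list of `(field, direction)` tuples, and a bare string or tuple raises `TypeError`. Usage sketch (same `BlogPost` as above):

    BlogPost.objects.hint([('tags', 1)]).count()   # OK: list of (field, direction)
    BlogPost.objects.hint('tags')                  # raises TypeError
    BlogPost.objects.hint(('tags', 1))             # raises TypeError
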
    def test_unique(self):
        """Ensure that uniqueness constraints are applied to fields.
        """
        class BlogPost(Document):
            title = StringField()
            slug = StringField(unique=True)

        BlogPost.drop_collection()

        post1 = BlogPost(title='test1', slug='test')
        post1.save()

        # Two posts with the same slug is not allowed
        post2 = BlogPost(title='test2', slug='test')
        self.assertRaises(NotUniqueError, post2.save)

        # Ensure backwards compatibility for errors
        self.assertRaises(OperationError, post2.save)

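The double `assertRaises` above works because `NotUniqueError` is raised for duplicate keys while remaining catchable as `OperationError` (it appears to subclass it, preserving backwards compatibility). The catch-either pattern:

    try:
        post2.save()
    except NotUniqueError:
        # an 'except OperationError' handler would catch this too,
        # so pre-existing error handling keeps working
        pass
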
    def test_unique_with(self):
        """Ensure that unique_with constraints are applied to fields.
        """
        class Date(EmbeddedDocument):
            year = IntField(db_field='yr')

        class BlogPost(Document):
            title = StringField()
            date = EmbeddedDocumentField(Date)
            slug = StringField(unique_with='date.year')

        BlogPost.drop_collection()

        post1 = BlogPost(title='test1', date=Date(year=2009), slug='test')
        post1.save()

        # year is different so won't raise exception
        post2 = BlogPost(title='test2', date=Date(year=2010), slug='test')
        post2.save()

        # Now there will be two docs with the same slug and the same year: fail
        post3 = BlogPost(title='test3', date=Date(year=2010), slug='test')
        self.assertRaises(OperationError, post3.save)

        BlogPost.drop_collection()

    def test_unique_embedded_document(self):
        """Ensure that uniqueness constraints are applied to fields on
        embedded documents.
        """
        class SubDocument(EmbeddedDocument):
            year = IntField(db_field='yr')
            slug = StringField(unique=True)

        class BlogPost(Document):
            title = StringField()
            sub = EmbeddedDocumentField(SubDocument)

        BlogPost.drop_collection()

        post1 = BlogPost(title='test1',
                         sub=SubDocument(year=2009, slug="test"))
        post1.save()

        # sub.slug is different so won't raise exception
        post2 = BlogPost(title='test2',
                         sub=SubDocument(year=2010, slug='another-slug'))
        post2.save()

        # Now there will be two docs with the same sub.slug
        post3 = BlogPost(title='test3',
                         sub=SubDocument(year=2010, slug='test'))
        self.assertRaises(NotUniqueError, post3.save)

        BlogPost.drop_collection()

    def test_unique_with_embedded_document_and_embedded_unique(self):
        """Ensure that uniqueness constraints are applied to fields on
        embedded documents, and that they work with unique_with as well.
        """
        class SubDocument(EmbeddedDocument):
            year = IntField(db_field='yr')
            slug = StringField(unique=True)

        class BlogPost(Document):
            title = StringField(unique_with='sub.year')
            sub = EmbeddedDocumentField(SubDocument)

        BlogPost.drop_collection()

        post1 = BlogPost(title='test1',
                         sub=SubDocument(year=2009, slug="test"))
        post1.save()

        # sub.slug is different so won't raise exception
        post2 = BlogPost(title='test2',
                         sub=SubDocument(year=2010, slug='another-slug'))
        post2.save()

        # Now there will be two docs with the same sub.slug
        post3 = BlogPost(title='test3',
                         sub=SubDocument(year=2010, slug='test'))
        self.assertRaises(NotUniqueError, post3.save)

        # Now there will be two docs with the same title and year
        post3 = BlogPost(title='test1',
                         sub=SubDocument(year=2009, slug='test-1'))
        self.assertRaises(NotUniqueError, post3.save)

        BlogPost.drop_collection()

    def test_ttl_indexes(self):

        class Log(Document):
            created = DateTimeField(default=datetime.now)
            meta = {
                'indexes': [
                    {'fields': ['created'], 'expireAfterSeconds': 3600}
                ]
            }

        Log.drop_collection()

        if pymongo.version_tuple[0] < 2 and pymongo.version_tuple[1] < 3:
            raise SkipTest('pymongo needs to be 2.3 or higher for this test')

        connection = get_connection()
        version_array = connection.server_info()['versionArray']
        if version_array[0] < 2 and version_array[1] < 2:
            raise SkipTest('MongoDB needs to be 2.2 or higher for this test')

        # Indexes are lazy so use list() to perform query
        list(Log.objects)
        info = Log.objects._collection.index_information()
        self.assertEqual(3600,
                         info['created_1']['expireAfterSeconds'])

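`expireAfterSeconds` creates a MongoDB TTL index: a server background task removes documents once `created` is older than the given number of seconds, which is why the test skips on servers before 2.2. Declaration sketch (mirrors the test's `Log` class):

    from datetime import datetime
    from mongoengine import Document, DateTimeField

    class Log(Document):
        created = DateTimeField(default=datetime.now)
        meta = {'indexes': [
            # MongoDB deletes each document ~3600s after its 'created' value
            {'fields': ['created'], 'expireAfterSeconds': 3600},
        ]}
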
    def test_unique_and_indexes(self):
        """Ensure that 'unique' constraints aren't overridden by
        meta.indexes.
        """
        class Customer(Document):
            cust_id = IntField(unique=True, required=True)
            meta = {
                'indexes': ['cust_id'],
                'allow_inheritance': False,
            }

        Customer.drop_collection()
        cust = Customer(cust_id=1)
        cust.save()

        cust_dupe = Customer(cust_id=1)
        try:
            cust_dupe.save()
            raise AssertionError("We saved a dupe!")
        except NotUniqueError:
            pass
        Customer.drop_collection()

    def test_unique_and_primary(self):
        """If you set a field as primary, then unexpected behaviour can occur.
        You won't create a duplicate but you will update an existing document.
        """

        class User(Document):
            name = StringField(primary_key=True, unique=True)
            password = StringField()

        User.drop_collection()

        user = User(name='huangz', password='secret')
        user.save()

        user = User(name='huangz', password='secret2')
        user.save()

        self.assertEqual(User.objects.count(), 1)
        self.assertEqual(User.objects.get().password, 'secret2')

        User.drop_collection()

    def test_index_with_pk(self):
        """Ensure you can use `pk` as part of a query"""

        class Comment(EmbeddedDocument):
            comment_id = IntField(required=True)

        try:
            class BlogPost(Document):
                comments = EmbeddedDocumentField(Comment)
                meta = {'indexes': [
                    {'fields': ['pk', 'comments.comment_id'],
                     'unique': True}]}
        except UnboundLocalError:
            self.fail('Unbound local error at index + pk definition')

        info = BlogPost.objects._collection.index_information()
        info = [value['key'] for key, value in info.iteritems()]
        index_item = [('_id', 1), ('comments.comment_id', 1)]
        self.assertTrue(index_item in info)

    def test_compound_key_embedded(self):

        class CompoundKey(EmbeddedDocument):
            name = StringField(required=True)
            term = StringField(required=True)

        class Report(Document):
            key = EmbeddedDocumentField(CompoundKey, primary_key=True)
            text = StringField()

        Report.drop_collection()

        my_key = CompoundKey(name="n", term="ok")
        report = Report(text="OK", key=my_key).save()

        self.assertEqual({'text': 'OK', '_id': {'term': 'ok', 'name': 'n'}},
                         report.to_mongo())
        self.assertEqual(report, Report.objects.get(pk=my_key))

    def test_compound_key_dictfield(self):

        class Report(Document):
            key = DictField(primary_key=True)
            text = StringField()

        Report.drop_collection()

        my_key = {"name": "n", "term": "ok"}
        report = Report(text="OK", key=my_key).save()

        self.assertEqual({'text': 'OK', '_id': {'term': 'ok', 'name': 'n'}},
                         report.to_mongo())
        self.assertEqual(report, Report.objects.get(pk=my_key))


if __name__ == '__main__':
    unittest.main()
414	tests/document/inheritance.py	Normal file
@@ -0,0 +1,414 @@
# -*- coding: utf-8 -*-
import sys
sys.path[0:0] = [""]
import unittest
import warnings

from datetime import datetime

from tests.fixtures import Base

from mongoengine import Document, EmbeddedDocument, connect
from mongoengine.connection import get_db
from mongoengine.fields import (BooleanField, GenericReferenceField,
                                IntField, StringField)

__all__ = ('InheritanceTest', )


class InheritanceTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

    def tearDown(self):
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
            self.db.drop_collection(collection)

    def test_superclasses(self):
        """Ensure that the correct list of superclasses is assembled.
        """
        class Animal(Document):
            meta = {'allow_inheritance': True}
        class Fish(Animal): pass
        class Guppy(Fish): pass
        class Mammal(Animal): pass
        class Dog(Mammal): pass
        class Human(Mammal): pass

        self.assertEqual(Animal._superclasses, ())
        self.assertEqual(Fish._superclasses, ('Animal',))
        self.assertEqual(Guppy._superclasses, ('Animal', 'Animal.Fish'))
        self.assertEqual(Mammal._superclasses, ('Animal',))
        self.assertEqual(Dog._superclasses, ('Animal', 'Animal.Mammal'))
        self.assertEqual(Human._superclasses, ('Animal', 'Animal.Mammal'))

    def test_external_superclasses(self):
        """Ensure that the correct list of super classes is assembled when
        importing part of the model.
        """
        class Animal(Base): pass
        class Fish(Animal): pass
        class Guppy(Fish): pass
        class Mammal(Animal): pass
        class Dog(Mammal): pass
        class Human(Mammal): pass

        self.assertEqual(Animal._superclasses, ('Base', ))
        self.assertEqual(Fish._superclasses, ('Base', 'Base.Animal',))
        self.assertEqual(Guppy._superclasses, ('Base', 'Base.Animal',
                                               'Base.Animal.Fish'))
        self.assertEqual(Mammal._superclasses, ('Base', 'Base.Animal',))
        self.assertEqual(Dog._superclasses, ('Base', 'Base.Animal',
                                             'Base.Animal.Mammal'))
        self.assertEqual(Human._superclasses, ('Base', 'Base.Animal',
                                               'Base.Animal.Mammal'))

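The pattern behind these assertions: `_superclasses` and `_subclasses` hold dotted `_cls` paths encoding the full inheritance chain (e.g. 'Animal.Fish.Guppy'). A hypothetical illustration of how such paths would support querying a class together with its descendants:

    # querying Fish should match Fish and everything registered beneath it
    fish_and_descendants = ('Animal.Fish', 'Animal.Fish.Guppy')
    query = {'_cls': {'$in': list(fish_and_descendants)}}
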
    def test_subclasses(self):
        """Ensure that the correct list of _subclasses (subclasses) is
        assembled.
        """
        class Animal(Document):
            meta = {'allow_inheritance': True}
        class Fish(Animal): pass
        class Guppy(Fish): pass
        class Mammal(Animal): pass
        class Dog(Mammal): pass
        class Human(Mammal): pass

        self.assertEqual(Animal._subclasses, ('Animal',
                                              'Animal.Fish',
                                              'Animal.Fish.Guppy',
                                              'Animal.Mammal',
                                              'Animal.Mammal.Dog',
                                              'Animal.Mammal.Human'))
        self.assertEqual(Fish._subclasses, ('Animal.Fish',
                                            'Animal.Fish.Guppy',))
        self.assertEqual(Guppy._subclasses, ('Animal.Fish.Guppy',))
        self.assertEqual(Mammal._subclasses, ('Animal.Mammal',
                                              'Animal.Mammal.Dog',
                                              'Animal.Mammal.Human'))
        self.assertEqual(Human._subclasses, ('Animal.Mammal.Human',))

    def test_external_subclasses(self):
        """Ensure that the correct list of _subclasses (subclasses) is
        assembled when importing part of the model.
        """
        class Animal(Base): pass
        class Fish(Animal): pass
        class Guppy(Fish): pass
        class Mammal(Animal): pass
        class Dog(Mammal): pass
        class Human(Mammal): pass

        self.assertEqual(Animal._subclasses, ('Base.Animal',
                                              'Base.Animal.Fish',
                                              'Base.Animal.Fish.Guppy',
                                              'Base.Animal.Mammal',
                                              'Base.Animal.Mammal.Dog',
                                              'Base.Animal.Mammal.Human'))
        self.assertEqual(Fish._subclasses, ('Base.Animal.Fish',
                                            'Base.Animal.Fish.Guppy',))
        self.assertEqual(Guppy._subclasses, ('Base.Animal.Fish.Guppy',))
        self.assertEqual(Mammal._subclasses, ('Base.Animal.Mammal',
                                              'Base.Animal.Mammal.Dog',
                                              'Base.Animal.Mammal.Human'))
        self.assertEqual(Human._subclasses, ('Base.Animal.Mammal.Human',))

def test_dynamic_declarations(self):
|
||||||
|
"""Test that declaring an extra class updates meta data"""
|
||||||
|
|
||||||
|
class Animal(Document):
|
||||||
|
meta = {'allow_inheritance': True}
|
||||||
|
|
||||||
|
self.assertEqual(Animal._superclasses, ())
|
||||||
|
self.assertEqual(Animal._subclasses, ('Animal',))
|
||||||
|
|
||||||
|
# Test dynamically adding a class changes the meta data
|
||||||
|
class Fish(Animal):
|
||||||
|
pass
|
||||||
|
|
||||||
|
self.assertEqual(Animal._superclasses, ())
|
||||||
|
self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish'))
|
||||||
|
|
||||||
|
self.assertEqual(Fish._superclasses, ('Animal', ))
|
||||||
|
self.assertEqual(Fish._subclasses, ('Animal.Fish',))
|
||||||
|
|
||||||
|
# Test dynamically adding an inherited class changes the meta data
|
||||||
|
class Pike(Fish):
|
||||||
|
pass
|
||||||
|
|
||||||
|
self.assertEqual(Animal._superclasses, ())
|
||||||
|
self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish',
|
||||||
|
'Animal.Fish.Pike'))
|
||||||
|
|
||||||
|
self.assertEqual(Fish._superclasses, ('Animal', ))
|
||||||
|
self.assertEqual(Fish._subclasses, ('Animal.Fish', 'Animal.Fish.Pike'))
|
||||||
|
|
||||||
|
self.assertEqual(Pike._superclasses, ('Animal', 'Animal.Fish'))
|
||||||
|
self.assertEqual(Pike._subclasses, ('Animal.Fish.Pike',))
|
||||||
|
|
||||||
|
def test_inheritance_meta_data(self):
|
||||||
|
"""Ensure that document may inherit fields from a superclass document.
|
||||||
|
"""
|
||||||
|
class Person(Document):
|
||||||
|
name = StringField()
|
||||||
|
age = IntField()
|
||||||
|
|
||||||
|
meta = {'allow_inheritance': True}
|
||||||
|
|
||||||
|
class Employee(Person):
|
||||||
|
salary = IntField()
|
||||||
|
|
||||||
|
self.assertEqual(['age', 'id', 'name', 'salary'],
|
||||||
|
sorted(Employee._fields.keys()))
|
||||||
|
self.assertEqual(Employee._get_collection_name(),
|
||||||
|
Person._get_collection_name())
|
||||||
|
|
||||||
|
def test_inheritance_to_mongo_keys(self):
|
||||||
|
"""Ensure that document may inherit fields from a superclass document.
|
||||||
|
"""
|
||||||
|
class Person(Document):
|
||||||
|
name = StringField()
|
||||||
|
age = IntField()
|
||||||
|
|
||||||
|
meta = {'allow_inheritance': True}
|
||||||
|
|
||||||
|
class Employee(Person):
|
||||||
|
salary = IntField()
|
||||||
|
|
||||||
|
self.assertEqual(['age', 'id', 'name', 'salary'],
|
||||||
|
sorted(Employee._fields.keys()))
|
||||||
|
self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(),
|
||||||
|
['_cls', 'name', 'age'])
|
||||||
|
self.assertEqual(Employee(name="Bob", age=35, salary=0).to_mongo().keys(),
|
||||||
|
['_cls', 'name', 'age', 'salary'])
|
||||||
|
self.assertEqual(Employee._get_collection_name(),
|
||||||
|
Person._get_collection_name())
|
||||||
|
|
||||||
|
def test_polymorphic_queries(self):
|
||||||
|
"""Ensure that the correct subclasses are returned from a query
|
||||||
|
"""
|
||||||
|
|
||||||
|
class Animal(Document):
|
||||||
|
meta = {'allow_inheritance': True}
|
||||||
|
class Fish(Animal): pass
|
||||||
|
class Mammal(Animal): pass
|
||||||
|
class Dog(Mammal): pass
|
||||||
|
class Human(Mammal): pass
|
||||||
|
|
||||||
|
Animal.drop_collection()
|
||||||
|
|
||||||
|
Animal().save()
|
||||||
|
Fish().save()
|
||||||
|
Mammal().save()
|
||||||
|
Dog().save()
|
||||||
|
Human().save()
|
||||||
|
|
||||||
|
classes = [obj.__class__ for obj in Animal.objects]
|
||||||
|
self.assertEqual(classes, [Animal, Fish, Mammal, Dog, Human])
|
||||||
|
|
||||||
|
classes = [obj.__class__ for obj in Mammal.objects]
|
||||||
|
self.assertEqual(classes, [Mammal, Dog, Human])
|
||||||
|
|
||||||
|
classes = [obj.__class__ for obj in Human.objects]
|
||||||
|
self.assertEqual(classes, [Human])
|
||||||
|
|
||||||
|
def test_allow_inheritance(self):
|
||||||
|
"""Ensure that inheritance may be disabled on simple classes and that
|
||||||
|
_cls and _subclasses will not be used.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class Animal(Document):
|
||||||
|
name = StringField()
|
||||||
|
|
||||||
|
def create_dog_class():
|
||||||
|
class Dog(Animal):
|
||||||
|
pass
|
||||||
|
|
||||||
|
self.assertRaises(ValueError, create_dog_class)
|
||||||
|
|
||||||
|
# Check that _cls etc aren't present on simple documents
|
||||||
|
dog = Animal(name='dog').save()
|
||||||
|
self.assertEqual(dog.to_mongo().keys(), ['_id', 'name'])
|
||||||
|
|
||||||
|
collection = self.db[Animal._get_collection_name()]
|
||||||
|
obj = collection.find_one()
|
||||||
|
self.assertFalse('_cls' in obj)
|
||||||
|
|
||||||
|
    def test_cant_turn_off_inheritance_on_subclass(self):
        """Ensure that if inheritance is on in a subclass you can't turn it off.
        """

        class Animal(Document):
            name = StringField()
            meta = {'allow_inheritance': True}

        def create_mammal_class():
            class Mammal(Animal):
                meta = {'allow_inheritance': False}
        self.assertRaises(ValueError, create_mammal_class)

    def test_allow_inheritance_abstract_document(self):
        """Ensure that abstract documents can set inheritance rules and that
        _cls will not be used.
        """
        class FinalDocument(Document):
            meta = {'abstract': True,
                    'allow_inheritance': False}

        class Animal(FinalDocument):
            name = StringField()

        def create_mammal_class():
            class Mammal(Animal):
                pass
        self.assertRaises(ValueError, create_mammal_class)

        # Check that _cls isn't present in simple documents
        doc = Animal(name='dog')
        self.assertFalse('_cls' in doc.to_mongo())

    def test_allow_inheritance_embedded_document(self):
        """Ensure embedded documents respect inheritance
        """

        class Comment(EmbeddedDocument):
            content = StringField()

        def create_special_comment():
            class SpecialComment(Comment):
                pass

        self.assertRaises(ValueError, create_special_comment)

        doc = Comment(content='test')
        self.assertFalse('_cls' in doc.to_mongo())

        class Comment(EmbeddedDocument):
            content = StringField()
            meta = {'allow_inheritance': True}

        doc = Comment(content='test')
        self.assertTrue('_cls' in doc.to_mongo())
    def test_document_inheritance(self):
        """Ensure multiple inheritance of abstract documents
        """
        class DateCreatedDocument(Document):
            meta = {
                'allow_inheritance': True,
                'abstract': True,
            }

        class DateUpdatedDocument(Document):
            meta = {
                'allow_inheritance': True,
                'abstract': True,
            }

        try:
            class MyDocument(DateCreatedDocument, DateUpdatedDocument):
                pass
        except Exception:
            self.assertTrue(False, "Couldn't create MyDocument class")
    def test_abstract_documents(self):
        """Ensure that a document superclass can be marked as abstract
        thereby not using it as the name for the collection."""

        defaults = {'index_background': True,
                    'index_drop_dups': True,
                    'index_opts': {'hello': 'world'},
                    'allow_inheritance': True,
                    'queryset_class': 'QuerySet',
                    'db_alias': 'myDB',
                    'shard_key': ('hello', 'world')}

        meta_settings = {'abstract': True}
        meta_settings.update(defaults)

        class Animal(Document):
            name = StringField()
            meta = meta_settings

        class Fish(Animal): pass
        class Guppy(Fish): pass

        class Mammal(Animal):
            meta = {'abstract': True}
        class Human(Mammal): pass

        for k, v in defaults.iteritems():
            for cls in [Animal, Fish, Guppy]:
                self.assertEqual(cls._meta[k], v)

        self.assertFalse('collection' in Animal._meta)
        self.assertFalse('collection' in Mammal._meta)

        self.assertEqual(Animal._get_collection_name(), None)
        self.assertEqual(Mammal._get_collection_name(), None)

        self.assertEqual(Fish._get_collection_name(), 'fish')
        self.assertEqual(Guppy._get_collection_name(), 'fish')
        self.assertEqual(Human._get_collection_name(), 'human')

        def create_bad_abstract():
            class EvilHuman(Human):
                evil = BooleanField(default=True)
                meta = {'abstract': True}
        self.assertRaises(ValueError, create_bad_abstract)
    def test_inherited_collections(self):
        """Ensure that subclassed documents don't override parents'
        collections.
        """

        class Drink(Document):
            name = StringField()
            meta = {'allow_inheritance': True}

        class Drinker(Document):
            drink = GenericReferenceField()

        try:
            warnings.simplefilter("error")

            class AlcoholicDrink(Drink):
                meta = {'collection': 'booze'}

        except SyntaxWarning:
            warnings.simplefilter("ignore")

            class AlcoholicDrink(Drink):
                meta = {'collection': 'booze'}

        else:
            raise AssertionError("SyntaxWarning should be triggered")

        warnings.resetwarnings()

        Drink.drop_collection()
        AlcoholicDrink.drop_collection()
        Drinker.drop_collection()

        red_bull = Drink(name='Red Bull')
        red_bull.save()

        programmer = Drinker(drink=red_bull)
        programmer.save()

        beer = AlcoholicDrink(name='Beer')
        beer.save()
        real_person = Drinker(drink=beer)
        real_person.save()

        self.assertEqual(Drinker.objects[0].drink.name, red_bull.name)
        self.assertEqual(Drinker.objects[1].drink.name, beer.name)


if __name__ == '__main__':
    unittest.main()
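The tests above all revolve around the `_cls` marker that `allow_inheritance` adds to each stored document. A minimal sketch of that mechanism outside the test harness (the `Animal`/`Dog` classes mirror the fixtures above; this snippet is illustrative and not part of the diff):

    class Animal(Document):
        name = StringField()
        meta = {'allow_inheritance': True}

    class Dog(Animal):
        pass

    # Saving a subclass records its full path in _cls, e.g.
    # {'_cls': 'Animal.Dog', 'name': 'Rex'}; querying the base class
    # then matches _cls against Animal._subclasses, which is how
    # test_polymorphic_queries gets Dog instances back from Animal.objects.
    dog = Dog(name='Rex')
    assert dog.to_mongo()['_cls'] == 'Animal.Dog'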
2260  tests/document/instance.py  (new file; diff suppressed because it is too large)
81  tests/document/json_serialisation.py  (new file)
@@ -0,0 +1,81 @@
import sys
sys.path[0:0] = [""]

import unittest
import uuid

from nose.plugins.skip import SkipTest
from datetime import datetime
from bson import ObjectId

import pymongo

from mongoengine import *

__all__ = ("TestJson",)


class TestJson(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')

    def test_json_simple(self):

        class Embedded(EmbeddedDocument):
            string = StringField()

        class Doc(Document):
            string = StringField()
            embedded_field = EmbeddedDocumentField(Embedded)

        doc = Doc(string="Hi", embedded_field=Embedded(string="Hi"))

        self.assertEqual(doc, Doc.from_json(doc.to_json()))

    def test_json_complex(self):

        if pymongo.version_tuple[0] <= 2 and pymongo.version_tuple[1] <= 3:
            raise SkipTest("Need pymongo 2.4, which has a fix for DBRefs")

        class EmbeddedDoc(EmbeddedDocument):
            pass

        class Simple(Document):
            pass

        class Doc(Document):
            string_field = StringField(default='1')
            int_field = IntField(default=1)
            float_field = FloatField(default=1.1)
            boolean_field = BooleanField(default=True)
            datetime_field = DateTimeField(default=datetime.now)
            embedded_document_field = EmbeddedDocumentField(EmbeddedDoc,
                                          default=lambda: EmbeddedDoc())
            list_field = ListField(default=lambda: [1, 2, 3])
            dict_field = DictField(default=lambda: {"hello": "world"})
            objectid_field = ObjectIdField(default=ObjectId)
            reference_field = ReferenceField(Simple, default=lambda:
                                                        Simple().save())
            map_field = MapField(IntField(), default=lambda: {"simple": 1})
            decimal_field = DecimalField(default=1.0)
            complex_datetime_field = ComplexDateTimeField(default=datetime.now)
            url_field = URLField(default="http://mongoengine.org")
            dynamic_field = DynamicField(default=1)
            generic_reference_field = GenericReferenceField(
                                          default=lambda: Simple().save())
            sorted_list_field = SortedListField(IntField(),
                                                default=lambda: [1, 2, 3])
            email_field = EmailField(default="ross@example.com")
            geo_point_field = GeoPointField(default=lambda: [1, 2])
            sequence_field = SequenceField()
            uuid_field = UUIDField(default=uuid.uuid4)
            generic_embedded_document_field = GenericEmbeddedDocumentField(
                                          default=lambda: EmbeddedDoc())

        doc = Doc()
        self.assertEqual(doc, Doc.from_json(doc.to_json()))


if __name__ == '__main__':
    unittest.main()
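For reference, the round trip these tests assert is a plain serialise-then-parse cycle; a minimal sketch reusing the `Doc` and `Embedded` classes from `test_json_simple` above (`to_json` emits MongoDB Extended JSON via `bson.json_util`):

    doc = Doc(string="Hi", embedded_field=Embedded(string="Hi"))

    payload = doc.to_json()            # a str of MongoDB Extended JSON
    restored = Doc.from_json(payload)  # parse and rebuild the document

    assert restored == doc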
146  tests/document/validation.py  (new file)
@@ -0,0 +1,146 @@
# -*- coding: utf-8 -*-
import sys
sys.path[0:0] = [""]

import unittest
from datetime import datetime

from mongoengine import *

__all__ = ("ValidatorErrorTest",)


class ValidatorErrorTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')

    def test_to_dict(self):
        """Ensure a ValidationError handles error to_dict correctly.
        """
        error = ValidationError('root')
        self.assertEqual(error.to_dict(), {})

        # 1st level error schema
        error.errors = {'1st': ValidationError('bad 1st'), }
        self.assertTrue('1st' in error.to_dict())
        self.assertEqual(error.to_dict()['1st'], 'bad 1st')

        # 2nd level error schema
        error.errors = {'1st': ValidationError('bad 1st', errors={
            '2nd': ValidationError('bad 2nd'),
        })}
        self.assertTrue('1st' in error.to_dict())
        self.assertTrue(isinstance(error.to_dict()['1st'], dict))
        self.assertTrue('2nd' in error.to_dict()['1st'])
        self.assertEqual(error.to_dict()['1st']['2nd'], 'bad 2nd')

        # moar levels
        error.errors = {'1st': ValidationError('bad 1st', errors={
            '2nd': ValidationError('bad 2nd', errors={
                '3rd': ValidationError('bad 3rd', errors={
                    '4th': ValidationError('Inception'),
                }),
            }),
        })}
        self.assertTrue('1st' in error.to_dict())
        self.assertTrue('2nd' in error.to_dict()['1st'])
        self.assertTrue('3rd' in error.to_dict()['1st']['2nd'])
        self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd'])
        self.assertEqual(error.to_dict()['1st']['2nd']['3rd']['4th'],
                         'Inception')

        self.assertEqual(error.message, "root(2nd.3rd.4th.Inception: ['1st'])")

    def test_model_validation(self):

        class User(Document):
            username = StringField(primary_key=True)
            name = StringField(required=True)

        try:
            User().validate()
        except ValidationError, e:
            self.assertTrue("User:None" in e.message)
            self.assertEqual(e.to_dict(), {
                'username': 'Field is required',
                'name': 'Field is required'})

        user = User(username="RossC0", name="Ross").save()
        user.name = None
        try:
            user.save()
        except ValidationError, e:
            self.assertTrue("User:RossC0" in e.message)
            self.assertEqual(e.to_dict(), {
                'name': 'Field is required'})

    def test_fields_rewrite(self):
        class BasePerson(Document):
            name = StringField()
            age = IntField()
            meta = {'abstract': True}

        class Person(BasePerson):
            name = StringField(required=True)

        p = Person(age=15)
        self.assertRaises(ValidationError, p.validate)

    def test_embedded_document_validation(self):
        """Ensure that embedded documents may be validated.
        """
        class Comment(EmbeddedDocument):
            date = DateTimeField()
            content = StringField(required=True)

        comment = Comment()
        self.assertRaises(ValidationError, comment.validate)

        comment.content = 'test'
        comment.validate()

        comment.date = 4
        self.assertRaises(ValidationError, comment.validate)

        comment.date = datetime.now()
        comment.validate()
        self.assertEqual(comment._instance, None)

    def test_embedded_db_field_validate(self):

        class SubDoc(EmbeddedDocument):
            val = IntField(required=True)

        class Doc(Document):
            id = StringField(primary_key=True)
            e = EmbeddedDocumentField(SubDoc, db_field='eb')

        try:
            Doc(id="bad").validate()
        except ValidationError, e:
            self.assertTrue("SubDoc:None" in e.message)
            self.assertEqual(e.to_dict(), {
                "e": {'val': 'OK could not be converted to int'}})

        Doc.drop_collection()

        Doc(id="test", e=SubDoc(val=15)).save()

        doc = Doc.objects.first()
        keys = doc._data.keys()
        self.assertEqual(2, len(keys))
        self.assertTrue('e' in keys)
        self.assertTrue('id' in keys)

        doc.e.val = "OK"
        try:
            doc.save()
        except ValidationError, e:
            self.assertTrue("Doc:test" in e.message)
            self.assertEqual(e.to_dict(), {
                "e": {'val': 'OK could not be converted to int'}})


if __name__ == '__main__':
    unittest.main()
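The nested `to_dict()` structure exercised above is what application code typically surfaces to callers; a minimal sketch using the `User` class from `test_model_validation` (the form-handling wrapper is hypothetical):

    def save_user(data):
        # Returns (user, {}) on success or (None, field_errors) on failure.
        try:
            user = User(**data)
            user.save()
        except ValidationError, e:
            # e.to_dict() maps field names to messages, nesting one dict
            # per embedded document, e.g. {'name': 'Field is required'}.
            return None, e.to_dict()
        return user, {}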
246  tests/fields.py  (deleted)
@@ -1,246 +0,0 @@
import unittest
import datetime

from mongoengine import *
from mongoengine.connection import _get_db


class FieldTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = _get_db()

    def test_default_values(self):
        """Ensure that default field values are used when creating a document.
        """
        class Person(Document):
            name = StringField()
            age = IntField(default=30)
            userid = StringField(default=lambda: 'test')

        person = Person(name='Test Person')
        self.assertEqual(person._data['age'], 30)
        self.assertEqual(person._data['userid'], 'test')

    def test_required_values(self):
        """Ensure that required field constraints are enforced.
        """
        class Person(Document):
            name = StringField(required=True)
            age = IntField(required=True)
            userid = StringField()

        person = Person(name="Test User")
        self.assertRaises(ValidationError, person.validate)
        person = Person(age=30)
        self.assertRaises(ValidationError, person.validate)

    def test_object_id_validation(self):
        """Ensure that invalid values cannot be assigned to ObjectId fields.
        """
        class Person(Document):
            name = StringField()

        person = Person(name='Test User')
        self.assertEqual(person.id, None)

        person.id = 47
        self.assertRaises(ValidationError, person.validate)

        person.id = 'abc'
        self.assertRaises(ValidationError, person.validate)

        person.id = '497ce96f395f2f052a494fd4'
        person.validate()

    def test_string_validation(self):
        """Ensure that invalid values cannot be assigned to string fields.
        """
        class Person(Document):
            name = StringField(max_length=20)
            userid = StringField(r'[0-9a-z_]+$')

        person = Person(name=34)
        self.assertRaises(ValidationError, person.validate)

        # Test regex validation on userid
        person = Person(userid='test.User')
        self.assertRaises(ValidationError, person.validate)

        person.userid = 'test_user'
        self.assertEqual(person.userid, 'test_user')
        person.validate()

        # Test max length validation on name
        person = Person(name='Name that is more than twenty characters')
        self.assertRaises(ValidationError, person.validate)

        person.name = 'Shorter name'
        person.validate()

    def test_int_validation(self):
        """Ensure that invalid values cannot be assigned to int fields.
        """
        class Person(Document):
            age = IntField(min_value=0, max_value=110)

        person = Person()
        person.age = 50
        person.validate()

        person.age = -1
        self.assertRaises(ValidationError, person.validate)
        person.age = 120
        self.assertRaises(ValidationError, person.validate)
        person.age = 'ten'
        self.assertRaises(ValidationError, person.validate)

    def test_float_validation(self):
        """Ensure that invalid values cannot be assigned to float fields.
        """
        class Person(Document):
            height = FloatField(min_value=0.1, max_value=3.5)

        person = Person()
        person.height = 1.89
        person.validate()

        person.height = 2
        self.assertRaises(ValidationError, person.validate)
        person.height = 0.01
        self.assertRaises(ValidationError, person.validate)
        person.height = 4.0
        self.assertRaises(ValidationError, person.validate)

    def test_datetime_validation(self):
        """Ensure that invalid values cannot be assigned to datetime fields.
        """
        class LogEntry(Document):
            time = DateTimeField()

        log = LogEntry()
        log.time = datetime.datetime.now()
        log.validate()

        log.time = -1
        self.assertRaises(ValidationError, log.validate)
        log.time = '1pm'
        self.assertRaises(ValidationError, log.validate)

    def test_list_validation(self):
        """Ensure that a list field only accepts lists with valid elements.
        """
        class Comment(EmbeddedDocument):
            content = StringField()

        class BlogPost(Document):
            content = StringField()
            comments = ListField(EmbeddedDocumentField(Comment))
            tags = ListField(StringField())

        post = BlogPost(content='Went for a walk today...')
        post.validate()

        post.tags = 'fun'
        self.assertRaises(ValidationError, post.validate)
        post.tags = [1, 2]
        self.assertRaises(ValidationError, post.validate)

        post.tags = ['fun', 'leisure']
        post.validate()
        post.tags = ('fun', 'leisure')
        post.validate()

        comments = [Comment(content='Good for you'), Comment(content='Yay.')]
        post.comments = comments
        post.validate()

        post.comments = ['a']
        self.assertRaises(ValidationError, post.validate)
        post.comments = 'yay'
        self.assertRaises(ValidationError, post.validate)

    def test_embedded_document_validation(self):
        """Ensure that invalid embedded documents cannot be assigned to
        embedded document fields.
        """
        class Comment(EmbeddedDocument):
            content = StringField()

        class PersonPreferences(EmbeddedDocument):
            food = StringField()
            number = IntField()

        class Person(Document):
            name = StringField()
            preferences = EmbeddedDocumentField(PersonPreferences)

        person = Person(name='Test User')
        person.preferences = 'My Preferences'
        self.assertRaises(ValidationError, person.validate)

        person.preferences = Comment(content='Nice blog post...')
        self.assertRaises(ValidationError, person.validate)

        person.preferences = PersonPreferences(food='Cheese', number=47)
        self.assertEqual(person.preferences.food, 'Cheese')
        person.validate()

    def test_embedded_document_inheritance(self):
        """Ensure that subclasses of embedded documents may be provided to
        EmbeddedDocumentFields of the superclass' type.
        """
        class User(EmbeddedDocument):
            name = StringField()

        class PowerUser(User):
            power = IntField()

        class BlogPost(Document):
            content = StringField()
            author = EmbeddedDocumentField(User)

        post = BlogPost(content='What I did today...')
        post.author = User(name='Test User')
        post.author = PowerUser(name='Test User', power=47)

    def test_reference_validation(self):
        """Ensure that invalid document objects cannot be assigned to reference
        fields.
        """
        class User(Document):
            name = StringField()

        class BlogPost(Document):
            content = StringField()
            author = ReferenceField(User)

        self.assertRaises(ValidationError, ReferenceField, EmbeddedDocument)

        user = User(name='Test User')

        # Ensure that the referenced object must have been saved
        post1 = BlogPost(content='Chips and gravy taste good.')
        post1.author = user
        self.assertRaises(ValidationError, post1.save)

        # Check that an invalid object type cannot be used
        post2 = BlogPost(content='Chips and chilli taste good.')
        post1.author = post2
        self.assertRaises(ValidationError, post1.validate)

        user.save()
        post1.author = user
        post1.save()

        post2.save()
        post1.author = post2
        self.assertRaises(ValidationError, post1.validate)

        User.drop_collection()
        BlogPost.drop_collection()


if __name__ == '__main__':
    unittest.main()
3  tests/fields/__init__.py  (new file)
@@ -0,0 +1,3 @@
from fields import *
from file_tests import *
from geo import *
2243  tests/fields/fields.py  (new file; diff suppressed because it is too large)
412  tests/fields/file_tests.py  (new file)
@@ -0,0 +1,412 @@
# -*- coding: utf-8 -*-
import sys
sys.path[0:0] = [""]

import copy
import os
import unittest
import tempfile

import gridfs

from nose.plugins.skip import SkipTest
from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.python_support import PY3, b, StringIO

TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png')
TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), 'mongodb_leaf.png')


class FileTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

    def tearDown(self):
        self.db.drop_collection('fs.files')
        self.db.drop_collection('fs.chunks')

    def test_file_field_optional(self):
        # Make sure FileField is optional and not required
        class DemoFile(Document):
            the_file = FileField()
        DemoFile.objects.create()

    def test_file_fields(self):
        """Ensure that file fields can be written to and their data retrieved
        """

        class PutFile(Document):
            the_file = FileField()

        PutFile.drop_collection()

        text = b('Hello, World!')
        content_type = 'text/plain'

        putfile = PutFile()
        putfile.the_file.put(text, content_type=content_type)
        putfile.save()

        result = PutFile.objects.first()
        self.assertTrue(putfile == result)
        self.assertEqual(result.the_file.read(), text)
        self.assertEqual(result.the_file.content_type, content_type)
        result.the_file.delete()  # Remove file from GridFS
        PutFile.objects.delete()

        # Ensure file-like objects are stored
        PutFile.drop_collection()

        putfile = PutFile()
        putstring = StringIO()
        putstring.write(text)
        putstring.seek(0)
        putfile.the_file.put(putstring, content_type=content_type)
        putfile.save()

        result = PutFile.objects.first()
        self.assertTrue(putfile == result)
        self.assertEqual(result.the_file.read(), text)
        self.assertEqual(result.the_file.content_type, content_type)
        result.the_file.delete()

    def test_file_fields_stream(self):
        """Ensure that file fields can be written to and their data retrieved
        """
        class StreamFile(Document):
            the_file = FileField()

        StreamFile.drop_collection()

        text = b('Hello, World!')
        more_text = b('Foo Bar')
        content_type = 'text/plain'

        streamfile = StreamFile()
        streamfile.the_file.new_file(content_type=content_type)
        streamfile.the_file.write(text)
        streamfile.the_file.write(more_text)
        streamfile.the_file.close()
        streamfile.save()

        result = StreamFile.objects.first()
        self.assertTrue(streamfile == result)
        self.assertEqual(result.the_file.read(), text + more_text)
        self.assertEqual(result.the_file.content_type, content_type)
        result.the_file.seek(0)
        self.assertEqual(result.the_file.tell(), 0)
        self.assertEqual(result.the_file.read(len(text)), text)
        self.assertEqual(result.the_file.tell(), len(text))
        self.assertEqual(result.the_file.read(len(more_text)), more_text)
        self.assertEqual(result.the_file.tell(), len(text + more_text))
        result.the_file.delete()

        # Ensure deleted file returns None
        self.assertTrue(result.the_file.read() == None)

    def test_file_fields_set(self):

        class SetFile(Document):
            the_file = FileField()

        text = b('Hello, World!')
        more_text = b('Foo Bar')

        SetFile.drop_collection()

        setfile = SetFile()
        setfile.the_file = text
        setfile.save()

        result = SetFile.objects.first()
        self.assertTrue(setfile == result)
        self.assertEqual(result.the_file.read(), text)

        # Try replacing file with new one
        result.the_file.replace(more_text)
        result.save()

        result = SetFile.objects.first()
        self.assertTrue(setfile == result)
        self.assertEqual(result.the_file.read(), more_text)
        result.the_file.delete()

    def test_file_field_no_default(self):

        class GridDocument(Document):
            the_file = FileField()

        GridDocument.drop_collection()

        with tempfile.TemporaryFile() as f:
            f.write(b("Hello World!"))
            f.flush()

            # Test without default
            doc_a = GridDocument()
            doc_a.save()

            doc_b = GridDocument.objects.with_id(doc_a.id)
            doc_b.the_file.replace(f, filename='doc_b')
            doc_b.save()
            self.assertNotEqual(doc_b.the_file.grid_id, None)

            # Test it matches
            doc_c = GridDocument.objects.with_id(doc_b.id)
            self.assertEqual(doc_b.the_file.grid_id, doc_c.the_file.grid_id)

            # Test with default
            doc_d = GridDocument(the_file=b(''))
            doc_d.save()

            doc_e = GridDocument.objects.with_id(doc_d.id)
            self.assertEqual(doc_d.the_file.grid_id, doc_e.the_file.grid_id)

            doc_e.the_file.replace(f, filename='doc_e')
            doc_e.save()

            doc_f = GridDocument.objects.with_id(doc_e.id)
            self.assertEqual(doc_e.the_file.grid_id, doc_f.the_file.grid_id)

        db = GridDocument._get_db()
        grid_fs = gridfs.GridFS(db)
        self.assertEqual(['doc_b', 'doc_e'], grid_fs.list())

    def test_file_uniqueness(self):
        """Ensure that each instance of a FileField is unique
        """
        class TestFile(Document):
            name = StringField()
            the_file = FileField()

        # First instance
        test_file = TestFile()
        test_file.name = "Hello, World!"
        test_file.the_file.put(b('Hello, World!'))
        test_file.save()

        # Second instance
        test_file_dupe = TestFile()
        data = test_file_dupe.the_file.read()  # Should be None

        self.assertTrue(test_file.name != test_file_dupe.name)
        self.assertTrue(test_file.the_file.read() != data)

        TestFile.drop_collection()

    def test_file_saving(self):
        """Ensure you can add meta data to file"""

        class Animal(Document):
            genus = StringField()
            family = StringField()
            photo = FileField()

        Animal.drop_collection()
        marmot = Animal(genus='Marmota', family='Sciuridae')

        marmot_photo = open(TEST_IMAGE_PATH, 'rb')  # Retrieve a photo from disk
        marmot.photo.put(marmot_photo, content_type='image/jpeg', foo='bar')
        marmot.photo.close()
        marmot.save()

        marmot = Animal.objects.get()
        self.assertEqual(marmot.photo.content_type, 'image/jpeg')
        self.assertEqual(marmot.photo.foo, 'bar')

    def test_file_reassigning(self):
        class TestFile(Document):
            the_file = FileField()
        TestFile.drop_collection()

        test_file = TestFile(the_file=open(TEST_IMAGE_PATH, 'rb')).save()
        self.assertEqual(test_file.the_file.get().length, 8313)

        test_file = TestFile.objects.first()
        test_file.the_file = open(TEST_IMAGE2_PATH, 'rb')
        test_file.save()
        self.assertEqual(test_file.the_file.get().length, 4971)

    def test_file_boolean(self):
        """Ensure that a boolean test of a FileField indicates its presence
        """
        class TestFile(Document):
            the_file = FileField()
        TestFile.drop_collection()

        test_file = TestFile()
        self.assertFalse(bool(test_file.the_file))
        test_file.the_file.put(b('Hello, World!'), content_type='text/plain')
        test_file.save()
        self.assertTrue(bool(test_file.the_file))

        test_file = TestFile.objects.first()
        self.assertEqual(test_file.the_file.content_type, "text/plain")

    def test_file_cmp(self):
        """Test comparing against other types"""
        class TestFile(Document):
            the_file = FileField()

        test_file = TestFile()
        self.assertFalse(test_file.the_file in [{"test": 1}])

    def test_image_field(self):
        if PY3:
            raise SkipTest('PIL does not have Python 3 support')

        class TestImage(Document):
            image = ImageField()

        TestImage.drop_collection()

        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'rb'))
        t.save()

        t = TestImage.objects.first()

        self.assertEqual(t.image.format, 'PNG')

        w, h = t.image.size
        self.assertEqual(w, 371)
        self.assertEqual(h, 76)

        t.image.delete()

    def test_image_field_reassigning(self):
        if PY3:
            raise SkipTest('PIL does not have Python 3 support')

        class TestFile(Document):
            the_file = ImageField()
        TestFile.drop_collection()

        test_file = TestFile(the_file=open(TEST_IMAGE_PATH, 'rb')).save()
        self.assertEqual(test_file.the_file.size, (371, 76))

        test_file = TestFile.objects.first()
        test_file.the_file = open(TEST_IMAGE2_PATH, 'rb')
        test_file.save()
        self.assertEqual(test_file.the_file.size, (45, 101))

    def test_image_field_resize(self):
        if PY3:
            raise SkipTest('PIL does not have Python 3 support')

        class TestImage(Document):
            image = ImageField(size=(185, 37))

        TestImage.drop_collection()

        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'rb'))
        t.save()

        t = TestImage.objects.first()

        self.assertEqual(t.image.format, 'PNG')
        w, h = t.image.size

        self.assertEqual(w, 185)
        self.assertEqual(h, 37)

        t.image.delete()

    def test_image_field_resize_force(self):
        if PY3:
            raise SkipTest('PIL does not have Python 3 support')

        class TestImage(Document):
            image = ImageField(size=(185, 37, True))

        TestImage.drop_collection()

        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'rb'))
        t.save()

        t = TestImage.objects.first()

        self.assertEqual(t.image.format, 'PNG')
        w, h = t.image.size

        self.assertEqual(w, 185)
        self.assertEqual(h, 37)

        t.image.delete()

    def test_image_field_thumbnail(self):
        if PY3:
            raise SkipTest('PIL does not have Python 3 support')

        class TestImage(Document):
            image = ImageField(thumbnail_size=(92, 18))

        TestImage.drop_collection()

        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'rb'))
        t.save()

        t = TestImage.objects.first()

        self.assertEqual(t.image.thumbnail.format, 'PNG')
        self.assertEqual(t.image.thumbnail.width, 92)
        self.assertEqual(t.image.thumbnail.height, 18)

        t.image.delete()

    def test_file_multidb(self):
        register_connection('test_files', 'test_files')

        class TestFile(Document):
            name = StringField()
            the_file = FileField(db_alias="test_files",
                                 collection_name="macumba")

        TestFile.drop_collection()

        # delete old filesystem
        get_db("test_files").macumba.files.drop()
        get_db("test_files").macumba.chunks.drop()

        # First instance
        test_file = TestFile()
        test_file.name = "Hello, World!"
        test_file.the_file.put(b('Hello, World!'),
                               name="hello.txt")
        test_file.save()

        data = get_db("test_files").macumba.files.find_one()
        self.assertEqual(data.get('name'), 'hello.txt')

        test_file = TestFile.objects.first()
        self.assertEqual(test_file.the_file.read(),
                         b('Hello, World!'))

    def test_copyable(self):
        class PutFile(Document):
            the_file = FileField()

        PutFile.drop_collection()

        text = b('Hello, World!')
        content_type = 'text/plain'

        putfile = PutFile()
        putfile.the_file.put(text, content_type=content_type)
        putfile.save()

        class TestFile(Document):
            name = StringField()

        self.assertEqual(putfile, copy.copy(putfile))
        self.assertEqual(putfile, copy.deepcopy(putfile))


if __name__ == '__main__':
    unittest.main()
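Distilled from the tests above, the GridFS-backed lifecycle that a `FileField` proxy exposes; a minimal sketch (the `Attachment` class is illustrative, and an open connection is assumed):

    class Attachment(Document):
        the_file = FileField()

    att = Attachment()
    att.the_file.put(b('payload'), content_type='text/plain')  # write to GridFS
    att.save()

    att = Attachment.objects.first()
    data = att.the_file.read()   # read the stored bytes back
    att.the_file.delete()        # remove the file from fs.files / fs.chunks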
274  tests/fields/geo.py  (new file)
@@ -0,0 +1,274 @@
# -*- coding: utf-8 -*-
import sys
sys.path[0:0] = [""]

import unittest

from mongoengine import *
from mongoengine.connection import get_db

__all__ = ("GeoFieldTest", )


class GeoFieldTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

    def _test_for_expected_error(self, Cls, loc, expected):
        try:
            Cls(loc=loc).validate()
            self.fail()
        except ValidationError, e:
            self.assertEqual(expected, e.to_dict()['loc'])

    def test_geopoint_validation(self):
        class Location(Document):
            loc = GeoPointField()

        invalid_coords = [{"x": 1, "y": 2}, 5, "a"]
        expected = 'GeoPointField can only accept tuples or lists of (x, y)'

        for coord in invalid_coords:
            self._test_for_expected_error(Location, coord, expected)

        invalid_coords = [[], [1], [1, 2, 3]]
        for coord in invalid_coords:
            expected = "Value (%s) must be a two-dimensional point" % repr(coord)
            self._test_for_expected_error(Location, coord, expected)

        invalid_coords = [[{}, {}], ("a", "b")]
        for coord in invalid_coords:
            expected = "Both values (%s) in point must be float or int" % repr(coord)
            self._test_for_expected_error(Location, coord, expected)

    def test_point_validation(self):
        class Location(Document):
            loc = PointField()

        invalid_coords = {"x": 1, "y": 2}
        expected = 'PointField can only accept a valid GeoJson dictionary or lists of (x, y)'
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = {"type": "MadeUp", "coordinates": []}
        expected = 'PointField type must be "Point"'
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = {"type": "Point", "coordinates": [1, 2, 3]}
        expected = "Value ([1, 2, 3]) must be a two-dimensional point"
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [5, "a"]
        expected = "PointField can only accept lists of [x, y]"
        for coord in invalid_coords:
            self._test_for_expected_error(Location, coord, expected)

        invalid_coords = [[], [1], [1, 2, 3]]
        for coord in invalid_coords:
            expected = "Value (%s) must be a two-dimensional point" % repr(coord)
            self._test_for_expected_error(Location, coord, expected)

        invalid_coords = [[{}, {}], ("a", "b")]
        for coord in invalid_coords:
            expected = "Both values (%s) in point must be float or int" % repr(coord)
            self._test_for_expected_error(Location, coord, expected)

        Location(loc=[1, 2]).validate()

    def test_linestring_validation(self):
        class Location(Document):
            loc = LineStringField()

        invalid_coords = {"x": 1, "y": 2}
        expected = 'LineStringField can only accept a valid GeoJson dictionary or lists of (x, y)'
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
        expected = 'LineStringField type must be "LineString"'
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = {"type": "LineString", "coordinates": [[1, 2, 3]]}
        expected = "Invalid LineString:\nValue ([1, 2, 3]) must be a two-dimensional point"
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [5, "a"]
        expected = "Invalid LineString must contain at least one valid point"
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[1]]
        expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0])
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[1, 2, 3]]
        expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0])
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[[{}, {}]], [("a", "b")]]
        for coord in invalid_coords:
            expected = "Invalid LineString:\nBoth values (%s) in point must be float or int" % repr(coord[0])
            self._test_for_expected_error(Location, coord, expected)

        Location(loc=[[1, 2], [3, 4], [5, 6], [1, 2]]).validate()

    def test_polygon_validation(self):
        class Location(Document):
            loc = PolygonField()

        invalid_coords = {"x": 1, "y": 2}
        expected = 'PolygonField can only accept a valid GeoJson dictionary or lists of (x, y)'
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
        expected = 'PolygonField type must be "Polygon"'
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = {"type": "Polygon", "coordinates": [[[1, 2, 3]]]}
        expected = "Invalid Polygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[[5, "a"]]]
        expected = "Invalid Polygon:\nBoth values ([5, 'a']) in point must be float or int"
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[[]]]
        expected = "Invalid Polygon must contain at least one valid linestring"
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[[1, 2, 3]]]
        expected = "Invalid Polygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[[{}, {}]], [("a", "b")]]
        expected = "Invalid Polygon:\nBoth values ([{}, {}]) in point must be float or int, Both values (('a', 'b')) in point must be float or int"
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[[1, 2], [3, 4]]]
        expected = "Invalid Polygon:\nLineStrings must start and end at the same point"
        self._test_for_expected_error(Location, invalid_coords, expected)

        Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate()

    def test_indexes_geopoint(self):
        """Ensure that indexes are created automatically for GeoPointFields.
        """
        class Event(Document):
            title = StringField()
            location = GeoPointField()

        geo_indicies = Event._geo_indices()
        self.assertEqual(geo_indicies, [{'fields': [('location', '2d')]}])

    def test_geopoint_embedded_indexes(self):
        """Ensure that indexes are created automatically for GeoPointFields on
        embedded documents.
        """
        class Venue(EmbeddedDocument):
            location = GeoPointField()
            name = StringField()

        class Event(Document):
            title = StringField()
            venue = EmbeddedDocumentField(Venue)

        geo_indicies = Event._geo_indices()
        self.assertEqual(geo_indicies, [{'fields': [('venue.location', '2d')]}])

    def test_indexes_2dsphere(self):
        """Ensure that 2dsphere indexes are created automatically for GeoJSON
        fields.
        """
        class Event(Document):
            title = StringField()
            point = PointField()
            line = LineStringField()
            polygon = PolygonField()

        geo_indicies = Event._geo_indices()
        self.assertTrue({'fields': [('line', '2dsphere')]} in geo_indicies)
        self.assertTrue({'fields': [('polygon', '2dsphere')]} in geo_indicies)
        self.assertTrue({'fields': [('point', '2dsphere')]} in geo_indicies)

    def test_indexes_2dsphere_embedded(self):
        """Ensure that 2dsphere indexes are created automatically for GeoJSON
        fields on embedded documents.
        """
        class Venue(EmbeddedDocument):
            name = StringField()
            point = PointField()
            line = LineStringField()
            polygon = PolygonField()

        class Event(Document):
            title = StringField()
            venue = EmbeddedDocumentField(Venue)

        geo_indicies = Event._geo_indices()
        self.assertTrue({'fields': [('venue.line', '2dsphere')]} in geo_indicies)
        self.assertTrue({'fields': [('venue.polygon', '2dsphere')]} in geo_indicies)
        self.assertTrue({'fields': [('venue.point', '2dsphere')]} in geo_indicies)

    def test_geo_indexes_recursion(self):

        class Location(Document):
            name = StringField()
            location = GeoPointField()

        class Parent(Document):
            name = StringField()
            location = ReferenceField(Location)

        Location.drop_collection()
        Parent.drop_collection()

        list(Parent.objects)

        collection = Parent._get_collection()
        info = collection.index_information()

        self.assertFalse('location_2d' in info)

        self.assertEqual(len(Parent._geo_indices()), 0)
        self.assertEqual(len(Location._geo_indices()), 1)

    def test_geo_indexes_auto_index(self):

        # Test just listing the fields
        class Log(Document):
            location = PointField(auto_index=False)
            datetime = DateTimeField()

            meta = {
                'indexes': [[("location", "2dsphere"), ("datetime", 1)]]
            }

        self.assertEqual([], Log._geo_indices())

        Log.drop_collection()
        Log.ensure_indexes()

        info = Log._get_collection().index_information()
        self.assertEqual(info["location_2dsphere_datetime_1"]["key"],
                         [('location', '2dsphere'), ('datetime', 1)])

        # Test listing explicitly
        class Log(Document):
            location = PointField(auto_index=False)
            datetime = DateTimeField()

            meta = {
                'indexes': [
                    {'fields': [("location", "2dsphere"), ("datetime", 1)]}
                ]
            }

        self.assertEqual([], Log._geo_indices())

        Log.drop_collection()
        Log.ensure_indexes()

        info = Log._get_collection().index_information()
        self.assertEqual(info["location_2dsphere_datetime_1"]["key"],
                         [('location', '2dsphere'), ('datetime', 1)])


if __name__ == '__main__':
    unittest.main()
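As the validation tests show, the GeoJSON fields accept either a bare coordinate list or a full GeoJSON dictionary; a minimal sketch of the two equivalent spellings (the `Location` class mirrors the fixtures above):

    class Location(Document):
        loc = PointField()

    # A bare [x, y] list and an explicit GeoJSON dict validate identically.
    Location(loc=[40.33, -87.757]).validate()
    Location(loc={"type": "Point", "coordinates": [40.33, -87.757]}).validate()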
BIN  tests/fields/mongodb_leaf.png  (new binary file; 4.9 KiB)
BIN  tests/fields/mongoengine.png  (new binary file; 8.1 KiB)
43  tests/fixtures.py  (new file)
@@ -0,0 +1,43 @@
import pickle
from datetime import datetime

from mongoengine import *
from mongoengine import signals


class PickleEmbedded(EmbeddedDocument):
    date = DateTimeField(default=datetime.now)


class PickleTest(Document):
    number = IntField()
    string = StringField(choices=(('One', '1'), ('Two', '2')))
    embedded = EmbeddedDocumentField(PickleEmbedded)
    lists = ListField(StringField())
    photo = FileField()


class PickleSignalsTest(Document):
    number = IntField()
    string = StringField(choices=(('One', '1'), ('Two', '2')))
    embedded = EmbeddedDocumentField(PickleEmbedded)
    lists = ListField(StringField())

    @classmethod
    def post_save(self, sender, document, created, **kwargs):
        pickled = pickle.dumps(document)

    @classmethod
    def post_delete(self, sender, document, **kwargs):
        pickled = pickle.dumps(document)

signals.post_save.connect(PickleSignalsTest.post_save, sender=PickleSignalsTest)
signals.post_delete.connect(PickleSignalsTest.post_delete, sender=PickleSignalsTest)


class Mixin(object):
    name = StringField()


class Base(Document):
    meta = {'allow_inheritance': True}
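These fixtures exist so the pickling tests can round-trip documents through pickle; a minimal usage sketch, assuming the tests package is importable as shown and that an unsaved document pickles cleanly (both assumptions, not part of the diff):

    import pickle
    from tests.fixtures import PickleTest  # hypothetical import path

    doc = PickleTest(number=1, string='One', lists=['a', 'b'])
    copy = pickle.loads(pickle.dumps(doc))  # fields survive the round trip
    assert copy.number == 1 and copy.lists == ['a', 'b']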
8 tests/migration/__init__.py (Normal file)
@@ -0,0 +1,8 @@
import unittest

from convert_to_new_inheritance_model import *
from decimalfield_as_float import *
from refrencefield_dbref_to_object_id import *
from turn_off_inheritance import *
from uuidfield_to_binary import *

if __name__ == '__main__':
    unittest.main()
51 tests/migration/convert_to_new_inheritance_model.py (Normal file)
@@ -0,0 +1,51 @@
# -*- coding: utf-8 -*-
import unittest

from mongoengine import Document, connect
from mongoengine.connection import get_db
from mongoengine.fields import StringField

__all__ = ('ConvertToNewInheritanceModel', )


class ConvertToNewInheritanceModel(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

    def tearDown(self):
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
            self.db.drop_collection(collection)

    def test_how_to_convert_to_the_new_inheritance_model(self):
        """Demonstrates migrating from 0.7 to 0.8"""

        # 1. Declaration of the class
        class Animal(Document):
            name = StringField()
            meta = {
                'allow_inheritance': True,
                'indexes': ['name']
            }

        # 2. Remove _types
        collection = Animal._get_collection()
        collection.update({}, {"$unset": {"_types": 1}}, multi=True)

        # 3. Confirm extra data is removed
        count = collection.find({'_types': {"$exists": True}}).count()
        self.assertEqual(0, count)

        # 4. Remove indexes
        info = collection.index_information()
        indexes_to_drop = [key for key, value in info.iteritems()
                           if '_types' in dict(value['key'])]
        for index in indexes_to_drop:
            collection.drop_index(index)

        # 5. Recreate indexes
        Animal.ensure_indexes()
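To make step 4 above concrete (an illustrative sketch, not part of the migration file): index_information() maps index names to metadata whose 'key' entry is a list of (field, direction) pairs, so dict(value['key']) yields a field mapping that can be tested for '_types':

    info = {'_id_': {'key': [('_id', 1)]},
            '_types_1_name_1': {'key': [('_types', 1), ('name', 1)]}}
    to_drop = [name for name, value in info.iteritems()
               if '_types' in dict(value['key'])]
    assert to_drop == ['_types_1_name_1']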
50 tests/migration/decimalfield_as_float.py (Normal file)
@@ -0,0 +1,50 @@
# -*- coding: utf-8 -*-
import unittest
import decimal
from decimal import Decimal

from mongoengine import Document, connect
from mongoengine.connection import get_db
from mongoengine.fields import StringField, DecimalField, ListField

__all__ = ('ConvertDecimalField', )


class ConvertDecimalField(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

    def test_how_to_convert_decimal_fields(self):
        """Demonstrates migrating from 0.7 to 0.8"""

        # 1. Old definition - decimals stored as strings (force_string=True)
        class Person(Document):
            name = StringField()
            money = DecimalField(force_string=True)
            monies = ListField(DecimalField(force_string=True))

        Person.drop_collection()
        Person(name="Wilson Jr", money=Decimal("2.50"),
               monies=[Decimal("2.10"), Decimal("5.00")]).save()

        # 2. Start the migration by changing the schema
        # Change DecimalField - add precision and rounding settings
        class Person(Document):
            name = StringField()
            money = DecimalField(precision=2, rounding=decimal.ROUND_HALF_UP)
            monies = ListField(DecimalField(precision=2,
                                            rounding=decimal.ROUND_HALF_UP))

        # 3. Loop over all the objects and mark the decimal fields as changed
        for p in Person.objects:
            p._mark_as_changed('money')
            p._mark_as_changed('monies')
            p.save()

        # 4. Confirmation of the fix!
        wilson = Person.objects(name="Wilson Jr").as_pymongo()[0]
        self.assertTrue(isinstance(wilson['money'], float))
        self.assertTrue(all([isinstance(m, float) for m in wilson['monies']]))
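A quick worked example of the precision/rounding combination the new schema requests, using the stdlib only (whether mongoengine makes exactly this quantize call internally is an assumption):

    import decimal
    from decimal import Decimal

    q = Decimal("2.505").quantize(Decimal("0.01"),
                                  rounding=decimal.ROUND_HALF_UP)
    assert q == Decimal("2.51")
    assert isinstance(float(q), float)  # 0.8 stores the value as a float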
52 tests/migration/refrencefield_dbref_to_object_id.py (Normal file)
@@ -0,0 +1,52 @@
# -*- coding: utf-8 -*-
import unittest

from mongoengine import Document, connect
from mongoengine.connection import get_db
from mongoengine.fields import StringField, ReferenceField, ListField

__all__ = ('ConvertToObjectIdsModel', )


class ConvertToObjectIdsModel(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

    def test_how_to_convert_to_object_id_reference_fields(self):
        """Demonstrates migrating from 0.7 to 0.8"""

        # 1. Old definition - using dbrefs
        class Person(Document):
            name = StringField()
            parent = ReferenceField('self', dbref=True)
            friends = ListField(ReferenceField('self', dbref=True))

        Person.drop_collection()

        p1 = Person(name="Wilson", parent=None).save()
        f1 = Person(name="John", parent=None).save()
        f2 = Person(name="Paul", parent=None).save()
        f3 = Person(name="George", parent=None).save()
        f4 = Person(name="Ringo", parent=None).save()
        Person(name="Wilson Jr", parent=p1, friends=[f1, f2, f3, f4]).save()

        # 2. Start the migration by changing the schema
        # Change ReferenceField as now dbref defaults to False
        class Person(Document):
            name = StringField()
            parent = ReferenceField('self')
            friends = ListField(ReferenceField('self'))

        # 3. Loop over all the objects and mark the reference fields as changed
        for p in Person.objects:
            p._mark_as_changed('parent')
            p._mark_as_changed('friends')
            p.save()

        # 4. Confirmation of the fix!
        wilson = Person.objects(name="Wilson Jr").as_pymongo()[0]
        self.assertEqual(p1.id, wilson['parent'])
        self.assertEqual([f1.id, f2.id, f3.id, f4.id], wilson['friends'])
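For reference, the two stored shapes this migration moves between (illustrative only; the 'person' collection name is mongoengine's usual default for Person and is an assumption here):

    from bson import DBRef, ObjectId

    oid = ObjectId()
    old_style = {'name': 'Wilson Jr', 'parent': DBRef('person', oid)}  # dbref=True
    new_style = {'name': 'Wilson Jr', 'parent': oid}                   # 0.8 default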
62 tests/migration/turn_off_inheritance.py (Normal file)
@@ -0,0 +1,62 @@
# -*- coding: utf-8 -*-
import unittest

from mongoengine import Document, connect
from mongoengine.connection import get_db
from mongoengine.fields import StringField

__all__ = ('TurnOffInheritanceTest', )


class TurnOffInheritanceTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

    def tearDown(self):
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
            self.db.drop_collection(collection)

    def test_how_to_turn_off_inheritance(self):
        """Demonstrates migrating from allow_inheritance = True to False."""

        # 1. Old declaration of the class
        class Animal(Document):
            name = StringField()
            meta = {
                'allow_inheritance': True,
                'indexes': ['name']
            }

        # 2. Turn off inheritance
        class Animal(Document):
            name = StringField()
            meta = {
                'allow_inheritance': False,
                'indexes': ['name']
            }

        # 3. Remove _types and _cls
        collection = Animal._get_collection()
        collection.update({}, {"$unset": {"_types": 1, "_cls": 1}}, multi=True)

        # 4. Confirm extra data is removed
        count = collection.find({"$or": [{'_types': {"$exists": True}},
                                         {'_cls': {"$exists": True}}]}).count()
        assert count == 0

        # 5. Remove indexes
        info = collection.index_information()
        indexes_to_drop = [key for key, value in info.iteritems()
                           if '_types' in dict(value['key'])
                           or '_cls' in dict(value['key'])]
        for index in indexes_to_drop:
            collection.drop_index(index)

        # 6. Recreate indexes
        Animal.ensure_indexes()
48 tests/migration/uuidfield_to_binary.py (Normal file)
@@ -0,0 +1,48 @@
# -*- coding: utf-8 -*-
import unittest
import uuid

from mongoengine import Document, connect
from mongoengine.connection import get_db
from mongoengine.fields import StringField, UUIDField, ListField

__all__ = ('ConvertToBinaryUUID', )


class ConvertToBinaryUUID(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

    def test_how_to_convert_to_binary_uuid_fields(self):
        """Demonstrates migrating from 0.7 to 0.8"""

        # 1. Old definition - UUIDs stored as strings
        class Person(Document):
            name = StringField()
            uuid = UUIDField(binary=False)
            uuids = ListField(UUIDField(binary=False))

        Person.drop_collection()
        Person(name="Wilson Jr", uuid=uuid.uuid4(),
               uuids=[uuid.uuid4(), uuid.uuid4()]).save()

        # 2. Start the migration by changing the schema
        # Change UUIDField as now binary defaults to True
        class Person(Document):
            name = StringField()
            uuid = UUIDField()
            uuids = ListField(UUIDField())

        # 3. Loop over all the objects and mark the uuid fields as changed
        for p in Person.objects:
            p._mark_as_changed('uuid')
            p._mark_as_changed('uuids')
            p.save()

        # 4. Confirmation of the fix!
        wilson = Person.objects(name="Wilson Jr").as_pymongo()[0]
        self.assertTrue(isinstance(wilson['uuid'], uuid.UUID))
        self.assertTrue(all([isinstance(u, uuid.UUID) for u in wilson['uuids']]))
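The two on-disk representations this migration converts between, sketched with the stdlib (the exact BSON binary subtype used for storage is pymongo's concern and left out here):

    import uuid

    u = uuid.uuid4()
    as_string = str(u)   # what UUIDField(binary=False) wrote in 0.7
    as_bytes = u.bytes   # the 16-byte payload behind the 0.8 binary encoding
    assert uuid.UUID(as_string) == uuid.UUID(bytes=as_bytes) == u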
@@ -1,308 +0,0 @@
import unittest
import pymongo

from mongoengine.queryset import QuerySet
from mongoengine import *


class QuerySetTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')

        class Person(Document):
            name = StringField()
            age = IntField()
        self.Person = Person

    def test_initialisation(self):
        """Ensure that CollectionManager is correctly initialised."""
        self.assertTrue(isinstance(self.Person.objects, QuerySet))
        self.assertEqual(self.Person.objects._collection.name(),
                         self.Person._meta['collection'])
        self.assertTrue(isinstance(self.Person.objects._collection,
                                   pymongo.collection.Collection))

    def test_transform_query(self):
        """Ensure that the _transform_query function operates correctly."""
        self.assertEqual(QuerySet._transform_query(name='test', age=30),
                         {'name': 'test', 'age': 30})
        self.assertEqual(QuerySet._transform_query(age__lt=30),
                         {'age': {'$lt': 30}})
        self.assertEqual(QuerySet._transform_query(age__gt=20, age__lt=50),
                         {'age': {'$gt': 20, '$lt': 50}})
        self.assertEqual(QuerySet._transform_query(age=20, age__gt=50),
                         {'age': 20})
        self.assertEqual(QuerySet._transform_query(friend__age__gte=30),
                         {'friend.age': {'$gte': 30}})
        self.assertEqual(QuerySet._transform_query(name__exists=True),
                         {'name': {'$exists': True}})

    def test_find(self):
        """Ensure that a query returns a valid set of results."""
        person1 = self.Person(name="User A", age=20)
        person1.save()
        person2 = self.Person(name="User B", age=30)
        person2.save()

        # Find all people in the collection
        people = self.Person.objects
        self.assertEqual(len(people), 2)
        results = list(people)
        self.assertTrue(isinstance(results[0], self.Person))
        self.assertTrue(isinstance(results[0].id, (pymongo.objectid.ObjectId,
                                                   str, unicode)))
        self.assertEqual(results[0].name, "User A")
        self.assertEqual(results[0].age, 20)
        self.assertEqual(results[1].name, "User B")
        self.assertEqual(results[1].age, 30)

        # Use a query to filter the people found to just person1
        people = self.Person.objects(age=20)
        self.assertEqual(len(people), 1)
        person = people.next()
        self.assertEqual(person.name, "User A")
        self.assertEqual(person.age, 20)

        # Test limit
        people = list(self.Person.objects.limit(1))
        self.assertEqual(len(people), 1)
        self.assertEqual(people[0].name, 'User A')

        # Test skip
        people = list(self.Person.objects.skip(1))
        self.assertEqual(len(people), 1)
        self.assertEqual(people[0].name, 'User B')

        person3 = self.Person(name="User C", age=40)
        person3.save()

        # Test slice limit
        people = list(self.Person.objects[:2])
        self.assertEqual(len(people), 2)
        self.assertEqual(people[0].name, 'User A')
        self.assertEqual(people[1].name, 'User B')

        # Test slice skip
        people = list(self.Person.objects[1:])
        self.assertEqual(len(people), 2)
        self.assertEqual(people[0].name, 'User B')
        self.assertEqual(people[1].name, 'User C')

        # Test slice limit and skip
        people = list(self.Person.objects[1:2])
        self.assertEqual(len(people), 1)
        self.assertEqual(people[0].name, 'User B')

    def test_find_one(self):
        """Ensure that a query using find_one returns a valid result."""
        person1 = self.Person(name="User A", age=20)
        person1.save()
        person2 = self.Person(name="User B", age=30)
        person2.save()

        # Retrieve the first person from the database
        person = self.Person.objects.first()
        self.assertTrue(isinstance(person, self.Person))
        self.assertEqual(person.name, "User A")
        self.assertEqual(person.age, 20)

        # Use a query to filter the people found to just person2
        person = self.Person.objects(age=30).first()
        self.assertEqual(person.name, "User B")

        person = self.Person.objects(age__lt=30).first()
        self.assertEqual(person.name, "User A")

        # Use array syntax
        person = self.Person.objects[0]
        self.assertEqual(person.name, "User A")

        person = self.Person.objects[1]
        self.assertEqual(person.name, "User B")

        self.assertRaises(IndexError, self.Person.objects.__getitem__, 2)

        # Find a document using just the object id
        person = self.Person.objects.with_id(person1.id)
        self.assertEqual(person.name, "User A")

    def test_find_embedded(self):
        """Ensure that an embedded document is properly returned from a query."""
        class User(EmbeddedDocument):
            name = StringField()

        class BlogPost(Document):
            content = StringField()
            author = EmbeddedDocumentField(User)

        post = BlogPost(content='Had a good coffee today...')
        post.author = User(name='Test User')
        post.save()

        result = BlogPost.objects.first()
        self.assertTrue(isinstance(result.author, User))
        self.assertEqual(result.author.name, 'Test User')

        BlogPost.drop_collection()

    def test_delete(self):
        """Ensure that documents are properly deleted from the database."""
        self.Person(name="User A", age=20).save()
        self.Person(name="User B", age=30).save()
        self.Person(name="User C", age=40).save()

        self.assertEqual(len(self.Person.objects), 3)

        self.Person.objects(age__lt=30).delete()
        self.assertEqual(len(self.Person.objects), 2)

        self.Person.objects.delete()
        self.assertEqual(len(self.Person.objects), 0)

    def test_order_by(self):
        """Ensure that QuerySets may be ordered."""
        self.Person(name="User A", age=20).save()
        self.Person(name="User B", age=40).save()
        self.Person(name="User C", age=30).save()

        names = [p.name for p in self.Person.objects.order_by('-age')]
        self.assertEqual(names, ['User B', 'User C', 'User A'])

        names = [p.name for p in self.Person.objects.order_by('+age')]
        self.assertEqual(names, ['User A', 'User C', 'User B'])

        names = [p.name for p in self.Person.objects.order_by('age')]
        self.assertEqual(names, ['User A', 'User C', 'User B'])

        ages = [p.age for p in self.Person.objects.order_by('-name')]
        self.assertEqual(ages, [30, 40, 20])

    def test_item_frequencies(self):
        """Ensure that item frequencies are properly generated from lists."""
        class BlogPost(Document):
            hits = IntField()
            tags = ListField(StringField(), name='blogTags')

        BlogPost.drop_collection()

        BlogPost(hits=1, tags=['music', 'film', 'actors']).save()
        BlogPost(hits=2, tags=['music']).save()
        BlogPost(hits=3, tags=['music', 'actors']).save()

        f = BlogPost.objects.item_frequencies('tags')
        f = dict((key, int(val)) for key, val in f.items())
        self.assertEqual(set(['music', 'film', 'actors']), set(f.keys()))
        self.assertEqual(f['music'], 3)
        self.assertEqual(f['actors'], 2)
        self.assertEqual(f['film'], 1)

        # Ensure query is taken into account
        f = BlogPost.objects(hits__gt=1).item_frequencies('tags')
        f = dict((key, int(val)) for key, val in f.items())
        self.assertEqual(set(['music', 'actors']), set(f.keys()))
        self.assertEqual(f['music'], 2)
        self.assertEqual(f['actors'], 1)

        # Check that normalization works
        f = BlogPost.objects.item_frequencies('tags', normalize=True)
        self.assertAlmostEqual(f['music'], 3.0/6.0)
        self.assertAlmostEqual(f['actors'], 2.0/6.0)
        self.assertAlmostEqual(f['film'], 1.0/6.0)

        BlogPost.drop_collection()

    def test_average(self):
        """Ensure that a field can be averaged correctly."""
        ages = [23, 54, 12, 94, 27]
        for i, age in enumerate(ages):
            self.Person(name='test%s' % i, age=age).save()

        avg = float(sum(ages)) / len(ages)
        self.assertAlmostEqual(int(self.Person.objects.average('age')), avg)

        self.Person(name='ageless person').save()
        self.assertEqual(int(self.Person.objects.average('age')), avg)

    def test_sum(self):
        """Ensure that a field can be summed over correctly."""
        ages = [23, 54, 12, 94, 27]
        for i, age in enumerate(ages):
            self.Person(name='test%s' % i, age=age).save()

        self.assertEqual(int(self.Person.objects.sum('age')), sum(ages))

        self.Person(name='ageless person').save()
        self.assertEqual(int(self.Person.objects.sum('age')), sum(ages))

    def test_custom_manager(self):
        """Ensure that custom QuerySetManager instances work as expected."""
        class BlogPost(Document):
            tags = ListField(StringField())

            @queryset_manager
            def music_posts(queryset):
                return queryset(tags='music')

        BlogPost.drop_collection()

        post1 = BlogPost(tags=['music', 'film'])
        post1.save()
        post2 = BlogPost(tags=['music'])
        post2.save()
        post3 = BlogPost(tags=['film', 'actors'])
        post3.save()

        self.assertEqual([p.id for p in BlogPost.objects],
                         [post1.id, post2.id, post3.id])
        self.assertEqual([p.id for p in BlogPost.music_posts],
                         [post1.id, post2.id])

        BlogPost.drop_collection()

    def test_query_field_name(self):
        """Ensure that the correct field name is used when querying."""
        class Comment(EmbeddedDocument):
            content = StringField(name='commentContent')

        class BlogPost(Document):
            title = StringField(name='postTitle')
            comments = ListField(EmbeddedDocumentField(Comment),
                                 name='postComments')

        BlogPost.drop_collection()

        data = {'title': 'Post 1', 'comments': [Comment(content='test')]}
        BlogPost(**data).save()

        self.assertTrue('postTitle' in
                        BlogPost.objects(title=data['title'])._query)
        self.assertFalse('title' in
                         BlogPost.objects(title=data['title'])._query)
        self.assertEqual(len(BlogPost.objects(title=data['title'])), 1)

        self.assertTrue('postComments.commentContent' in
                        BlogPost.objects(comments__content='test')._query)
        self.assertEqual(len(BlogPost.objects(comments__content='test')), 1)

        BlogPost.drop_collection()

    def tearDown(self):
        self.Person.drop_collection()


if __name__ == '__main__':
    unittest.main()
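One behavioural change buried in this deletion is worth flagging: the old _transform_query silently dropped the operator when an equality and an operator filter hit the same field, whereas its replacement (see the transform tests in tests/queryset/transform.py below) keeps both under $and:

    # Old (deleted above):  _transform_query(age=20, age__gt=50) -> {'age': 20}
    # New (asserted below): transform.query(age=20, age__gt=50)
    #                       -> {'$and': [{'age': {'$gt': 50}}, {'age': 20}]}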
5 tests/queryset/__init__.py (Normal file)
@@ -0,0 +1,5 @@
from transform import *
from field_list import *
from queryset import *
from visitor import *
from geo import *
399 tests/queryset/field_list.py (Normal file)
@@ -0,0 +1,399 @@
import sys
sys.path[0:0] = [""]

import unittest

from mongoengine import *
from mongoengine.queryset import QueryFieldList

__all__ = ("QueryFieldListTest", "OnlyExcludeAllTest")


class QueryFieldListTest(unittest.TestCase):

    def test_empty(self):
        q = QueryFieldList()
        self.assertFalse(q)

        q = QueryFieldList(always_include=['_cls'])
        self.assertFalse(q)

    def test_include_include(self):
        q = QueryFieldList()
        q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY, _only_called=True)
        self.assertEqual(q.as_dict(), {'a': 1, 'b': 1})
        q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
        self.assertEqual(q.as_dict(), {'a': 1, 'b': 1, 'c': 1})

    def test_include_exclude(self):
        q = QueryFieldList()
        q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY)
        self.assertEqual(q.as_dict(), {'a': 1, 'b': 1})
        q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE)
        self.assertEqual(q.as_dict(), {'a': 1})

    def test_exclude_exclude(self):
        q = QueryFieldList()
        q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE)
        self.assertEqual(q.as_dict(), {'a': 0, 'b': 0})
        q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE)
        self.assertEqual(q.as_dict(), {'a': 0, 'b': 0, 'c': 0})

    def test_exclude_include(self):
        q = QueryFieldList()
        q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE)
        self.assertEqual(q.as_dict(), {'a': 0, 'b': 0})
        q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
        self.assertEqual(q.as_dict(), {'c': 1})

    def test_always_include(self):
        q = QueryFieldList(always_include=['x', 'y'])
        q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE)
        q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
        self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'c': 1})

    def test_reset(self):
        q = QueryFieldList(always_include=['x', 'y'])
        q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE)
        q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
        self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'c': 1})
        q.reset()
        self.assertFalse(q)
        q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
        self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'b': 1, 'c': 1})

    def test_using_a_slice(self):
        q = QueryFieldList()
        q += QueryFieldList(fields=['a'], value={"$slice": 5})
        self.assertEqual(q.as_dict(), {'a': {"$slice": 5}})


class OnlyExcludeAllTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')

        class Person(Document):
            name = StringField()
            age = IntField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()
        self.Person = Person

    def test_mixing_only_exclude(self):

        class MyDoc(Document):
            a = StringField()
            b = StringField()
            c = StringField()
            d = StringField()
            e = StringField()
            f = StringField()

        include = ['a', 'b', 'c', 'd', 'e']
        exclude = ['d', 'e']
        only = ['b', 'c']

        qs = MyDoc.objects.fields(**dict(((i, 1) for i in include)))
        self.assertEqual(qs._loaded_fields.as_dict(),
                         {'a': 1, 'b': 1, 'c': 1, 'd': 1, 'e': 1})
        qs = qs.only(*only)
        self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})
        qs = qs.exclude(*exclude)
        self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})

        qs = MyDoc.objects.fields(**dict(((i, 1) for i in include)))
        qs = qs.exclude(*exclude)
        self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1})
        qs = qs.only(*only)
        self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})

        qs = MyDoc.objects.exclude(*exclude)
        qs = qs.fields(**dict(((i, 1) for i in include)))
        self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1})
        qs = qs.only(*only)
        self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})

    def test_slicing(self):

        class MyDoc(Document):
            a = ListField()
            b = ListField()
            c = ListField()
            d = ListField()
            e = ListField()
            f = ListField()

        include = ['a', 'b', 'c', 'd', 'e']
        exclude = ['d', 'e']
        only = ['b', 'c']

        qs = MyDoc.objects.fields(**dict(((i, 1) for i in include)))
        qs = qs.exclude(*exclude)
        qs = qs.only(*only)
        qs = qs.fields(slice__b=5)
        self.assertEqual(qs._loaded_fields.as_dict(),
                         {'b': {'$slice': 5}, 'c': 1})

        qs = qs.fields(slice__c=[5, 1])
        self.assertEqual(qs._loaded_fields.as_dict(),
                         {'b': {'$slice': 5}, 'c': {'$slice': [5, 1]}})

        qs = qs.exclude('c')
        self.assertEqual(qs._loaded_fields.as_dict(),
                         {'b': {'$slice': 5}})

    def test_only(self):
        """Ensure that QuerySet.only only returns the requested fields."""
        person = self.Person(name='test', age=25)
        person.save()

        obj = self.Person.objects.only('name').get()
        self.assertEqual(obj.name, person.name)
        self.assertEqual(obj.age, None)

        obj = self.Person.objects.only('age').get()
        self.assertEqual(obj.name, None)
        self.assertEqual(obj.age, person.age)

        obj = self.Person.objects.only('name', 'age').get()
        self.assertEqual(obj.name, person.name)
        self.assertEqual(obj.age, person.age)

        # Check polymorphism still works
        class Employee(self.Person):
            salary = IntField(db_field='wage')

        employee = Employee(name='test employee', age=40, salary=30000)
        employee.save()

        obj = self.Person.objects(id=employee.id).only('age').get()
        self.assertTrue(isinstance(obj, Employee))

        # Check field names are looked up properly
        obj = Employee.objects(id=employee.id).only('salary').get()
        self.assertEqual(obj.salary, employee.salary)
        self.assertEqual(obj.name, None)

    def test_only_with_subfields(self):
        class User(EmbeddedDocument):
            name = StringField()
            email = StringField()

        class Comment(EmbeddedDocument):
            title = StringField()
            text = StringField()

        class BlogPost(Document):
            content = StringField()
            author = EmbeddedDocumentField(User)
            comments = ListField(EmbeddedDocumentField(Comment))

        BlogPost.drop_collection()

        post = BlogPost(content='Had a good coffee today...')
        post.author = User(name='Test User')
        post.comments = [Comment(title='I aggree', text='Great post!'),
                         Comment(title='Coffee', text='I hate coffee')]
        post.save()

        obj = BlogPost.objects.only('author.name',).get()
        self.assertEqual(obj.content, None)
        self.assertEqual(obj.author.email, None)
        self.assertEqual(obj.author.name, 'Test User')
        self.assertEqual(obj.comments, [])

        obj = BlogPost.objects.only('content', 'comments.title',).get()
        self.assertEqual(obj.content, 'Had a good coffee today...')
        self.assertEqual(obj.author, None)
        self.assertEqual(obj.comments[0].title, 'I aggree')
        self.assertEqual(obj.comments[1].title, 'Coffee')
        self.assertEqual(obj.comments[0].text, None)
        self.assertEqual(obj.comments[1].text, None)

        obj = BlogPost.objects.only('comments',).get()
        self.assertEqual(obj.content, None)
        self.assertEqual(obj.author, None)
        self.assertEqual(obj.comments[0].title, 'I aggree')
        self.assertEqual(obj.comments[1].title, 'Coffee')
        self.assertEqual(obj.comments[0].text, 'Great post!')
        self.assertEqual(obj.comments[1].text, 'I hate coffee')

        BlogPost.drop_collection()

    def test_exclude(self):
        class User(EmbeddedDocument):
            name = StringField()
            email = StringField()

        class Comment(EmbeddedDocument):
            title = StringField()
            text = StringField()

        class BlogPost(Document):
            content = StringField()
            author = EmbeddedDocumentField(User)
            comments = ListField(EmbeddedDocumentField(Comment))

        BlogPost.drop_collection()

        post = BlogPost(content='Had a good coffee today...')
        post.author = User(name='Test User')
        post.comments = [Comment(title='I aggree', text='Great post!'),
                         Comment(title='Coffee', text='I hate coffee')]
        post.save()

        obj = BlogPost.objects.exclude('author', 'comments.text').get()
        self.assertEqual(obj.author, None)
        self.assertEqual(obj.content, 'Had a good coffee today...')
        self.assertEqual(obj.comments[0].title, 'I aggree')
        self.assertEqual(obj.comments[0].text, None)

        BlogPost.drop_collection()

    def test_exclude_only_combining(self):
        class Attachment(EmbeddedDocument):
            name = StringField()
            content = StringField()

        class Email(Document):
            sender = StringField()
            to = StringField()
            subject = StringField()
            body = StringField()
            content_type = StringField()
            attachments = ListField(EmbeddedDocumentField(Attachment))

        Email.drop_collection()
        email = Email(sender='me', to='you', subject='From Russia with Love',
                      body='Hello!', content_type='text/plain')
        email.attachments = [
            Attachment(name='file1.doc', content='ABC'),
            Attachment(name='file2.doc', content='XYZ'),
        ]
        email.save()

        obj = Email.objects.exclude('content_type').exclude('body').get()
        self.assertEqual(obj.sender, 'me')
        self.assertEqual(obj.to, 'you')
        self.assertEqual(obj.subject, 'From Russia with Love')
        self.assertEqual(obj.body, None)
        self.assertEqual(obj.content_type, None)

        obj = Email.objects.only('sender', 'to').exclude('body', 'sender').get()
        self.assertEqual(obj.sender, None)
        self.assertEqual(obj.to, 'you')
        self.assertEqual(obj.subject, None)
        self.assertEqual(obj.body, None)
        self.assertEqual(obj.content_type, None)

        obj = Email.objects.exclude('attachments.content').exclude('body').only('to', 'attachments.name').get()
        self.assertEqual(obj.attachments[0].name, 'file1.doc')
        self.assertEqual(obj.attachments[0].content, None)
        self.assertEqual(obj.sender, None)
        self.assertEqual(obj.to, 'you')
        self.assertEqual(obj.subject, None)
        self.assertEqual(obj.body, None)
        self.assertEqual(obj.content_type, None)

        Email.drop_collection()

    def test_all_fields(self):

        class Email(Document):
            sender = StringField()
            to = StringField()
            subject = StringField()
            body = StringField()
            content_type = StringField()

        Email.drop_collection()

        email = Email(sender='me', to='you', subject='From Russia with Love',
                      body='Hello!', content_type='text/plain')
        email.save()

        obj = Email.objects.exclude('content_type', 'body').only('to', 'body').all_fields().get()
        self.assertEqual(obj.sender, 'me')
        self.assertEqual(obj.to, 'you')
        self.assertEqual(obj.subject, 'From Russia with Love')
        self.assertEqual(obj.body, 'Hello!')
        self.assertEqual(obj.content_type, 'text/plain')

        Email.drop_collection()

    def test_slicing_fields(self):
        """Ensure that query slicing an array works."""
        class Numbers(Document):
            n = ListField(IntField())

        Numbers.drop_collection()

        numbers = Numbers(n=[0, 1, 2, 3, 4, 5, -5, -4, -3, -2, -1])
        numbers.save()

        # first three
        numbers = Numbers.objects.fields(slice__n=3).get()
        self.assertEqual(numbers.n, [0, 1, 2])

        # last three
        numbers = Numbers.objects.fields(slice__n=-3).get()
        self.assertEqual(numbers.n, [-3, -2, -1])

        # skip 2, limit 3
        numbers = Numbers.objects.fields(slice__n=[2, 3]).get()
        self.assertEqual(numbers.n, [2, 3, 4])

        # skip to fifth from last, limit 4
        numbers = Numbers.objects.fields(slice__n=[-5, 4]).get()
        self.assertEqual(numbers.n, [-5, -4, -3, -2])

        # skip to fifth from last, limit 10
        numbers = Numbers.objects.fields(slice__n=[-5, 10]).get()
        self.assertEqual(numbers.n, [-5, -4, -3, -2, -1])

        # skip to fifth from last, limit 10 dict method
        numbers = Numbers.objects.fields(n={"$slice": [-5, 10]}).get()
        self.assertEqual(numbers.n, [-5, -4, -3, -2, -1])

    def test_slicing_nested_fields(self):
        """Ensure that query slicing an embedded array works."""

        class EmbeddedNumber(EmbeddedDocument):
            n = ListField(IntField())

        class Numbers(Document):
            embedded = EmbeddedDocumentField(EmbeddedNumber)

        Numbers.drop_collection()

        numbers = Numbers()
        numbers.embedded = EmbeddedNumber(n=[0, 1, 2, 3, 4, 5, -5, -4, -3, -2, -1])
        numbers.save()

        # first three
        numbers = Numbers.objects.fields(slice__embedded__n=3).get()
        self.assertEqual(numbers.embedded.n, [0, 1, 2])

        # last three
        numbers = Numbers.objects.fields(slice__embedded__n=-3).get()
        self.assertEqual(numbers.embedded.n, [-3, -2, -1])

        # skip 2, limit 3
        numbers = Numbers.objects.fields(slice__embedded__n=[2, 3]).get()
        self.assertEqual(numbers.embedded.n, [2, 3, 4])

        # skip to fifth from last, limit 4
        numbers = Numbers.objects.fields(slice__embedded__n=[-5, 4]).get()
        self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2])

        # skip to fifth from last, limit 10
        numbers = Numbers.objects.fields(slice__embedded__n=[-5, 10]).get()
        self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1])

        # skip to fifth from last, limit 10 dict method
        numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get()
        self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1])


if __name__ == '__main__':
    unittest.main()
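The dictionaries asserted via as_dict() throughout this file are exactly the projections handed to MongoDB; with bare pymongo the last slicing example would read roughly as follows (an illustrative sketch, not part of the tests):

    # only/include        -> {'b': 1, 'c': 1}
    # exclude             -> {'d': 0, 'e': 0}
    # fields(slice__b=5)  -> {'b': {'$slice': 5}}
    # e.g. directly against the driver:
    # db.my_doc.find({}, {'b': {'$slice': 5}, 'c': 1})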
418 tests/queryset/geo.py (Normal file)
@@ -0,0 +1,418 @@
|
|||||||
|
import sys
|
||||||
|
sys.path[0:0] = [""]
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from mongoengine import *
|
||||||
|
|
||||||
|
__all__ = ("GeoQueriesTest",)
|
||||||
|
|
||||||
|
|
||||||
|
class GeoQueriesTest(unittest.TestCase):
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
connect(db='mongoenginetest')
|
||||||
|
|
||||||
|
def test_geospatial_operators(self):
|
||||||
|
"""Ensure that geospatial queries are working.
|
||||||
|
"""
|
||||||
|
class Event(Document):
|
||||||
|
title = StringField()
|
||||||
|
date = DateTimeField()
|
||||||
|
location = GeoPointField()
|
||||||
|
|
||||||
|
def __unicode__(self):
|
||||||
|
return self.title
|
||||||
|
|
||||||
|
Event.drop_collection()
|
||||||
|
|
||||||
|
event1 = Event(title="Coltrane Motion @ Double Door",
|
||||||
|
date=datetime.now() - timedelta(days=1),
|
||||||
|
location=[-87.677137, 41.909889]).save()
|
||||||
|
event2 = Event(title="Coltrane Motion @ Bottom of the Hill",
|
||||||
|
date=datetime.now() - timedelta(days=10),
|
||||||
|
location=[-122.4194155, 37.7749295]).save()
|
||||||
|
event3 = Event(title="Coltrane Motion @ Empty Bottle",
|
||||||
|
date=datetime.now(),
|
||||||
|
location=[-87.686638, 41.900474]).save()
|
||||||
|
|
||||||
|
# find all events "near" pitchfork office, chicago.
|
||||||
|
# note that "near" will show the san francisco event, too,
|
||||||
|
# although it sorts to last.
|
||||||
|
events = Event.objects(location__near=[-87.67892, 41.9120459])
|
||||||
|
self.assertEqual(events.count(), 3)
|
||||||
|
self.assertEqual(list(events), [event1, event3, event2])
|
||||||
|
|
||||||
|
# find events within 5 degrees of pitchfork office, chicago
|
||||||
|
point_and_distance = [[-87.67892, 41.9120459], 5]
|
||||||
|
events = Event.objects(location__within_distance=point_and_distance)
|
||||||
|
self.assertEqual(events.count(), 2)
|
||||||
|
events = list(events)
|
||||||
|
self.assertTrue(event2 not in events)
|
||||||
|
self.assertTrue(event1 in events)
|
||||||
|
self.assertTrue(event3 in events)
|
||||||
|
|
||||||
|
# ensure ordering is respected by "near"
|
||||||
|
events = Event.objects(location__near=[-87.67892, 41.9120459])
|
||||||
|
events = events.order_by("-date")
|
||||||
|
self.assertEqual(events.count(), 3)
|
||||||
|
self.assertEqual(list(events), [event3, event1, event2])
|
||||||
|
|
||||||
|
# find events within 10 degrees of san francisco
|
||||||
|
point = [-122.415579, 37.7566023]
|
||||||
|
events = Event.objects(location__near=point, location__max_distance=10)
|
||||||
|
self.assertEqual(events.count(), 1)
|
||||||
|
self.assertEqual(events[0], event2)
|
||||||
|
|
||||||
|
# find events within 10 degrees of san francisco
|
||||||
|
point_and_distance = [[-122.415579, 37.7566023], 10]
|
||||||
|
events = Event.objects(location__within_distance=point_and_distance)
|
||||||
|
self.assertEqual(events.count(), 1)
|
||||||
|
self.assertEqual(events[0], event2)
|
||||||
|
|
||||||
|
# find events within 1 degree of greenpoint, broolyn, nyc, ny
|
||||||
|
point_and_distance = [[-73.9509714, 40.7237134], 1]
|
||||||
|
events = Event.objects(location__within_distance=point_and_distance)
|
||||||
|
self.assertEqual(events.count(), 0)
|
||||||
|
|
||||||
|
# ensure ordering is respected by "within_distance"
|
||||||
|
point_and_distance = [[-87.67892, 41.9120459], 10]
|
||||||
|
events = Event.objects(location__within_distance=point_and_distance)
|
||||||
|
events = events.order_by("-date")
|
||||||
|
self.assertEqual(events.count(), 2)
|
||||||
|
self.assertEqual(events[0], event3)
|
||||||
|
|
||||||
|
# check that within_box works
|
||||||
|
box = [(-125.0, 35.0), (-100.0, 40.0)]
|
||||||
|
events = Event.objects(location__within_box=box)
|
||||||
|
self.assertEqual(events.count(), 1)
|
||||||
|
self.assertEqual(events[0].id, event2.id)
|
||||||
|
|
||||||
|
polygon = [
|
||||||
|
(-87.694445, 41.912114),
|
||||||
|
(-87.69084, 41.919395),
|
||||||
|
(-87.681742, 41.927186),
|
||||||
|
(-87.654276, 41.911731),
|
||||||
|
(-87.656164, 41.898061),
|
||||||
|
]
|
||||||
|
events = Event.objects(location__within_polygon=polygon)
|
||||||
|
self.assertEqual(events.count(), 1)
|
||||||
|
self.assertEqual(events[0].id, event1.id)
|
||||||
|
|
||||||
|
polygon2 = [
|
||||||
|
(-1.742249, 54.033586),
|
||||||
|
(-1.225891, 52.792797),
|
||||||
|
(-4.40094, 53.389881)
|
||||||
|
]
|
||||||
|
events = Event.objects(location__within_polygon=polygon2)
|
||||||
|
self.assertEqual(events.count(), 0)
|
||||||
|
|
||||||
|
def test_geo_spatial_embedded(self):
|
||||||
|
|
||||||
|
class Venue(EmbeddedDocument):
|
||||||
|
location = GeoPointField()
|
||||||
|
name = StringField()
|
||||||
|
|
||||||
|
class Event(Document):
|
||||||
|
title = StringField()
|
||||||
|
venue = EmbeddedDocumentField(Venue)
|
||||||
|
|
||||||
|
Event.drop_collection()
|
||||||
|
|
||||||
|
venue1 = Venue(name="The Rock", location=[-87.677137, 41.909889])
|
||||||
|
venue2 = Venue(name="The Bridge", location=[-122.4194155, 37.7749295])
|
||||||
|
|
||||||
|
event1 = Event(title="Coltrane Motion @ Double Door",
|
||||||
|
venue=venue1).save()
|
||||||
|
event2 = Event(title="Coltrane Motion @ Bottom of the Hill",
|
||||||
|
venue=venue2).save()
|
||||||
|
event3 = Event(title="Coltrane Motion @ Empty Bottle",
|
||||||
|
venue=venue1).save()
|
||||||
|
|
||||||
|
# find all events "near" pitchfork office, chicago.
|
||||||
|
# note that "near" will show the san francisco event, too,
|
||||||
|
# although it sorts to last.
|
||||||
|
events = Event.objects(venue__location__near=[-87.67892, 41.9120459])
|
||||||
|
self.assertEqual(events.count(), 3)
|
||||||
|
self.assertEqual(list(events), [event1, event3, event2])
|
||||||
|
|
||||||
|
def test_spherical_geospatial_operators(self):
|
||||||
|
"""Ensure that spherical geospatial queries are working
|
||||||
|
"""
|
||||||
|
class Point(Document):
|
||||||
|
location = GeoPointField()
|
||||||
|
|
||||||
|
Point.drop_collection()
|
||||||
|
|
||||||
|
# These points are one degree apart, which (according to Google Maps)
|
||||||
|
# is about 110 km apart at this place on the Earth.
|
||||||
|
north_point = Point(location=[-122, 38]).save() # Near Concord, CA
|
||||||
|
south_point = Point(location=[-122, 37]).save() # Near Santa Cruz, CA
|
||||||
|
|
||||||
|
earth_radius = 6378.009 # in km (needs to be a float for dividing by)
|
||||||
|
|
||||||
|
# Finds both points because they are within 60 km of the reference
|
||||||
|
# point equidistant between them.
|
||||||
|
points = Point.objects(location__near_sphere=[-122, 37.5])
|
||||||
|
self.assertEqual(points.count(), 2)
|
||||||
|
|
||||||
|
# Same behavior for _within_spherical_distance
|
||||||
|
points = Point.objects(
|
||||||
|
location__within_spherical_distance=[[-122, 37.5], 60/earth_radius]
|
||||||
|
)
|
||||||
|
self.assertEqual(points.count(), 2)
|
||||||
|
|
||||||
|
points = Point.objects(location__near_sphere=[-122, 37.5],
|
||||||
|
location__max_distance=60 / earth_radius)
|
||||||
|
self.assertEqual(points.count(), 2)
|
||||||
|
|
||||||
|
# Finds both points, but orders the north point first because it's
|
||||||
|
# closer to the reference point to the north.
|
||||||
|
points = Point.objects(location__near_sphere=[-122, 38.5])
|
||||||
|
self.assertEqual(points.count(), 2)
|
||||||
|
self.assertEqual(points[0].id, north_point.id)
|
||||||
|
self.assertEqual(points[1].id, south_point.id)
|
||||||
|
|
||||||
|
# Finds both points, but orders the south point first because it's
|
||||||
|
# closer to the reference point to the south.
|
||||||
|
points = Point.objects(location__near_sphere=[-122, 36.5])
|
||||||
|
self.assertEqual(points.count(), 2)
|
||||||
|
self.assertEqual(points[0].id, south_point.id)
|
||||||
|
self.assertEqual(points[1].id, north_point.id)
|
||||||
|
|
||||||
|
# Finds only one point because only the first point is within 60km of
|
||||||
|
# the reference point to the south.
|
||||||
|
points = Point.objects(
|
||||||
|
location__within_spherical_distance=[[-122, 36.5], 60/earth_radius])
|
||||||
|
self.assertEqual(points.count(), 1)
|
||||||
|
self.assertEqual(points[0].id, south_point.id)
|
||||||
|
|
||||||
|
def test_2dsphere_point(self):
|
||||||
|
|
||||||
|
class Event(Document):
|
||||||
|
title = StringField()
|
||||||
|
date = DateTimeField()
|
||||||
|
location = PointField()
|
||||||
|
|
||||||
|
def __unicode__(self):
|
||||||
|
return self.title
|
||||||
|
|
||||||
|
Event.drop_collection()
|
||||||
|
|
||||||
|
event1 = Event(title="Coltrane Motion @ Double Door",
|
||||||
|
date=datetime.now() - timedelta(days=1),
|
||||||
|
location=[-87.677137, 41.909889])
|
||||||
|
event1.save()
|
||||||
|
event2 = Event(title="Coltrane Motion @ Bottom of the Hill",
|
||||||
|
date=datetime.now() - timedelta(days=10),
|
||||||
|
location=[-122.4194155, 37.7749295]).save()
|
||||||
|
event3 = Event(title="Coltrane Motion @ Empty Bottle",
|
||||||
|
date=datetime.now(),
|
||||||
|
location=[-87.686638, 41.900474]).save()
|
||||||
|
|
||||||
|
# find all events "near" pitchfork office, chicago.
|
||||||
|
# note that "near" will show the san francisco event, too,
|
||||||
|
# although it sorts to last.
|
||||||
|
events = Event.objects(location__near=[-87.67892, 41.9120459])
|
||||||
|
self.assertEqual(events.count(), 3)
|
||||||
|
self.assertEqual(list(events), [event1, event3, event2])
|
||||||
|
|
||||||
|
# find events within 5 degrees of pitchfork office, chicago
|
||||||
|
point_and_distance = [[-87.67892, 41.9120459], 2]
|
||||||
|
events = Event.objects(location__geo_within_center=point_and_distance)
|
||||||
|
self.assertEqual(events.count(), 2)
|
||||||
|
events = list(events)
|
||||||
|
self.assertTrue(event2 not in events)
|
||||||
|
self.assertTrue(event1 in events)
|
||||||
|
self.assertTrue(event3 in events)
|
||||||
|
|
||||||
|
# ensure ordering is respected by "near"
|
||||||
|
events = Event.objects(location__near=[-87.67892, 41.9120459])
|
||||||
|
events = events.order_by("-date")
|
||||||
|
self.assertEqual(events.count(), 3)
|
||||||
|
self.assertEqual(list(events), [event3, event1, event2])
|
||||||
|
|
||||||
|
# find events within 10km of san francisco
|
||||||
|
point = [-122.415579, 37.7566023]
|
||||||
|
events = Event.objects(location__near=point, location__max_distance=10000)
|
||||||
|
self.assertEqual(events.count(), 1)
|
||||||
|
self.assertEqual(events[0], event2)
|
||||||
|
|
||||||
|
# find events within 1km of greenpoint, broolyn, nyc, ny
|
||||||
|
events = Event.objects(location__near=[-73.9509714, 40.7237134], location__max_distance=1000)
|
||||||
|
self.assertEqual(events.count(), 0)
|
||||||
|
|
||||||
|
# ensure ordering is respected by "near"
|
||||||
|
events = Event.objects(location__near=[-87.67892, 41.9120459],
|
||||||
|
location__max_distance=10000).order_by("-date")
|
||||||
|
self.assertEqual(events.count(), 2)
|
||||||
|
self.assertEqual(events[0], event3)
|
||||||
|
|
||||||
|
# check that within_box works
|
||||||
|
box = [(-125.0, 35.0), (-100.0, 40.0)]
|
||||||
|
events = Event.objects(location__geo_within_box=box)
|
||||||
|
self.assertEqual(events.count(), 1)
|
||||||
|
self.assertEqual(events[0].id, event2.id)
|
||||||
|
|
||||||
|
polygon = [
|
||||||
|
(-87.694445, 41.912114),
|
||||||
|
(-87.69084, 41.919395),
|
||||||
|
(-87.681742, 41.927186),
|
||||||
|
(-87.654276, 41.911731),
|
||||||
|
(-87.656164, 41.898061),
|
||||||
|
]
|
||||||
|
events = Event.objects(location__geo_within_polygon=polygon)
|
||||||
|
self.assertEqual(events.count(), 1)
|
||||||
|
self.assertEqual(events[0].id, event1.id)
|
||||||
|
|
||||||
|
polygon2 = [
|
||||||
|
(-1.742249, 54.033586),
|
||||||
|
(-1.225891, 52.792797),
|
||||||
|
(-4.40094, 53.389881)
|
||||||
|
]
|
||||||
|
events = Event.objects(location__geo_within_polygon=polygon2)
|
||||||
|
self.assertEqual(events.count(), 0)
|
||||||
|
|
||||||
|
def test_2dsphere_point_embedded(self):
|
||||||
|
|
||||||
|
class Venue(EmbeddedDocument):
|
||||||
|
location = GeoPointField()
|
||||||
|
name = StringField()
|
||||||
|
|
||||||
|
class Event(Document):
|
||||||
|
title = StringField()
|
||||||
|
venue = EmbeddedDocumentField(Venue)
|
||||||
|
|
||||||
|
Event.drop_collection()
|
||||||
|
|
||||||
|
venue1 = Venue(name="The Rock", location=[-87.677137, 41.909889])
|
||||||
|
venue2 = Venue(name="The Bridge", location=[-122.4194155, 37.7749295])
|
||||||
|
|
||||||
|
event1 = Event(title="Coltrane Motion @ Double Door",
|
||||||
|
venue=venue1).save()
|
||||||
|
event2 = Event(title="Coltrane Motion @ Bottom of the Hill",
                       venue=venue2).save()
        event3 = Event(title="Coltrane Motion @ Empty Bottle",
                       venue=venue1).save()

        # find all events "near" pitchfork office, chicago.
        # note that "near" will show the san francisco event, too,
        # although it sorts to last.
        events = Event.objects(venue__location__near=[-87.67892, 41.9120459])
        self.assertEqual(events.count(), 3)
        self.assertEqual(list(events), [event1, event3, event2])

    def test_linestring(self):

        class Road(Document):
            name = StringField()
            line = LineStringField()

        Road.drop_collection()

        Road(name="66", line=[[40, 5], [41, 6]]).save()

        # near
        point = {"type": "Point", "coordinates": [40, 5]}
        roads = Road.objects.filter(line__near=point["coordinates"]).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(line__near=point).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(line__near={"$geometry": point}).count()
        self.assertEqual(1, roads)

        # Within
        polygon = {"type": "Polygon",
                   "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
        roads = Road.objects.filter(line__geo_within=polygon["coordinates"]).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(line__geo_within=polygon).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(line__geo_within={"$geometry": polygon}).count()
        self.assertEqual(1, roads)

        # Intersects
        line = {"type": "LineString",
                "coordinates": [[40, 5], [40, 6]]}
        roads = Road.objects.filter(line__geo_intersects=line["coordinates"]).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(line__geo_intersects=line).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(line__geo_intersects={"$geometry": line}).count()
        self.assertEqual(1, roads)

        polygon = {"type": "Polygon",
                   "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
        roads = Road.objects.filter(line__geo_intersects=polygon["coordinates"]).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(line__geo_intersects=polygon).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(line__geo_intersects={"$geometry": polygon}).count()
        self.assertEqual(1, roads)

    def test_polygon(self):

        class Road(Document):
            name = StringField()
            poly = PolygonField()

        Road.drop_collection()

        Road(name="66", poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]]).save()

        # near
        point = {"type": "Point", "coordinates": [40, 5]}
        roads = Road.objects.filter(poly__near=point["coordinates"]).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(poly__near=point).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(poly__near={"$geometry": point}).count()
        self.assertEqual(1, roads)

        # Within
        polygon = {"type": "Polygon",
                   "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
        roads = Road.objects.filter(poly__geo_within=polygon["coordinates"]).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(poly__geo_within=polygon).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(poly__geo_within={"$geometry": polygon}).count()
        self.assertEqual(1, roads)

        # Intersects
        line = {"type": "LineString",
                "coordinates": [[40, 5], [41, 6]]}
        roads = Road.objects.filter(poly__geo_intersects=line["coordinates"]).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(poly__geo_intersects=line).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(poly__geo_intersects={"$geometry": line}).count()
        self.assertEqual(1, roads)

        polygon = {"type": "Polygon",
                   "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
        roads = Road.objects.filter(poly__geo_intersects=polygon["coordinates"]).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(poly__geo_intersects=polygon).count()
        self.assertEqual(1, roads)

        roads = Road.objects.filter(poly__geo_intersects={"$geometry": polygon}).count()
        self.assertEqual(1, roads)


if __name__ == '__main__':
    unittest.main()
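The three spellings above (raw coordinates, a GeoJSON dict, and an explicit "$geometry" wrapper) all compile to the same 2dsphere operator. A minimal sketch of that mapping, separate from the diff; the Place class, the geodemo database name, and the printed query shape are illustrative assumptions, not part of this changeset:

from mongoengine import Document, StringField, PointField, connect

class Place(Document):
    name = StringField()
    location = PointField()  # declares a 2dsphere index on 'location'

connect('geodemo')  # assumes a local mongod; database name is arbitrary
Place(name="office", location=[-87.67892, 41.9120459]).save()

# A plain coordinate list is promoted to a $near/$geometry clause;
# inspecting _query (as the tests above do) shows the raw form:
qs = Place.objects(location__near=[-87.67892, 41.9120459])
print(qs._query)
# roughly: {'location': {'$near': {'$geometry': {'type': 'Point',
#           'coordinates': [-87.67892, 41.9120459]}}}}  (shape may vary by version)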
tests/queryset/queryset.py (3385 lines, new file; diff suppressed because it is too large)
tests/queryset/transform.py (147 lines, new file)
@@ -0,0 +1,147 @@
import sys
sys.path[0:0] = [""]

import unittest

from mongoengine import *
from mongoengine.queryset import Q
from mongoengine.queryset import transform

__all__ = ("TransformTest",)


class TransformTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')

    def test_transform_query(self):
        """Ensure that the _transform_query function operates correctly.
        """
        self.assertEqual(transform.query(name='test', age=30),
                         {'name': 'test', 'age': 30})
        self.assertEqual(transform.query(age__lt=30),
                         {'age': {'$lt': 30}})
        self.assertEqual(transform.query(age__gt=20, age__lt=50),
                         {'age': {'$gt': 20, '$lt': 50}})
        self.assertEqual(transform.query(age=20, age__gt=50),
                         {'$and': [{'age': {'$gt': 50}}, {'age': 20}]})
        self.assertEqual(transform.query(friend__age__gte=30),
                         {'friend.age': {'$gte': 30}})
        self.assertEqual(transform.query(name__exists=True),
                         {'name': {'$exists': True}})

    def test_query_field_name(self):
        """Ensure that the correct field name is used when querying.
        """
        class Comment(EmbeddedDocument):
            content = StringField(db_field='commentContent')

        class BlogPost(Document):
            title = StringField(db_field='postTitle')
            comments = ListField(EmbeddedDocumentField(Comment),
                                 db_field='postComments')

        BlogPost.drop_collection()

        data = {'title': 'Post 1', 'comments': [Comment(content='test')]}
        post = BlogPost(**data)
        post.save()

        self.assertTrue('postTitle' in
                        BlogPost.objects(title=data['title'])._query)
        self.assertFalse('title' in
                         BlogPost.objects(title=data['title'])._query)
        self.assertEqual(BlogPost.objects(title=data['title']).count(), 1)

        self.assertTrue('_id' in BlogPost.objects(pk=post.id)._query)
        self.assertEqual(BlogPost.objects(pk=post.id).count(), 1)

        self.assertTrue('postComments.commentContent' in
                        BlogPost.objects(comments__content='test')._query)
        self.assertEqual(BlogPost.objects(comments__content='test').count(), 1)

        BlogPost.drop_collection()

    def test_query_pk_field_name(self):
        """Ensure that the correct "primary key" field name is used when
        querying.
        """
        class BlogPost(Document):
            title = StringField(primary_key=True, db_field='postTitle')

        BlogPost.drop_collection()

        data = {'title': 'Post 1'}
        post = BlogPost(**data)
        post.save()

        self.assertTrue('_id' in BlogPost.objects(pk=data['title'])._query)
        self.assertTrue('_id' in BlogPost.objects(title=data['title'])._query)
        self.assertEqual(BlogPost.objects(pk=data['title']).count(), 1)

        BlogPost.drop_collection()

    def test_chaining(self):
        class A(Document):
            pass

        class B(Document):
            a = ReferenceField(A)

        A.drop_collection()
        B.drop_collection()

        a1 = A().save()
        a2 = A().save()

        B(a=a1).save()

        # Both conditions in a single filter() call
        q1 = B.objects.filter(a__in=[a1, a2], a=a1)._query

        # The same conditions applied via chained filter() calls
        # should produce an identical query (this was previously broken)
        q2 = B.objects.filter(a__in=[a1, a2])
        q2 = q2.filter(a=a1)._query

        self.assertEqual(q1, q2)

    def test_raw_query_and_Q_objects(self):
        """Ensure that __raw__ queries play nicely with Q objects.
        """
        class Foo(Document):
            name = StringField()
            a = StringField()
            b = StringField()
            c = StringField()

            meta = {
                'allow_inheritance': False
            }

        query = Foo.objects(__raw__={'$nor': [{'name': 'bar'}]})._query
        self.assertEqual(query, {'$nor': [{'name': 'bar'}]})

        q1 = {'$or': [{'a': 1}, {'b': 1}]}
        query = Foo.objects(Q(__raw__=q1) & Q(c=1))._query
        self.assertEqual(query, {'$or': [{'a': 1}, {'b': 1}], 'c': 1})

    def test_raw_and_merging(self):
        class Doc(Document):
            meta = {'allow_inheritance': False}

        raw_query = Doc.objects(__raw__={'deleted': False,
                                         'scraped': 'yes',
                                         '$nor': [{'views.extracted': 'no'},
                                                  {'attachments.views.extracted': 'no'}]
                                         })._query

        expected = {'deleted': False, 'scraped': 'yes',
                    '$nor': [{'views.extracted': 'no'},
                             {'attachments.views.extracted': 'no'}]}
        self.assertEqual(expected, raw_query)


if __name__ == '__main__':
    unittest.main()
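The assertions above double as a reference table for how keyword lookups are rewritten into raw Mongo operator documents. A short sketch that replays a few of them; the printed dicts are copied directly from the assertions, so they should hold for the version under test:

from mongoengine.queryset import transform

# Keyword lookups become raw Mongo operator documents:
print(transform.query(age__gt=20, age__lt=50))  # {'age': {'$gt': 20, '$lt': 50}}
print(transform.query(friend__age__gte=30))     # {'friend.age': {'$gte': 30}}
print(transform.query(name__exists=True))       # {'name': {'$exists': True}}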
tests/queryset/visitor.py (334 lines, new file)
@@ -0,0 +1,334 @@
import sys
sys.path[0:0] = [""]

import unittest

from bson import ObjectId
from datetime import datetime

from mongoengine import *
from mongoengine.queryset import Q
from mongoengine.errors import InvalidQueryError

__all__ = ("QTest",)


class QTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')

        class Person(Document):
            name = StringField()
            age = IntField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()
        self.Person = Person

    def test_empty_q(self):
        """Ensure that empty Q objects won't hurt.
        """
        q1 = Q()
        q2 = Q(age__gte=18)
        q3 = Q()
        q4 = Q(name='test')
        q5 = Q()

        class Person(Document):
            name = StringField()
            age = IntField()

        query = {'$or': [{'age': {'$gte': 18}}, {'name': 'test'}]}
        self.assertEqual((q1 | q2 | q3 | q4 | q5).to_query(Person), query)

        query = {'age': {'$gte': 18}, 'name': 'test'}
        self.assertEqual((q1 & q2 & q3 & q4 & q5).to_query(Person), query)

    def test_q_with_dbref(self):
        """Ensure Q objects handle DBRefs correctly"""
        connect(db='mongoenginetest')

        class User(Document):
            pass

        class Post(Document):
            created_user = ReferenceField(User)

        user = User.objects.create()
        Post.objects.create(created_user=user)

        self.assertEqual(Post.objects.filter(created_user=user).count(), 1)
        self.assertEqual(Post.objects.filter(Q(created_user=user)).count(), 1)

    def test_and_combination(self):
        """Ensure that Q-objects correctly AND together.
        """
        class TestDoc(Document):
            x = IntField()
            y = StringField()

        # Check that an error is raised when conflicting queries are ANDed
        def invalid_combination():
            query = Q(x__lt=7) & Q(x__lt=3)
            query.to_query(TestDoc)
        self.assertRaises(InvalidQueryError, invalid_combination)

        # Check normal cases work without an error
        query = Q(x__lt=7) & Q(x__gt=3)

        q1 = Q(x__lt=7)
        q2 = Q(x__gt=3)
        query = (q1 & q2).to_query(TestDoc)
        self.assertEqual(query, {'x': {'$lt': 7, '$gt': 3}})

        # More complex nested example
        query = Q(x__lt=100) & Q(y__ne='NotMyString')
        query &= Q(y__in=['a', 'b', 'c']) & Q(x__gt=-100)
        mongo_query = {
            'x': {'$lt': 100, '$gt': -100},
            'y': {'$ne': 'NotMyString', '$in': ['a', 'b', 'c']},
        }
        self.assertEqual(query.to_query(TestDoc), mongo_query)

    def test_or_combination(self):
        """Ensure that Q-objects correctly OR together.
        """
        class TestDoc(Document):
            x = IntField()

        q1 = Q(x__lt=3)
        q2 = Q(x__gt=7)
        query = (q1 | q2).to_query(TestDoc)
        self.assertEqual(query, {
            '$or': [
                {'x': {'$lt': 3}},
                {'x': {'$gt': 7}},
            ]
        })

    def test_and_or_combination(self):
        """Ensure that Q-objects handle ANDing ORed components.
        """
        class TestDoc(Document):
            x = IntField()
            y = BooleanField()

        TestDoc.drop_collection()

        query = (Q(x__gt=0) | Q(x__exists=False))
        query &= Q(x__lt=100)
        self.assertEqual(query.to_query(TestDoc), {'$and': [
            {'$or': [{'x': {'$gt': 0}},
                     {'x': {'$exists': False}}]},
            {'x': {'$lt': 100}}]
        })

        q1 = (Q(x__gt=0) | Q(x__exists=False))
        q2 = (Q(x__lt=100) | Q(y=True))
        query = (q1 & q2).to_query(TestDoc)

        TestDoc(x=101).save()
        TestDoc(x=10).save()
        TestDoc(y=True).save()

        self.assertEqual(query,
                         {'$and': [
                             {'$or': [{'x': {'$gt': 0}}, {'x': {'$exists': False}}]},
                             {'$or': [{'x': {'$lt': 100}}, {'y': True}]}
                         ]})

        self.assertEqual(2, TestDoc.objects(q1 & q2).count())

    def test_or_and_or_combination(self):
        """Ensure that Q-objects handle ORing ANDed ORed components. :)
        """
        class TestDoc(Document):
            x = IntField()
            y = BooleanField()

        TestDoc.drop_collection()
        TestDoc(x=-1, y=True).save()
        TestDoc(x=101, y=True).save()
        TestDoc(x=99, y=False).save()
        TestDoc(x=101, y=False).save()

        q1 = (Q(x__gt=0) & (Q(y=True) | Q(y__exists=False)))
        q2 = (Q(x__lt=100) & (Q(y=False) | Q(y__exists=False)))
        query = (q1 | q2).to_query(TestDoc)

        self.assertEqual(query,
                         {'$or': [
                             {'$and': [{'x': {'$gt': 0}},
                                       {'$or': [{'y': True}, {'y': {'$exists': False}}]}]},
                             {'$and': [{'x': {'$lt': 100}},
                                       {'$or': [{'y': False}, {'y': {'$exists': False}}]}]}
                         ]}
                         )

        self.assertEqual(2, TestDoc.objects(q1 | q2).count())

    def test_multiple_occurence_in_field(self):
        class Test(Document):
            name = StringField(max_length=40)
            title = StringField(max_length=40)

        q1 = Q(name__contains='te') | Q(title__contains='te')
        q2 = Q(name__contains='12') | Q(title__contains='12')

        q3 = q1 & q2

        query = q3.to_query(Test)
        self.assertEqual(query["$and"][0], q1.to_query(Test))
        self.assertEqual(query["$and"][1], q2.to_query(Test))

    def test_q_clone(self):

        class TestDoc(Document):
            x = IntField()

        TestDoc.drop_collection()
        for i in xrange(1, 101):
            t = TestDoc(x=i)
            t.save()

        # Check normal cases work without an error
        test = TestDoc.objects(Q(x__lt=7) & Q(x__gt=3))

        self.assertEqual(test.count(), 3)

        test2 = test.clone()
        self.assertEqual(test2.count(), 3)
        self.assertFalse(test2 == test)

        test3 = test2.filter(x=6)
        self.assertEqual(test3.count(), 1)
        self.assertEqual(test.count(), 3)

    def test_q(self):
        """Ensure that Q objects may be used to query for documents.
        """
        class BlogPost(Document):
            title = StringField()
            publish_date = DateTimeField()
            published = BooleanField()

        BlogPost.drop_collection()

        post1 = BlogPost(title='Test 1', publish_date=datetime(2010, 1, 8), published=False)
        post1.save()

        post2 = BlogPost(title='Test 2', publish_date=datetime(2010, 1, 15), published=True)
        post2.save()

        post3 = BlogPost(title='Test 3', published=True)
        post3.save()

        post4 = BlogPost(title='Test 4', publish_date=datetime(2010, 1, 8))
        post4.save()

        post5 = BlogPost(title='Test 1', publish_date=datetime(2010, 1, 15))
        post5.save()

        post6 = BlogPost(title='Test 1', published=False)
        post6.save()

        # Check ObjectId lookup works
        obj = BlogPost.objects(id=post1.id).first()
        self.assertEqual(obj, post1)

        # Check Q object combination where one branch matches nothing
        q = BlogPost.objects(Q(title='Test 5') | Q(published=True))
        posts = [post.id for post in q]

        published_posts = (post2, post3)
        self.assertTrue(all(obj.id in posts for obj in published_posts))

        q = BlogPost.objects(Q(title='Test 1') | Q(published=True))
        posts = [post.id for post in q]
        published_posts = (post1, post2, post3, post5, post6)
        self.assertTrue(all(obj.id in posts for obj in published_posts))

        # Check Q object combination
        date = datetime(2010, 1, 10)
        q = BlogPost.objects(Q(publish_date__lte=date) | Q(published=True))
        posts = [post.id for post in q]

        published_posts = (post1, post2, post3, post4)
        self.assertTrue(all(obj.id in posts for obj in published_posts))

        self.assertFalse(any(obj.id in posts for obj in [post5, post6]))

        BlogPost.drop_collection()

        # Check the 'in' operator
        self.Person(name='user1', age=20).save()
        self.Person(name='user2', age=20).save()
        self.Person(name='user3', age=30).save()
        self.Person(name='user4', age=40).save()

        self.assertEqual(self.Person.objects(Q(age__in=[20])).count(), 2)
        self.assertEqual(self.Person.objects(Q(age__in=[20, 30])).count(), 3)

        # Test invalid query objs
        def wrong_query_objs():
            self.Person.objects('user1')

        def wrong_query_objs_filter():
            self.Person.objects.filter('user1')

        self.assertRaises(InvalidQueryError, wrong_query_objs)
        self.assertRaises(InvalidQueryError, wrong_query_objs_filter)

    def test_q_regex(self):
        """Ensure that Q objects can be queried using regexes.
        """
        person = self.Person(name='Guido van Rossum')
        person.save()

        import re
        obj = self.Person.objects(Q(name=re.compile('^Gui'))).first()
        self.assertEqual(obj, person)
        obj = self.Person.objects(Q(name=re.compile('^gui'))).first()
        self.assertEqual(obj, None)

        obj = self.Person.objects(Q(name=re.compile('^gui', re.I))).first()
        self.assertEqual(obj, person)

        obj = self.Person.objects(Q(name__not=re.compile('^bob'))).first()
        self.assertEqual(obj, person)

        obj = self.Person.objects(Q(name__not=re.compile('^Gui'))).first()
        self.assertEqual(obj, None)

    def test_q_lists(self):
        """Ensure that Q objects query ListFields correctly.
        """
        class BlogPost(Document):
            tags = ListField(StringField())

        BlogPost.drop_collection()

        BlogPost(tags=['python', 'mongo']).save()
        BlogPost(tags=['python']).save()

        self.assertEqual(BlogPost.objects(Q(tags='mongo')).count(), 1)
        self.assertEqual(BlogPost.objects(Q(tags='python')).count(), 2)

        BlogPost.drop_collection()

    def test_q_merge_queries_edge_case(self):

        class User(Document):
            email = EmailField(required=False)
            name = StringField()

        User.drop_collection()
        pk = ObjectId()
        User(email='example@example.com', pk=pk).save()

        self.assertEqual(1, User.objects.filter(
            Q(email='example@example.com') |
            Q(name='John Doe')
        ).limit(2).filter(pk=pk).count())


if __name__ == '__main__':
    unittest.main()
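Reduced to its core, the AND/OR behaviour these tests assert looks like this. A sketch assuming, as the tests above do, that to_query only needs the document class and not a live connection:

from mongoengine import Document, IntField
from mongoengine.queryset import Q

class TestDoc(Document):
    x = IntField()

# ANDing compatible ranges merges them into a single clause...
print((Q(x__gt=3) & Q(x__lt=7)).to_query(TestDoc))
# -> {'x': {'$gt': 3, '$lt': 7}}

# ...while ORing produces an explicit $or list:
print((Q(x__lt=3) | Q(x__gt=7)).to_query(TestDoc))
# -> {'$or': [{'x': {'$lt': 3}}, {'x': {'$gt': 7}}]}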
tests/test_connection.py (99 lines, new file)
@@ -0,0 +1,99 @@
import sys
sys.path[0:0] = [""]
import unittest
import datetime

import pymongo
from bson.tz_util import utc

from mongoengine import *
import mongoengine.connection
from mongoengine.connection import get_db, get_connection, ConnectionError


class ConnectionTest(unittest.TestCase):

    def tearDown(self):
        mongoengine.connection._connection_settings = {}
        mongoengine.connection._connections = {}
        mongoengine.connection._dbs = {}

    def test_connect(self):
        """Ensure that the connect() method works properly.
        """
        connect('mongoenginetest')

        conn = get_connection()
        self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))

        db = get_db()
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'mongoenginetest')

        connect('mongoenginetest2', alias='testdb')
        conn = get_connection('testdb')
        self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))

    def test_connect_uri(self):
        """Ensure that the connect() method works properly with URIs.
        """
        c = connect(db='mongoenginetest', alias='admin')
        c.admin.system.users.remove({})
        c.mongoenginetest.system.users.remove({})

        c.admin.add_user("admin", "password")
        c.admin.authenticate("admin", "password")
        c.mongoenginetest.add_user("username", "password")

        self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost')

        connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest')

        conn = get_connection()
        self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))

        db = get_db()
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'mongoenginetest')

    def test_register_connection(self):
        """Ensure that connections with different aliases may be registered.
        """
        register_connection('testdb', 'mongoenginetest2')

        self.assertRaises(ConnectionError, get_connection)
        conn = get_connection('testdb')
        self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))

        db = get_db('testdb')
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'mongoenginetest2')

    def test_connection_kwargs(self):
        """Ensure that connection kwargs get passed to pymongo.
        """
        connect('mongoenginetest', alias='t1', tz_aware=True)
        conn = get_connection('t1')

        self.assertTrue(conn.tz_aware)

        connect('mongoenginetest2', alias='t2')
        conn = get_connection('t2')
        self.assertFalse(conn.tz_aware)

    def test_datetime(self):
        connect('mongoenginetest', tz_aware=True)
        d = datetime.datetime(2010, 5, 5, tzinfo=utc)

        class DateDoc(Document):
            the_date = DateTimeField(required=True)

        DateDoc.drop_collection()
        DateDoc(the_date=d).save()

        date_doc = DateDoc.objects.first()
        self.assertEqual(d, date_doc.the_date)


if __name__ == '__main__':
    unittest.main()
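The alias mechanics exercised by test_register_connection, as a standalone sketch; 'maindb', 'reports', and 'reportsdb' are placeholder names, not values from this changeset:

from mongoengine import connect, register_connection
from mongoengine.connection import get_db

connect('maindb')                            # registers the default alias
register_connection('reports', 'reportsdb')  # registers settings only; connects lazily

# Each alias resolves to its own client and database:
print(get_db().name)           # 'maindb'
print(get_db('reports').name)  # 'reportsdb'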
tests/test_context_managers.py (204 lines, new file)
@@ -0,0 +1,204 @@
import sys
sys.path[0:0] = [""]
import unittest

from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.context_managers import (switch_db, switch_collection,
                                          no_sub_classes, no_dereference,
                                          query_counter)


class ContextManagersTest(unittest.TestCase):

    def test_switch_db_context_manager(self):
        connect('mongoenginetest')
        register_connection('testdb-1', 'mongoenginetest2')

        class Group(Document):
            name = StringField()

        Group.drop_collection()

        Group(name="hello - default").save()
        self.assertEqual(1, Group.objects.count())

        with switch_db(Group, 'testdb-1') as Group:

            self.assertEqual(0, Group.objects.count())

            Group(name="hello").save()

            self.assertEqual(1, Group.objects.count())

            Group.drop_collection()
            self.assertEqual(0, Group.objects.count())

        self.assertEqual(1, Group.objects.count())

    def test_switch_collection_context_manager(self):
        connect('mongoenginetest')
        register_connection('testdb-1', 'mongoenginetest2')

        class Group(Document):
            name = StringField()

        Group.drop_collection()
        with switch_collection(Group, 'group1') as Group:
            Group.drop_collection()

        Group(name="hello - group").save()
        self.assertEqual(1, Group.objects.count())

        with switch_collection(Group, 'group1') as Group:

            self.assertEqual(0, Group.objects.count())

            Group(name="hello - group1").save()

            self.assertEqual(1, Group.objects.count())

            Group.drop_collection()
            self.assertEqual(0, Group.objects.count())

        self.assertEqual(1, Group.objects.count())

    def test_no_dereference_context_manager_object_id(self):
        """Ensure that ObjectId references in ListFields aren't dereferenced.
        """
        connect('mongoenginetest')

        class User(Document):
            name = StringField()

        class Group(Document):
            ref = ReferenceField(User, dbref=False)
            generic = GenericReferenceField()
            members = ListField(ReferenceField(User, dbref=False))

        User.drop_collection()
        Group.drop_collection()

        for i in xrange(1, 51):
            User(name='user %s' % i).save()

        user = User.objects.first()
        Group(ref=user, members=User.objects, generic=user).save()

        with no_dereference(Group) as NoDeRefGroup:
            self.assertTrue(Group._fields['members']._auto_dereference)
            self.assertFalse(NoDeRefGroup._fields['members']._auto_dereference)

        with no_dereference(Group) as Group:
            group = Group.objects.first()
            self.assertTrue(all([not isinstance(m, User)
                                 for m in group.members]))
            self.assertFalse(isinstance(group.ref, User))
            self.assertFalse(isinstance(group.generic, User))

        self.assertTrue(all([isinstance(m, User)
                             for m in group.members]))
        self.assertTrue(isinstance(group.ref, User))
        self.assertTrue(isinstance(group.generic, User))

    def test_no_dereference_context_manager_dbref(self):
        """Ensure that DBRef items in ListFields aren't dereferenced.
        """
        connect('mongoenginetest')

        class User(Document):
            name = StringField()

        class Group(Document):
            ref = ReferenceField(User, dbref=True)
            generic = GenericReferenceField()
            members = ListField(ReferenceField(User, dbref=True))

        User.drop_collection()
        Group.drop_collection()

        for i in xrange(1, 51):
            User(name='user %s' % i).save()

        user = User.objects.first()
        Group(ref=user, members=User.objects, generic=user).save()

        with no_dereference(Group) as NoDeRefGroup:
            self.assertTrue(Group._fields['members']._auto_dereference)
            self.assertFalse(NoDeRefGroup._fields['members']._auto_dereference)

        with no_dereference(Group) as Group:
            group = Group.objects.first()
            self.assertTrue(all([not isinstance(m, User)
                                 for m in group.members]))
            self.assertFalse(isinstance(group.ref, User))
            self.assertFalse(isinstance(group.generic, User))

        self.assertTrue(all([isinstance(m, User)
                             for m in group.members]))
        self.assertTrue(isinstance(group.ref, User))
        self.assertTrue(isinstance(group.generic, User))

    def test_no_sub_classes(self):
        class A(Document):
            x = IntField()
            y = IntField()

            meta = {'allow_inheritance': True}

        class B(A):
            z = IntField()

        class C(B):
            zz = IntField()

        A.drop_collection()

        A(x=10, y=20).save()
        A(x=15, y=30).save()
        B(x=20, y=40).save()
        B(x=30, y=50).save()
        C(x=40, y=60).save()

        self.assertEqual(A.objects.count(), 5)
        self.assertEqual(B.objects.count(), 3)
        self.assertEqual(C.objects.count(), 1)

        with no_sub_classes(A) as A:
            self.assertEqual(A.objects.count(), 2)

            for obj in A.objects:
                self.assertEqual(obj.__class__, A)

        with no_sub_classes(B) as B:
            self.assertEqual(B.objects.count(), 2)

            for obj in B.objects:
                self.assertEqual(obj.__class__, B)

        with no_sub_classes(C) as C:
            self.assertEqual(C.objects.count(), 1)

            for obj in C.objects:
                self.assertEqual(obj.__class__, C)

        # Confirm the context managers exited correctly
        self.assertEqual(A.objects.count(), 5)
        self.assertEqual(B.objects.count(), 3)
        self.assertEqual(C.objects.count(), 1)

    def test_query_counter(self):
        connect('mongoenginetest')
        db = get_db()
        db.test.find({})

        with query_counter() as q:
            self.assertEqual(0, q)

            for i in xrange(1, 51):
                db.test.find({}).count()

            self.assertEqual(50, q)


if __name__ == '__main__':
    unittest.main()
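The switch_db pattern from the first test, reduced to its essentials; the database names follow the test above, and a local mongod is assumed:

from mongoengine import Document, StringField, connect, register_connection
from mongoengine.context_managers import switch_db

connect('mongoenginetest')                           # default alias
register_connection('testdb-1', 'mongoenginetest2')  # secondary alias

class Group(Document):
    name = StringField()

Group(name="written to the default db").save()

with switch_db(Group, 'testdb-1') as Group:
    # inside the block the same class reads and writes 'mongoenginetest2'
    Group(name="written to the other db").save()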
tests/test_dereference.py (1181 lines, new file; diff suppressed because it is too large)
tests/test_django.py (301 lines, new file)
@@ -0,0 +1,301 @@
import sys
sys.path[0:0] = [""]
import unittest
from nose.plugins.skip import SkipTest
from mongoengine.python_support import PY3
from mongoengine import *

try:
    from mongoengine.django.shortcuts import get_document_or_404

    from django.http import Http404
    from django.template import Context, Template
    from django.conf import settings
    from django.core.paginator import Paginator

    settings.configure(
        USE_TZ=True,
        INSTALLED_APPS=('django.contrib.auth', 'mongoengine.django.mongo_auth'),
        AUTH_USER_MODEL='mongo_auth.MongoUser',
    )

    try:
        from django.contrib.auth import authenticate, get_user_model
        from mongoengine.django.auth import User
        from mongoengine.django.mongo_auth.models import MongoUser, MongoUserManager
        DJ15 = True
    except Exception:
        DJ15 = False
    from django.contrib.sessions.tests import SessionTestsMixin
    from mongoengine.django.sessions import SessionStore, MongoSession
except Exception, err:
    if PY3:
        SessionTestsMixin = type  # dummy value so no error
        SessionStore = None  # dummy value so no error
    else:
        raise err


from datetime import tzinfo, timedelta
ZERO = timedelta(0)


class FixedOffset(tzinfo):
    """Fixed offset in minutes east from UTC."""

    def __init__(self, offset, name):
        self.__offset = timedelta(minutes=offset)
        self.__name = name

    def utcoffset(self, dt):
        return self.__offset

    def tzname(self, dt):
        return self.__name

    def dst(self, dt):
        return ZERO


def activate_timezone(tz):
    """Activate Django timezone support if it is available.
    """
    try:
        from django.utils import timezone
        timezone.deactivate()
        timezone.activate(tz)
    except ImportError:
        pass


class QuerySetTest(unittest.TestCase):

    def setUp(self):
        if PY3:
            raise SkipTest('django does not have Python 3 support')
        connect(db='mongoenginetest')

        class Person(Document):
            name = StringField()
            age = IntField()
        self.Person = Person

    def test_order_by_in_django_template(self):
        """Ensure that QuerySets are properly ordered in Django template.
        """
        self.Person.drop_collection()

        self.Person(name="A", age=20).save()
        self.Person(name="D", age=10).save()
        self.Person(name="B", age=40).save()
        self.Person(name="C", age=30).save()

        t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}")

        d = {"ol": self.Person.objects.order_by('-name')}
        self.assertEqual(t.render(Context(d)), u'D-10:C-30:B-40:A-20:')
        d = {"ol": self.Person.objects.order_by('+name')}
        self.assertEqual(t.render(Context(d)), u'A-20:B-40:C-30:D-10:')
        d = {"ol": self.Person.objects.order_by('-age')}
        self.assertEqual(t.render(Context(d)), u'B-40:C-30:A-20:D-10:')
        d = {"ol": self.Person.objects.order_by('+age')}
        self.assertEqual(t.render(Context(d)), u'D-10:A-20:C-30:B-40:')

        self.Person.drop_collection()

    def test_q_object_filter_in_template(self):

        self.Person.drop_collection()

        self.Person(name="A", age=20).save()
        self.Person(name="D", age=10).save()
        self.Person(name="B", age=40).save()
        self.Person(name="C", age=30).save()

        t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}")

        d = {"ol": self.Person.objects.filter(Q(age=10) | Q(name="C"))}
        self.assertEqual(t.render(Context(d)), 'D-10:C-30:')

        # Check double rendering doesn't throw an error
        self.assertEqual(t.render(Context(d)), 'D-10:C-30:')

    def test_get_document_or_404(self):
        p = self.Person(name="G404")
        p.save()

        self.assertRaises(Http404, get_document_or_404, self.Person, pk='1234')
        self.assertEqual(p, get_document_or_404(self.Person, pk=p.pk))

    def test_pagination(self):
        """Ensure that Pagination works as expected.
        """
        class Page(Document):
            name = StringField()

        Page.drop_collection()

        for i in xrange(1, 11):
            Page(name=str(i)).save()

        paginator = Paginator(Page.objects.all(), 2)

        t = Template("{% for i in page.object_list %}{{ i.name }}:{% endfor %}")
        for p in paginator.page_range:
            d = {"page": paginator.page(p)}
            end = p * 2
            start = end - 1
            self.assertEqual(t.render(Context(d)), u'%d:%d:' % (start, end))

    def test_nested_queryset_template_iterator(self):
        # Try iterating the same queryset twice, nested, in a Django template.
        names = ['A', 'B', 'C', 'D']

        class CustomUser(Document):
            name = StringField()

            def __unicode__(self):
                return self.name

        CustomUser.drop_collection()

        for name in names:
            CustomUser(name=name).save()

        users = CustomUser.objects.all().order_by('name')
        template = Template("{% for user in users %}{{ user.name }}{% ifequal forloop.counter 2 %} {% for inner_user in users %}{{ inner_user.name }}{% endfor %} {% endifequal %}{% endfor %}")
        rendered = template.render(Context({'users': users}))
        self.assertEqual(rendered, 'AB ABCD CD')

    def test_filter(self):
        """Ensure that a queryset and filters work as expected.
        """

        class Note(Document):
            text = StringField()

        for i in xrange(1, 101):
            Note(text="Note: %s" % i).save()

        # Check the count
        self.assertEqual(Note.objects.count(), 100)

        # Get the first 10 and confirm
        notes = Note.objects[:10]
        self.assertEqual(notes.count(), 10)

        # Test Django's template filters
        # self.assertEqual(length(notes), 10)
        t = Template("{{ notes.count }}")
        c = Context({"notes": notes})
        self.assertEqual(t.render(c), "10")

        # Test with skip
        notes = Note.objects.skip(90)
        self.assertEqual(notes.count(), 10)

        # Test Django's template filters
        self.assertEqual(notes.count(), 10)
        t = Template("{{ notes.count }}")
        c = Context({"notes": notes})
        self.assertEqual(t.render(c), "10")

        # Test with limit
        notes = Note.objects.limit(10)
        self.assertEqual(notes.count(), 10)

        # Test Django's template filters
        self.assertEqual(notes.count(), 10)
        t = Template("{{ notes.count }}")
        c = Context({"notes": notes})
        self.assertEqual(t.render(c), "10")

        # Test with skip and limit
        notes = Note.objects.skip(10).limit(10)

        # Test Django's template filters
        self.assertEqual(notes.count(), 10)
        t = Template("{{ notes.count }}")
        c = Context({"notes": notes})
        self.assertEqual(t.render(c), "10")


class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase):
    backend = SessionStore

    def setUp(self):
        if PY3:
            raise SkipTest('django does not have Python 3 support')
        connect(db='mongoenginetest')
        MongoSession.drop_collection()
        super(MongoDBSessionTest, self).setUp()

    def assertIn(self, first, second, msg=None):
        self.assertTrue(first in second, msg)

    def assertNotIn(self, first, second, msg=None):
        self.assertFalse(first in second, msg)

    def test_first_save(self):
        session = SessionStore()
        session['test'] = True
        session.save()
        self.assertTrue('test' in session)

    def test_session_expiration_tz(self):
        activate_timezone(FixedOffset(60, 'UTC+1'))
        # create and save new session
        session = SessionStore()
        session.set_expiry(600)  # expire in 600 seconds
        session['test_expire'] = True
        session.save()
        # reload session with key
        key = session.session_key
        session = SessionStore(key)
        self.assertTrue('test_expire' in session, 'Session has expired before it is expected')


class MongoAuthTest(unittest.TestCase):
    user_data = {
        'username': 'user',
        'email': 'user@example.com',
        'password': 'test',
    }

    def setUp(self):
        if PY3:
            raise SkipTest('django does not have Python 3 support')
        if not DJ15:
            raise SkipTest('mongo_auth requires Django 1.5')
        connect(db='mongoenginetest')
        User.drop_collection()
        super(MongoAuthTest, self).setUp()

    def test_user_model(self):
        self.assertEqual(get_user_model(), MongoUser)

    def test_user_manager(self):
        manager = get_user_model()._default_manager
        self.assertIsInstance(manager, MongoUserManager)

    def test_user_manager_exception(self):
        manager = get_user_model()._default_manager
        self.assertRaises(MongoUser.DoesNotExist, manager.get,
                          username='not found')

    def test_create_user(self):
        manager = get_user_model()._default_manager
        user = manager.create_user(**self.user_data)
        self.assertIsInstance(user, User)
        db_user = User.objects.get(username='user')
        self.assertEqual(user.id, db_user.id)

    def test_authenticate(self):
        get_user_model()._default_manager.create_user(**self.user_data)
        user = authenticate(username='user', password='fail')
        self.assertIsNone(user)
        user = authenticate(username='user', password='test')
        db_user = User.objects.get(username='user')
        self.assertEqual(user.id, db_user.id)


if __name__ == '__main__':
    unittest.main()
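For context, a hedged sketch of the settings a Django project would need to adopt the session and auth backends tested above. The dotted paths are taken from this file's imports; the SESSION_ENGINE value and the backend class name reflect mongoengine's documented Django integration of this era and should be verified against your version:

# settings.py (sketch)
SESSION_ENGINE = 'mongoengine.django.sessions'   # MongoSession-backed sessions
AUTHENTICATION_BACKENDS = ('mongoengine.django.auth.MongoEngineBackend',)
INSTALLED_APPS = ('django.contrib.auth', 'mongoengine.django.mongo_auth')
AUTH_USER_MODEL = 'mongo_auth.MongoUser'         # Django >= 1.5, per the DJ15 guard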
tests/test_jinja.py (47 lines, new file)
@@ -0,0 +1,47 @@
import sys
sys.path[0:0] = [""]

import unittest

from mongoengine import *

import jinja2


class TemplateFilterTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')

    def test_jinja2(self):
        env = jinja2.Environment()

        class TestData(Document):
            title = StringField()
            description = StringField()

        TestData.drop_collection()

        examples = [('A', '1'),
                    ('B', '2'),
                    ('C', '3')]

        for title, description in examples:
            TestData(title=title, description=description).save()

        tmpl = """
{%- for record in content -%}
    {%- if loop.first -%}{ {%- endif -%}
    "{{ record.title }}": "{{ record.description }}"
    {%- if loop.last -%} }{%- else -%},{% endif -%}
{%- endfor -%}
"""
        ctx = {'content': TestData.objects}
        template = env.from_string(tmpl)
        rendered = template.render(**ctx)

        self.assertEqual('{"A": "1","B": "2","C": "3"}', rendered)


if __name__ == '__main__':
    unittest.main()
Some files were not shown because too many files have changed in this diff.