Compare commits
495 Commits
| Author | SHA1 | Date |
|---|---|---|
| | f0eaec98c7 | |
| | 6dcd7006d0 | |
| | 5de4812477 | |
| | d5b28356bc | |
| | 76fddd0db0 | |
| | 1108586303 | |
| | 3f49923298 | |
| | c277be8b6b | |
| | 6e083fa6a1 | |
| | 073091a06e | |
| | 03bfd01862 | |
| | 539f01d08e | |
| | dcf3c86dce | |
| | ec639cd6e9 | |
| | 420376d036 | |
| | 51e50bf0a9 | |
| | c2d77f51bb | |
| | b4d87d9128 | |
| | 4401a309ee | |
| | b562e209d1 | |
| | 3a85422e8f | |
| | e45397c975 | |
| | 1f9ec0c888 | |
| | f8ee470e70 | |
| | d02de0798f | |
| | 6fe074fb13 | |
| | 4db339c5f4 | |
| | a525764359 | |
| | f970d5878a | |
| | cc0a2cbc6f | |
| | add0b463f5 | |
| | d80b1a7749 | |
| | 6186691259 | |
| | b451cc567d | |
| | 757ff31661 | |
| | 97a98f0045 | |
| | 8f05896bc9 | |
| | da7a8939df | |
| | b6977a88ea | |
| | eafbc7f20d | |
| | c9a5710554 | |
| | f10e946896 | |
| | 2f19b22bb2 | |
| | d134e11c6d | |
| | 63edd16a92 | |
| | 37740dc010 | |
| | 04b85ddbf2 | |
| | 836dc96f67 | |
| | 49a7542b14 | |
| | a84ffce5a0 | |
| | 210b3e5192 | |
| | 5f1d5ea056 | |
| | 19a7372ff9 | |
| | cc5b60b004 | |
| | b06f9dbf8d | |
| | d9b8ee7895 | |
| | e9ff655b0e | |
| | d58341d7ae | |
| | 669d21a114 | |
| | 7e980a16d0 | |
| | 47df8deb58 | |
| | dd006a502e | |
| | 07d3e52e6a | |
| | fc1ce6d39b | |
| | 32d5c0c946 | |
| | dfabfce01b | |
| | 74f3f4eb15 | |
| | 20cb0285f0 | |
| | faf840f924 | |
| | 165bea5bb9 | |
| | f7515cfca8 | |
| | a762a10dec | |
| | a192029901 | |
| | 67182713d9 | |
| | e9464e32db | |
| | 2d6ae16912 | |
| | f9cd8b1841 | |
| | 41a698b442 | |
| | 9f58bc9207 | |
| | d36f6e7f24 | |
| | eeb672feb9 | |
| | 063a162ce0 | |
| | 3e4a900279 | |
| | 43327ea4e1 | |
| | 0d2e84b16b | |
| | 3c78757778 | |
| | d0245bb5ba | |
| | 3477b0107a | |
| | 8df9ff90cb | |
| | d6b4ca7a98 | |
| | 2e18199eb2 | |
| | 025e17701b | |
| | 156ca44a13 | |
| | 39dac7d4db | |
| | 9ca632d518 | |
| | 4177fc6df2 | |
| | d90890c08e | |
| | 1ca098c402 | |
| | 3208a7f15d | |
| | 8eda52e8e0 | |
| | 5b161b7445 | |
| | 8c1f8e54cd | |
| | 03d3c26a99 | |
| | 0cbd3663e4 | |
| | f182daa85e | |
| | de2f774e85 | |
| | 9d9a4afee9 | |
| | 0ea363c7fc | |
| | d7ee47ee25 | |
| | eb1b6e34c7 | |
| | 621b2b3f72 | |
| | 83da08ef7d | |
| | 9f551121fb | |
| | ba48dfb4bf | |
| | ed2ea24b75 | |
| | eefbd3f597 | |
| | e38bf63be0 | |
| | e7ba5eb160 | |
| | fff27f9b87 | |
| | d58f594c17 | |
| | 9797d7a7fb | |
| | c8b65317ef | |
| | 3a6dc77d36 | |
| | 4f70c27b56 | |
| | ea46edf50a | |
| | e5e88d792e | |
| | 6d68ad735c | |
| | c44b98a7e1 | |
| | 445f9453c4 | |
| | 3364e040c8 | |
| | 692f00864d | |
| | 344dc64df8 | |
| | 473425a36a | |
| | 3ba58ebaae | |
| | 2c7b12c022 | |
| | 17eeeb7536 | |
| | de5fbfde2c | |
| | f5d02e1b10 | |
| | e508625935 | |
| | 0b177ec4c1 | |
| | 87c965edd3 | |
| | 72dd9daa23 | |
| | a68529fba8 | |
| | 06681a453f | |
| | 5907dde4a8 | |
| | 8e038dd563 | |
| | 50905ab459 | |
| | 7bb9c7d47f | |
| | 5c45eee817 | |
| | 0f9e4ef352 | |
| | 85173d188b | |
| | d9ed33d1b1 | |
| | e6ac8cab53 | |
| | f890ebd0f4 | |
| | e537369d98 | |
| | 9bbd8dbe62 | |
| | 09a5f5c8f3 | |
| | b9e0f52526 | |
| | 1cdf71b647 | |
| | 3aff461039 | |
| | bf74d7537c | |
| | 0c2fb6807e | |
| | b9c9d127a2 | |
| | 286beca6c5 | |
| | 3a1521a34e | |
| | c5b047d0cd | |
| | 485b811bd0 | |
| | f335591045 | |
| | 1c10f3020b | |
| | 3074dad293 | |
| | 42f506adc6 | |
| | 50b755db0c | |
| | 420c3e0073 | |
| | 4a57fc33e4 | |
| | 25cdf16cc0 | |
| | 7f732459a1 | |
| | 9cc02d4dbe | |
| | c528ac09d6 | |
| | 1a131ff120 | |
| | accdd82970 | |
| | 3e8f02c64b | |
| | 3425264077 | |
| | 148f8b8a3a | |
| | 74343841e4 | |
| | 3b3738b36b | |
| | b15c3f6a3f | |
| | 2459f9b0aa | |
| | 6ff1bd9b3c | |
| | 1bc2d2ec37 | |
| | d7fd6a4628 | |
| | 9236f365fa | |
| | 90d22c2a28 | |
| | c9f6e6b62a | |
| | 260d9377f5 | |
| | 22d1ce6319 | |
| | 6997e02476 | |
| | 155d79ff4d | |
| | 452cd125fa | |
| | e62c35b040 | |
| | d5ec3c6a31 | |
| | ad983dc279 | |
| | bb15bf8d13 | |
| | 94adc207ad | |
| | 376d1c97ab | |
| | 4fe87b40da | |
| | b10d76cf4b | |
| | 3bdc9a2f09 | |
| | 9d52e18659 | |
| | f6f7c12f0e | |
| | 219b28c97b | |
| | 3598fe0fb4 | |
| | f9dd051ec9 | |
| | 68e4a27aaf | |
| | b849c719a8 | |
| | 59e7617e82 | |
| | b5e868655e | |
| | 027b3d36de | |
| | 653c4259ee | |
| | 9f5ab8149f | |
| | 66c6d14f7a | |
| | 2c0fc142a3 | |
| | 003454573c | |
| | aa5a9ff1f4 | |
| | 28ef54986d | |
| | 0da2dfd191 | |
| | 787fc1cd8b | |
| | dfdc0d92c3 | |
| | f265915aa2 | |
| | 4228d06934 | |
| | 1a93b9b226 | |
| | 363e50abbe | |
| | b8d53a6f0d | |
| | 4b45c0cd14 | |
| | e7c0da38c2 | |
| | 8706fbe461 | |
| | 9ca96e4e17 | |
| | 99fe1da345 | |
| | 1986e82783 | |
| | 7073b9d395 | |
| | f2049e9c18 | |
| | f0f1308465 | |
| | 7d90aa76ff | |
| | 3cc2c617fd | |
| | c31488add9 | |
| | 3d5b6ae332 | |
| | 59826c8cfd | |
| | 6f29d12386 | |
| | 0a89899ad0 | |
| | e4af0e361a | |
| | 31ec7907b5 | |
| | 12f3f8c694 | |
| | 79098e997e | |
| | dc1849bad5 | |
| | e2d826c412 | |
| | e6d796832e | |
| | 6f0a6df4f6 | |
| | 7a877a00d5 | |
| | e8604d100e | |
| | 1647441ce8 | |
| | 9f8d6b3a00 | |
| | 0bfc96e459 | |
| | 3425574ddc | |
| | 4b2ad25405 | |
| | 3ce163b1a0 | |
| | 7c1ee28f13 | |
| | 2645e43da1 | |
| | 59bfe551a3 | |
| | 6a31736644 | |
| | e2c78047b1 | |
| | 6a4351e44f | |
| | adb60ef1ac | |
| | 3090adac04 | |
| | b9253d86cc | |
| | ab4d4e6230 | |
| | 7cd38c56c6 | |
| | 864053615b | |
| | db2366f112 | |
| | 4defc82192 | |
| | 5949970a95 | |
| | 0ea4abda81 | |
| | 5c6035d636 | |
| | a2183e3dcc | |
| | 99637151b5 | |
| | a8e787c120 | |
| | 53339c7c72 | |
| | 3534bf7d70 | |
| | 1cf3989664 | |
| | fd296918da | |
| | 8ad1f03dc5 | |
| | fe7e17dbd5 | |
| | d582394a42 | |
| | 02ef0df019 | |
| | 0dfd6aa518 | |
| | 0b23bc9cf2 | |
| | f108c4288e | |
| | 9b9696aefd | |
| | 576e198ece | |
| | 52f85aab18 | |
| | ab60fd0490 | |
| | d79ae30f31 | |
| | f27debe7f9 | |
| | 735e043ff6 | |
| | 6e7f2b73cf | |
| | d645ce9745 | |
| | 7c08c140da | |
| | 81d402dc17 | |
| | 966fa12358 | |
| | 87792e1921 | |
| | 4c8296acc6 | |
| | 9989da07ed | |
| | 1c5e6a3425 | |
| | eedf908770 | |
| | 5c9ef41403 | |
| | 0bf2ad5b67 | |
| | a0e3f382cd | |
| | f09c39b5d7 | |
| | 89c67bf259 | |
| | ea666d4607 | |
| | b8af154439 | |
| | f594ece32a | |
| | 03beb6852a | |
| | ab9e9a3329 | |
| | a4b09344af | |
| | 8cb8aa392c | |
| | 3255519792 | |
| | 7e64bb2503 | |
| | 86a78402c3 | |
| | ba276452fb | |
| | 4ffa8d0124 | |
| | 4bc5082681 | |
| | 0e3c34e1da | |
| | 658b3784ae | |
| | 0526f577ff | |
| | bb1b9bc1d3 | |
| | b1eeb77ddc | |
| | 999d4a7676 | |
| | 1b80193aac | |
| | be8d39a48c | |
| | a2f3d70f28 | |
| | 676a7bf712 | |
| | e990a6c70c | |
| | 90fa0f6c4a | |
| | 22010d7d95 | |
| | 66279bd90f | |
| | 19da228855 | |
| | 9e67941bad | |
| | 0454fc74e9 | |
| | 2f6b1c7611 | |
| | f00bed6058 | |
| | 529c522594 | |
| | 2bb9493fcf | |
| | 839ed8a64a | |
| | 500eb920e4 | |
| | 017a31ffd0 | |
| | 83b961c84d | |
| | fa07423ca5 | |
| | dd4af2df81 | |
| | 44bd8cb85b | |
| | 52d80ac23c | |
| | 43a5d73e14 | |
| | abc764951d | |
| | 9cc6164026 | |
| | 475488b9f2 | |
| | 95b1783834 | |
| | 12c8b5c0b9 | |
| | f99b7a811b | |
| | 0575abab23 | |
| | 9eebcf7beb | |
| | ed74477150 | |
| | 2801b38c75 | |
| | dc3fea875e | |
| | aab8c2b687 | |
| | 3577773af3 | |
| | dd023edc0f | |
| | 8ac9e6dc19 | |
| | f45d4d781d | |
| | c95652d6a8 | |
| | 97b37f75d3 | |
| | 95dae48778 | |
| | 73635033bd | |
| | c1619d2a62 | |
| | b87ef982f6 | |
| | 91aa90ad4a | |
| | 4b3cea9e78 | |
| | 2420b5e937 | |
| | f23a976bea | |
| | 4226cd08f1 | |
| | 7a230f1693 | |
| | a43d0d4612 | |
| | 78a40a0c70 | |
| | 2c69d8f0b0 | |
| | 0018c38b83 | |
| | 8df81571fc | |
| | d1add62a06 | |
| | c419f3379a | |
| | 69d57209f7 | |
| | 7ca81d6fb8 | |
| | 8a046bfa5d | |
| | 3628a7653c | |
| | 48f988acd7 | |
| | 6526923345 | |
| | 24fd1acce6 | |
| | cbb9235dc5 | |
| | 19ec2c9bc9 | |
| | 6459d4c0b6 | |
| | 1304f2721f | |
| | 8bde0c0e53 | |
| | 598ffd3e5c | |
| | 1a4533a9cf | |
| | 601f0eb168 | |
| | 3070e0bf5d | |
| | 83c11a9834 | |
| | 5c912b930e | |
| | 1b17fb0ae7 | |
| | d83e67c121 | |
| | ae39ed94c9 | |
| | 1e51180d42 | |
| | 87ba69d02e | |
| | 8879d5560b | |
| | c1621ee39c | |
| | b0aa98edb4 | |
| | a7a2fe0216 | |
| | 8e50f5fa3c | |
| | 31793520bf | |
| | 0b6b0368c5 | |
| | d1d30a9280 | |
| | 420c6f2d1e | |
| | 34f06c4971 | |
| | 9cc4bbd49d | |
| | f66b312869 | |
| | 2405ba8708 | |
| | a91b6bff8b | |
| | 450dc11a68 | |
| | 1ce2f84ce5 | |
| | f55b241cfa | |
| | 34d08ce8ef | |
| | 4f5aa8c43b | |
| | 27b375060d | |
| | cbfdc401f7 | |
| | b58bf3e0ce | |
| | 1fff7e9aca | |
| | 494b981b13 | |
| | dd93995bd0 | |
| | b3bb4add9c | |
| | d305e71c27 | |
| | 0d92baa670 | |
| | 7a1b110f62 | |
| | db8df057ce | |
| | 5d8ffded40 | |
| | 07f3e5356d | |
| | 1ece62f960 | |
| | 056c604dc3 | |
| | 2d08eec093 | |
| | 614b590551 | |
| | 6d90ce250a | |
| | ea31846a19 | |
| | e6317776c1 | |
| | efeaba39a4 | |
| | 1a97dfd479 | |
| | 9fecf2b303 | |
| | 3d0d2f48ad | |
| | 581605e0e2 | |
| | 45d3a7f6ff | |
| | 7ca2ea0766 | |
| | 89220c142b | |
| | c73ce3d220 | |
| | b0f127af4e | |
| | 766d54795f | |
| | bd41c6eea4 | |
| | 2435786713 | |
| | 9e7ea64bd2 | |
| | 89a6eee6af | |
| | 2ec1476e50 | |
| | 2d9b581f34 | |
| | 5bb63f645b | |
| | a856c7cc37 | |
| | 26db9d8a9d | |
| | 8060179f6d | |
| | 77ebd87fed | |
| | e4bc92235d | |
| | 27a4d83ce8 | |
| | ece9b902f8 | |
| | 65a2f8a68b | |
| | 9c212306b8 | |
| | 1fdc7ce6bb | |
| | 0b22c140c5 | |
| | 944aa45459 | |
| | c9842ba13a | |
| | 8840680303 | |
| | 376b9b1316 | |
| | 54bb1cb3d9 | |
| | 43468b474e | |
| | 28a957c684 | |
| | ec5ddbf391 | |
| | bab186e195 | |
.gitignore (vendored, 3 changes)

```diff
@@ -13,4 +13,5 @@ env/
 .settings
 .project
 .pydevproject
-tests/bugfix.py
+tests/test_bugfix.py
+htmlcov/
```
.travis.yml (new file, 26 lines)

```diff
@@ -0,0 +1,26 @@
+# http://travis-ci.org/#!/MongoEngine/mongoengine
+language: python
+services: mongodb
+python:
+  - "2.6"
+  - "2.7"
+  - "3.2"
+  - "3.3"
+env:
+  - PYMONGO=dev
+  - PYMONGO=2.5
+  - PYMONGO=2.4.2
+install:
+  - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi
+  - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi
+  - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi
+  - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi
+  - python setup.py install
+script:
+  - python setup.py test
+notifications:
+  irc: "irc.freenode.org#mongoengine"
+branches:
+  only:
+    - master
+    - "0.8"
```
AUTHORS (53 changes)

```diff
@@ -8,6 +8,7 @@ Florian Schlachter <flori@n-schlachter.de>
 Steve Challis <steve@stevechallis.com>
 Wilson Júnior <wilsonpjunior@gmail.com>
 Dan Crosta https://github.com/dcrosta
+Laine Herron https://github.com/LaineHerron
 
 CONTRIBUTORS
 
@@ -105,3 +106,55 @@ that much better:
 * Adam Reeve
 * Anthony Nemitz
 * deignacio
+* Shaun Duncan
+* Meir Kriheli
+* Andrey Fedoseev
+* aparajita
+* Tristan Escalada
+* Alexander Koshelev
+* Jaime Irurzun
+* Alexandre González
+* Thomas Steinacher
+* Tommi Komulainen
+* Peter Landry
+* biszkoptwielki
+* Anton Kolechkin
+* Sergey Nikitin
+* psychogenic
+* Stefan Wójcik
+* dimonb
+* Garry Polley
+* James Slagle
+* Adrian Scott
+* Peter Teichman
+* Jakub Kot
+* Jorge Bastida
+* Aleksandr Sorokoumov
+* Yohan Graterol
+* bool-dev
+* Russ Weeks
+* Paul Swartz
+* Sundar Raman
+* Benoit Louy
+* lraucy
+* hellysmile
+* Jaepil Jeong
+* Daniil Sharou
+* Stefan Wójcik
+* Pete Campton
+* Martyn Smith
+* Marcelo Anton
+* Aleksey Porfirov
+* Nicolas Trippar
+* Manuel Hermann
+* Gustavo Gawryszewski
+* Max Countryman
+* caitifbrito
+* lcya86 刘春洋
+* Martin Alderete (https://github.com/malderete)
+* Nick Joyce
+* Jared Forsyth
+* Kenneth Falck
+* Lukasz Balcerzak
+* Nicolas Cortot
+
```
CONTRIBUTING.rst (new file, 61 lines)

```diff
@@ -0,0 +1,61 @@
+Contributing to MongoEngine
+===========================
+
+MongoEngine has a large `community
+<https://raw.github.com/MongoEngine/mongoengine/master/AUTHORS>`_ and
+contributions are always encouraged. Contributions can be as simple as
+minor tweaks to the documentation. Please read these guidelines before
+sending a pull request.
+
+Bugfixes and New Features
+-------------------------
+
+Before starting to write code, look for existing `tickets
+<https://github.com/MongoEngine/mongoengine/issues?state=open>`_ or `create one
+<https://github.com/MongoEngine/mongoengine/issues>`_ for your specific
+issue or feature request. That way you avoid working on something
+that might not be of interest or that has already been addressed. If in doubt
+post to the `user group <http://groups.google.com/group/mongoengine-users>`
+
+Supported Interpreters
+----------------------
+
+PyMongo supports CPython 2.5 and newer. Language
+features not supported by all interpreters can not be used.
+Please also ensure that your code is properly converted by
+`2to3 <http://docs.python.org/library/2to3.html>`_ for Python 3 support.
+
+Style Guide
+-----------
+
+MongoEngine aims to follow `PEP8 <http://www.python.org/dev/peps/pep-0008/>`_
+including 4 space indents and 79 character line limits.
+
+Testing
+-------
+
+All tests are run on `Travis <http://travis-ci.org/MongoEngine/mongoengine>`_
+and any pull requests are automatically tested by Travis. Any pull requests
+without tests will take longer to be integrated and might be refused.
+
+General Guidelines
+------------------
+
+- Avoid backward breaking changes if at all possible.
+- Write inline documentation for new classes and methods.
+- Write tests and make sure they pass (make sure you have a mongod
+  running on the default port, then execute ``python setup.py test``
+  from the cmd line to run the test suite).
+- Add yourself to AUTHORS.rst :)
+
+Documentation
+-------------
+
+To contribute to the `API documentation
+<http://docs.mongoengine.org/en/latest/apireference.html>`_
+just make your changes to the inline documentation of the appropriate
+`source code <https://github.com/MongoEngine/mongoengine>`_ or `rst file
+<https://github.com/MongoEngine/mongoengine/tree/master/docs>`_ in a
+branch and submit a `pull request <https://help.github.com/articles/using-pull-requests>`_.
+You might also use the github `Edit <https://github.com/blog/844-forking-with-the-edit-button>`_
+button.
```
LICENSE (2 changes)

```diff
@@ -1,4 +1,4 @@
-Copyright (c) 2009-2010 Harry Marr
+Copyright (c) 2009 See AUTHORS
 
 Permission is hereby granted, free of charge, to any person
 obtaining a copy of this software and associated documentation
```
README.rst (19 changes)

```diff
@@ -2,15 +2,19 @@
 MongoEngine
 ===========
 :Info: MongoEngine is an ORM-like layer on top of PyMongo.
+:Repository: https://github.com/MongoEngine/mongoengine
 :Author: Harry Marr (http://github.com/hmarr)
 :Maintainer: Ross Lawley (http://github.com/rozza)
+
+.. image:: https://secure.travis-ci.org/MongoEngine/mongoengine.png?branch=master
+  :target: http://travis-ci.org/MongoEngine/mongoengine
 
 About
 =====
 MongoEngine is a Python Object-Document Mapper for working with MongoDB.
 Documentation available at http://mongoengine-odm.rtfd.org - there is currently
 a `tutorial <http://readthedocs.org/docs/mongoengine-odm/en/latest/tutorial.html>`_, a `user guide
-<http://readthedocs.org/docs/mongoengine-odm/en/latest/userguide.html>`_ and an `API reference
+<https://mongoengine-odm.readthedocs.org/en/latest/guide/index.html>`_ and an `API reference
 <http://readthedocs.org/docs/mongoengine-odm/en/latest/apireference.html>`_.
@@ -59,11 +63,6 @@ Some simple examples of what MongoEngine code looks like::
 ...     print 'Link:', post.url
 ...     print
 ...
-=== Using MongoEngine ===
-See the tutorial
-
-=== MongoEngine Docs ===
-Link: hmarr.com/mongoengine
 
 >>> len(BlogPost.objects)
 2
@@ -81,7 +80,7 @@ Some simple examples of what MongoEngine code looks like::
 Tests
 =====
 To run the test suite, ensure you are running a local instance of MongoDB on
-the standard port, and run ``python setup.py test``.
+the standard port, and run: ``python setup.py test``.
 
 Community
 =========
@@ -89,10 +88,8 @@ Community
   <http://groups.google.com/group/mongoengine-users>`_
 - `MongoEngine Developers mailing list
   <http://groups.google.com/group/mongoengine-dev>`_
-- `#mongoengine IRC channel <irc://irc.freenode.net/mongoengine>`_
+- `#mongoengine IRC channel <http://webchat.freenode.net/?channels=mongoengine>`_
 
 Contributing
 ============
-The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ - to
-contribute to the project, fork it on GitHub and send a pull request, all
-contributions and suggestions are welcome!
+We welcome contributions! see the `Contribution guidelines <https://github.com/MongoEngine/mongoengine/blob/master/CONTRIBUTING.rst>`_
```
benchmark.py (49 changes)

```diff
@@ -28,47 +28,64 @@ def main():
 
 ----------------------------------------------------------------------------------------------------
 Creating 10000 dictionaries - Pymongo
-1.1141769886
+3.86744189262
 ----------------------------------------------------------------------------------------------------
 Creating 10000 dictionaries - MongoEngine
-2.37724113464
+6.23374891281
 ----------------------------------------------------------------------------------------------------
 Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
-1.92479610443
+5.33027005196
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
+pass - No Cascade
+
 0.5.X
 ----------------------------------------------------------------------------------------------------
 Creating 10000 dictionaries - Pymongo
-1.10552310944
+3.89597702026
 ----------------------------------------------------------------------------------------------------
 Creating 10000 dictionaries - MongoEngine
-16.5169169903
+21.7735359669
 ----------------------------------------------------------------------------------------------------
 Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
-14.9446101189
+19.8670389652
 ----------------------------------------------------------------------------------------------------
 Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
-14.912801981
-----------------------------------------------------------------------------------------------------
-Creating 10000 dictionaries - MongoEngine, force=True
-14.9617750645
+pass - No Cascade
 
-Performance
+0.6.X
 ----------------------------------------------------------------------------------------------------
 Creating 10000 dictionaries - Pymongo
-1.10072994232
+3.81559205055
 ----------------------------------------------------------------------------------------------------
 Creating 10000 dictionaries - MongoEngine
-5.27341103554
+10.0446798801
 ----------------------------------------------------------------------------------------------------
 Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
-4.49365401268
+9.51354718208
 ----------------------------------------------------------------------------------------------------
 Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
-4.43459296227
+9.02567505836
 ----------------------------------------------------------------------------------------------------
 Creating 10000 dictionaries - MongoEngine, force=True
-4.40114378929
+8.44933390617
+
+0.7.X
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - Pymongo
+3.78801012039
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine
+9.73050498962
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
+8.33456707001
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
+8.37778115273
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine, force=True
+8.36906409264
 """
 
 setup = """
```
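The figures in the docstring above are wall-clock seconds for inserting 10,000 small documents. A minimal sketch of how such a timing can be taken with `timeit` follows; the driver calls and document shape here are illustrative assumptions, not benchmark.py's exact harness:

```python
import timeit

# Build 10000 small dictionaries and insert them with raw PyMongo,
# mirroring the "Creating 10000 dictionaries - Pymongo" row above.
setup = """
from pymongo import MongoClient
connection = MongoClient()
connection.drop_database('timeit_test')
db = connection.timeit_test
noddy = db.noddy
"""

stmt = """
for i in range(10000):
    example = {'fields': {}}
    for j in range(20):
        example['fields']['key' + str(j)] = 'value ' + str(j)
    noddy.insert_one(example)
"""

print('Creating 10000 dictionaries - Pymongo')
print(timeit.Timer(stmt, setup).timeit(1))  # one pass, result in seconds
```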
```diff
@@ -34,6 +34,13 @@ Documents
 .. autoclass:: mongoengine.ValidationError
     :members:
 
+Context Managers
+================
+
+.. autoclass:: mongoengine.context_managers.switch_db
+.. autoclass:: mongoengine.context_managers.no_dereference
+.. autoclass:: mongoengine.context_managers.query_counter
+
 Querying
 ========
 
@@ -47,25 +54,28 @@ Querying
 Fields
 ======
 
-.. autoclass:: mongoengine.StringField
-.. autoclass:: mongoengine.URLField
-.. autoclass:: mongoengine.EmailField
-.. autoclass:: mongoengine.IntField
-.. autoclass:: mongoengine.FloatField
-.. autoclass:: mongoengine.DecimalField
-.. autoclass:: mongoengine.DateTimeField
+.. autoclass:: mongoengine.BinaryField
+.. autoclass:: mongoengine.BooleanField
 .. autoclass:: mongoengine.ComplexDateTimeField
-.. autoclass:: mongoengine.ListField
-.. autoclass:: mongoengine.SortedListField
+.. autoclass:: mongoengine.DateTimeField
+.. autoclass:: mongoengine.DecimalField
 .. autoclass:: mongoengine.DictField
+.. autoclass:: mongoengine.DynamicField
+.. autoclass:: mongoengine.EmailField
+.. autoclass:: mongoengine.EmbeddedDocumentField
+.. autoclass:: mongoengine.FileField
+.. autoclass:: mongoengine.FloatField
+.. autoclass:: mongoengine.GenericEmbeddedDocumentField
+.. autoclass:: mongoengine.GenericReferenceField
+.. autoclass:: mongoengine.GeoPointField
+.. autoclass:: mongoengine.ImageField
+.. autoclass:: mongoengine.IntField
+.. autoclass:: mongoengine.ListField
 .. autoclass:: mongoengine.MapField
 .. autoclass:: mongoengine.ObjectIdField
 .. autoclass:: mongoengine.ReferenceField
-.. autoclass:: mongoengine.GenericReferenceField
-.. autoclass:: mongoengine.EmbeddedDocumentField
-.. autoclass:: mongoengine.GenericEmbeddedDocumentField
-.. autoclass:: mongoengine.BooleanField
-.. autoclass:: mongoengine.FileField
-.. autoclass:: mongoengine.BinaryField
-.. autoclass:: mongoengine.GeoPointField
 .. autoclass:: mongoengine.SequenceField
+.. autoclass:: mongoengine.SortedListField
+.. autoclass:: mongoengine.StringField
+.. autoclass:: mongoengine.URLField
+.. autoclass:: mongoengine.UUIDField
```
|||||||
@@ -2,11 +2,228 @@
|
|||||||
Changelog
|
Changelog
|
||||||
=========
|
=========
|
||||||
|
|
||||||
|
Changes in 0.8.X
|
||||||
|
================
|
||||||
|
- Fixed db_alias and inherited Documents (#143)
|
||||||
|
- Documentation update for document errors (#124)
|
||||||
|
- Deprecated `get_or_create` (#35)
|
||||||
|
- Updated inheritable objects created by upsert now contain _cls (#118)
|
||||||
|
- Added support for creating documents with embedded documents in a single operation (#6)
|
||||||
|
- Added to_json and from_json to Document (#1)
|
||||||
|
- Added to_json and from_json to QuerySet (#131)
|
||||||
|
- Updated index creation now tied to Document class (#102)
|
||||||
|
- Added none() to queryset (#127)
|
||||||
|
- Updated SequenceFields to allow post processing of the calculated counter value (#141)
|
||||||
|
- Added clean method to documents for pre validation data cleaning (#60)
|
||||||
|
- Added support setting for read prefrence at a query level (#157)
|
||||||
|
- Added _instance to EmbeddedDocuments pointing to the parent (#139)
|
||||||
|
- Inheritance is off by default (#122)
|
||||||
|
- Remove _types and just use _cls for inheritance (#148)
|
||||||
|
- Only allow QNode instances to be passed as query objects (#199)
|
||||||
|
- Dynamic fields are now validated on save (#153) (#154)
|
||||||
|
- Added support for multiple slices and made slicing chainable. (#170) (#190) (#191)
|
||||||
|
- Fixed GridFSProxy __getattr__ behaviour (#196)
|
||||||
|
- Fix Django timezone support (#151)
|
||||||
|
- Simplified Q objects, removed QueryTreeTransformerVisitor (#98) (#171)
|
||||||
|
- FileFields now copyable (#198)
|
||||||
|
- Querysets now return clones and are no longer edit in place (#56)
|
||||||
|
- Added support for $maxDistance (#179)
|
||||||
|
- Uses getlasterror to test created on updated saves (#163)
|
||||||
|
- Fixed inheritance and unique index creation (#140)
|
||||||
|
- Fixed reverse delete rule with inheritance (#197)
|
||||||
|
- Fixed validation for GenericReferences which havent been dereferenced
|
||||||
|
- Added switch_db context manager (#106)
|
||||||
|
- Added switch_db method to document instances (#106)
|
||||||
|
- Added no_dereference context manager (#82) (#61)
|
||||||
|
- Added switch_collection context manager (#220)
|
||||||
|
- Added switch_collection method to document instances (#220)
|
||||||
|
- Added support for compound primary keys (#149) (#121)
|
||||||
|
- Fixed overriding objects with custom manager (#58)
|
||||||
|
- Added no_dereference method for querysets (#82) (#61)
|
||||||
|
- Undefined data should not override instance methods (#49)
|
||||||
|
- Added Django Group and Permission (#142)
|
||||||
|
- Added Doc class and pk to Validation messages (#69)
|
||||||
|
- Fixed Documents deleted via a queryset don't call any signals (#105)
|
||||||
|
- Added the "get_decoded" method to the MongoSession class (#216)
|
||||||
|
- Fixed invalid choices error bubbling (#214)
|
||||||
|
- Updated Save so it calls $set and $unset in a single operation (#211)
|
||||||
|
- Fixed inner queryset looping (#204)
|
||||||
|
|
||||||
|
Changes in 0.7.10
|
||||||
|
=================
|
||||||
|
- Fix UnicodeEncodeError for dbref (#278)
|
||||||
|
- Allow construction using positional parameters (#268)
|
||||||
|
- Updated EmailField length to support long domains (#243)
|
||||||
|
- Added 64-bit integer support (#251)
|
||||||
|
- Added Django sessions TTL support (#224)
|
||||||
|
- Fixed issue with numerical keys in MapField(EmbeddedDocumentField()) (#240)
|
||||||
|
- Fixed clearing _changed_fields for complex nested embedded documents (#237, #239, #242)
|
||||||
|
- Added "id" back to _data dictionary (#255)
|
||||||
|
- Only mark a field as changed if the value has changed (#258)
|
||||||
|
- Explicitly check for Document instances when dereferencing (#261)
|
||||||
|
- Fixed order_by chaining issue (#265)
|
||||||
|
- Added dereference support for tuples (#250)
|
||||||
|
- Resolve field name to db field name when using distinct(#260, #264, #269)
|
||||||
|
- Added kwargs to doc.save to help interop with django (#223, #270)
|
||||||
|
- Fixed cloning querysets in PY3
|
||||||
|
- Int fields no longer unset in save when changed to 0 (#272)
|
||||||
|
- Fixed ReferenceField query chaining bug fixed (#254)
|
||||||
|
|
||||||
|
Changes in 0.7.9
|
||||||
|
================
|
||||||
|
- Better fix handling for old style _types
|
||||||
|
- Embedded SequenceFields follow collection naming convention
|
||||||
|
|
||||||
|
Changes in 0.7.8
|
||||||
|
================
|
||||||
|
- Fix sequence fields in embedded documents (#166)
|
||||||
|
- Fix query chaining with .order_by() (#176)
|
||||||
|
- Added optional encoding and collection config for Django sessions (#180, #181, #183)
|
||||||
|
- Fixed EmailField so can add extra validation (#173, #174, #187)
|
||||||
|
- Fixed bulk inserts can now handle custom pk's (#192)
|
||||||
|
- Added as_pymongo method to return raw or cast results from pymongo (#193)
|
||||||
|
|
||||||
|
Changes in 0.7.7
|
||||||
|
================
|
||||||
|
- Fix handling for old style _types
|
||||||
|
|
||||||
|
Changes in 0.7.6
|
||||||
|
================
|
||||||
|
- Unicode fix for repr (#133)
|
||||||
|
- Allow updates with match operators (#144)
|
||||||
|
- Updated URLField - now can have a override the regex (#136)
|
||||||
|
- Allow Django AuthenticationBackends to work with Django user (hmarr/mongoengine#573)
|
||||||
|
- Fixed reload issue with ReferenceField where dbref=False (#138)
|
||||||
|
|
||||||
|
Changes in 0.7.5
|
||||||
|
================
|
||||||
|
- ReferenceFields with dbref=False use ObjectId instead of strings (#134)
|
||||||
|
See ticket for upgrade notes (#134)
|
||||||
|
|
||||||
|
Changes in 0.7.4
|
||||||
|
================
|
||||||
|
- Fixed index inheritance issues - firmed up testcases (#123) (#125)
|
||||||
|
|
||||||
|
Changes in 0.7.3
|
||||||
|
================
|
||||||
|
- Reverted EmbeddedDocuments meta handling - now can turn off inheritance (#119)
|
||||||
|
|
||||||
|
Changes in 0.7.2
|
||||||
|
================
|
||||||
|
- Update index spec generation so its not destructive (#113)
|
||||||
|
|
||||||
|
Changes in 0.7.1
|
||||||
|
=================
|
||||||
|
- Fixed index spec inheritance (#111)
|
||||||
|
|
||||||
|
Changes in 0.7.0
|
||||||
|
=================
|
||||||
|
- Updated queryset.delete so you can use with skip / limit (#107)
|
||||||
|
- Updated index creation allows kwargs to be passed through refs (#104)
|
||||||
|
- Fixed Q object merge edge case (#109)
|
||||||
|
- Fixed reloading on sharded documents (hmarr/mongoengine#569)
|
||||||
|
- Added NotUniqueError for duplicate keys (#62)
|
||||||
|
- Added custom collection / sequence naming for SequenceFields (#92)
|
||||||
|
- Fixed UnboundLocalError in composite index with pk field (#88)
|
||||||
|
- Updated ReferenceField's to optionally store ObjectId strings
|
||||||
|
this will become the default in 0.8 (#89)
|
||||||
|
- Added FutureWarning - save will default to `cascade=False` in 0.8
|
||||||
|
- Added example of indexing embedded document fields (#75)
|
||||||
|
- Fixed ImageField resizing when forcing size (#80)
|
||||||
|
- Add flexibility for fields handling bad data (#78)
|
||||||
|
- Embedded Documents no longer handle meta definitions
|
||||||
|
- Use weakref proxies in base lists / dicts (#74)
|
||||||
|
- Improved queryset filtering (hmarr/mongoengine#554)
|
||||||
|
- Fixed Dynamic Documents and Embedded Documents (hmarr/mongoengine#561)
|
||||||
|
- Fixed abstract classes and shard keys (#64)
|
||||||
|
- Fixed Python 2.5 support
|
||||||
|
- Added Python 3 support (thanks to Laine Heron)
|
||||||
|
|
||||||
|
Changes in 0.6.20
|
||||||
|
=================
|
||||||
|
- Added support for distinct and db_alias (#59)
|
||||||
|
- Improved support for chained querysets when constraining the same fields (hmarr/mongoengine#554)
|
||||||
|
- Fixed BinaryField lookup re (#48)
|
||||||
|
|
||||||
|
Changes in 0.6.19
|
||||||
|
=================
|
||||||
|
|
||||||
|
- Added Binary support to UUID (#47)
|
||||||
|
- Fixed MapField lookup for fields without declared lookups (#46)
|
||||||
|
- Fixed BinaryField python value issue (#48)
|
||||||
|
- Fixed SequenceField non numeric value lookup (#41)
|
||||||
|
- Fixed queryset manager issue (#52)
|
||||||
|
- Fixed FileField comparision (hmarr/mongoengine#547)
|
||||||
|
|
||||||
|
Changes in 0.6.18
|
||||||
|
=================
|
||||||
|
- Fixed recursion loading bug in _get_changed_fields
|
||||||
|
|
||||||
|
Changes in 0.6.17
|
||||||
|
=================
|
||||||
|
- Fixed issue with custom queryset manager expecting explict variable names
|
||||||
|
|
||||||
|
Changes in 0.6.16
|
||||||
|
=================
|
||||||
|
- Fixed issue where db_alias wasn't inherited
|
||||||
|
|
||||||
|
Changes in 0.6.15
|
||||||
|
=================
|
||||||
|
- Updated validation error messages
|
||||||
|
- Added support for null / zero / false values in item_frequencies
|
||||||
|
- Fixed cascade save edge case
|
||||||
|
- Fixed geo index creation through reference fields
|
||||||
|
- Added support for args / kwargs when using @queryset_manager
|
||||||
|
- Deref list custom id fix
|
||||||
|
|
||||||
|
Changes in 0.6.14
|
||||||
|
=================
|
||||||
|
- Fixed error dict with nested validation
|
||||||
|
- Fixed Int/Float fields and not equals None
|
||||||
|
- Exclude tests from installation
|
||||||
|
- Allow tuples for index meta
|
||||||
|
- Fixed use of str in instance checks
|
||||||
|
- Fixed unicode support in transform update
|
||||||
|
- Added support for add_to_set and each
|
||||||
|
|
||||||
|
Changes in 0.6.13
|
||||||
|
=================
|
||||||
|
- Fixed EmbeddedDocument db_field validation issue
|
||||||
|
- Fixed StringField unicode issue
|
||||||
|
- Fixes __repr__ modifying the cursor
|
||||||
|
|
||||||
|
Changes in 0.6.12
|
||||||
|
=================
|
||||||
|
- Fixes scalar lookups for primary_key
|
||||||
|
- Fixes error with _delta handling DBRefs
|
||||||
|
|
||||||
|
Changes in 0.6.11
|
||||||
|
==================
|
||||||
|
- Fixed inconsistency handling None values field attrs
|
||||||
|
- Fixed map_field embedded db_field issue
|
||||||
|
- Fixed .save() _delta issue with DbRefs
|
||||||
|
- Fixed Django TestCase
|
||||||
|
- Added cmp to Embedded Document
|
||||||
|
- Added PULL reverse_delete_rule
|
||||||
|
- Fixed CASCADE delete bug
|
||||||
|
- Fixed db_field data load error
|
||||||
|
- Fixed recursive save with FileField
|
||||||
|
|
||||||
|
Changes in 0.6.10
|
||||||
|
=================
|
||||||
|
- Fixed basedict / baselist to return super(..)
|
||||||
|
- Promoted BaseDynamicField to DynamicField
|
||||||
|
|
||||||
|
Changes in 0.6.9
|
||||||
|
================
|
||||||
|
- Fixed sparse indexes on inherited docs
|
||||||
|
- Removed FileField auto deletion, needs more work maybe 0.7
|
||||||
|
|
||||||
Changes in 0.6.8
|
Changes in 0.6.8
|
||||||
================
|
================
|
||||||
- Fixed FileField losing reference when no default set
|
- Fixed FileField losing reference when no default set
|
||||||
- Removed possible race condition from FileField (grid_file)
|
- Removed possible race condition from FileField (grid_file)
|
||||||
- Added assignment to save, can now do: b = MyDoc(**kwargs).save()
|
- Added assignment to save, can now do: `b = MyDoc(**kwargs).save()`
|
||||||
- Added support for pull operations on nested EmbeddedDocuments
|
- Added support for pull operations on nested EmbeddedDocuments
|
||||||
- Added support for choices with GenericReferenceFields
|
- Added support for choices with GenericReferenceFields
|
||||||
- Added support for choices with GenericEmbeddedDocumentFields
|
- Added support for choices with GenericEmbeddedDocumentFields
|
||||||
|
|||||||
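As one concrete illustration, the new Document JSON helpers noted in the 0.8.X entries above (#1, #131) allow a round trip like the following sketch (the class and field names are made up for illustration):

```python
from mongoengine import Document, StringField

class BlogPost(Document):
    title = StringField()

post = BlogPost(title='Hello')
payload = post.to_json()                # serialize the document to a JSON string
restored = BlogPost.from_json(payload)  # rebuild an instance from that string
assert restored.title == 'Hello'
```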
```diff
@@ -16,7 +16,7 @@ import sys, os
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.append(os.path.abspath('..'))
+sys.path.insert(0, os.path.abspath('..'))
 
 # -- General configuration -----------------------------------------------------
 
@@ -38,7 +38,7 @@ master_doc = 'index'
 
 # General information about the project.
 project = u'MongoEngine'
-copyright = u'2009-2012, MongoEngine Authors'
+copyright = u'2009, MongoEngine Authors'
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
```
```diff
@@ -2,7 +2,7 @@
 Using MongoEngine with Django
 =============================
 
-.. note :: Updated to support Django 1.4
+.. note:: Updated to support Django 1.4
 
 Connecting
 ==========
@@ -10,6 +10,16 @@ In your **settings.py** file, ignore the standard database settings (unless you
 also plan to use the ORM in your project), and instead call
 :func:`~mongoengine.connect` somewhere in the settings module.
 
+.. note ::
+    If you are not using another Database backend you may need to add a dummy
+    database backend to ``settings.py`` eg::
+
+        DATABASES = {
+            'default': {
+                'ENGINE': 'django.db.backends.dummy'
+            }
+        }
+
 Authentication
 ==============
 MongoEngine includes a Django authentication backend, which uses MongoDB. The
@@ -45,6 +55,9 @@ into you settings module::
 
     SESSION_ENGINE = 'mongoengine.django.sessions'
 
+Django provides session cookie, which expires after ```SESSION_COOKIE_AGE``` seconds, but doesnt delete cookie at sessions backend, so ``'mongoengine.django.sessions'`` supports `mongodb TTL
+<http://docs.mongodb.org/manual/tutorial/expire-data/>`_.
+
 .. versionadded:: 0.2.1
 
 Storage
```
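Pulling the pieces from this Django guide hunk together, a settings.py for a MongoEngine-only project might look roughly like this sketch (the database name is a placeholder, and the exact backend path should be checked against the installed version):

```python
# settings.py (sketch)
from mongoengine import connect

connect('myapp-db')  # connect MongoEngine instead of configuring the Django ORM

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.dummy',  # dummy backend, per the note above
    }
}

# MongoDB-backed auth and sessions, as described in the guide
AUTHENTICATION_BACKENDS = ('mongoengine.django.auth.MongoEngineBackend',)
SESSION_ENGINE = 'mongoengine.django.sessions'
```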
```diff
@@ -33,6 +33,12 @@ MongoEngine now supports :func:`~pymongo.replica_set_connection.ReplicaSetConnection`
 to use them please use a URI style connection and provide the `replicaSet` name in the
 connection kwargs.
 
+Read preferences are supported throught the connection or via individual
+queries by passing the read_preference ::
+
+    Bar.objects().read_preference(ReadPreference.PRIMARY)
+    Bar.objects(read_preference=ReadPreference.PRIMARY)
+
 Multiple Databases
 ==================
 
@@ -63,3 +69,21 @@ to point across databases and collections. Below is an example schema, using
     book = ReferenceField(Book)
 
     meta = {"db_alias": "users-books-db"}
+
+
+Switch Database Context Manager
+===============================
+
+Sometimes you might want to switch the database to query against for a class.
+The :class:`~mongoengine.context_managers.switch_db` context manager allows
+you to change the database alias for a class eg ::
+
+    from mongoengine.context_managers import switch_db
+
+    class User(Document):
+        name = StringField()
+
+        meta = {"db_alias": "user-db"}
+
+    with switch_db(User, 'archive-user-db') as User:
+        User(name="Ross").save()  # Saves the 'archive-user-db'
```
@@ -47,7 +47,7 @@ be saved ::
|
|||||||
>>> Page.objects(tags='mongoengine').count()
|
>>> Page.objects(tags='mongoengine').count()
|
||||||
>>> 1
|
>>> 1
|
||||||
|
|
||||||
..note::
|
.. note::
|
||||||
|
|
||||||
There is one caveat on Dynamic Documents: fields cannot start with `_`
|
There is one caveat on Dynamic Documents: fields cannot start with `_`
|
||||||
|
|
||||||
@@ -62,28 +62,31 @@ not provided. Default values may optionally be a callable, which will be called
|
|||||||
to retrieve the value (such as in the above example). The field types available
|
to retrieve the value (such as in the above example). The field types available
|
||||||
are as follows:
|
are as follows:
|
||||||
|
|
||||||
* :class:`~mongoengine.StringField`
|
* :class:`~mongoengine.BinaryField`
|
||||||
* :class:`~mongoengine.URLField`
|
* :class:`~mongoengine.BooleanField`
|
||||||
* :class:`~mongoengine.EmailField`
|
|
||||||
* :class:`~mongoengine.IntField`
|
|
||||||
* :class:`~mongoengine.FloatField`
|
|
||||||
* :class:`~mongoengine.DecimalField`
|
|
||||||
* :class:`~mongoengine.DateTimeField`
|
|
||||||
* :class:`~mongoengine.ComplexDateTimeField`
|
* :class:`~mongoengine.ComplexDateTimeField`
|
||||||
* :class:`~mongoengine.ListField`
|
* :class:`~mongoengine.DateTimeField`
|
||||||
* :class:`~mongoengine.SortedListField`
|
* :class:`~mongoengine.DecimalField`
|
||||||
* :class:`~mongoengine.DictField`
|
* :class:`~mongoengine.DictField`
|
||||||
|
* :class:`~mongoengine.DynamicField`
|
||||||
|
* :class:`~mongoengine.EmailField`
|
||||||
|
* :class:`~mongoengine.EmbeddedDocumentField`
|
||||||
|
* :class:`~mongoengine.FileField`
|
||||||
|
* :class:`~mongoengine.FloatField`
|
||||||
|
* :class:`~mongoengine.GenericEmbeddedDocumentField`
|
||||||
|
* :class:`~mongoengine.GenericReferenceField`
|
||||||
|
* :class:`~mongoengine.GeoPointField`
|
||||||
|
* :class:`~mongoengine.ImageField`
|
||||||
|
* :class:`~mongoengine.IntField`
|
||||||
|
* :class:`~mongoengine.ListField`
|
||||||
* :class:`~mongoengine.MapField`
|
* :class:`~mongoengine.MapField`
|
||||||
* :class:`~mongoengine.ObjectIdField`
|
* :class:`~mongoengine.ObjectIdField`
|
||||||
* :class:`~mongoengine.ReferenceField`
|
* :class:`~mongoengine.ReferenceField`
|
||||||
* :class:`~mongoengine.GenericReferenceField`
|
|
||||||
* :class:`~mongoengine.EmbeddedDocumentField`
|
|
||||||
* :class:`~mongoengine.GenericEmbeddedDocumentField`
|
|
||||||
* :class:`~mongoengine.BooleanField`
|
|
||||||
* :class:`~mongoengine.FileField`
|
|
||||||
* :class:`~mongoengine.BinaryField`
|
|
||||||
* :class:`~mongoengine.GeoPointField`
|
|
||||||
* :class:`~mongoengine.SequenceField`
|
* :class:`~mongoengine.SequenceField`
|
||||||
|
* :class:`~mongoengine.SortedListField`
|
||||||
|
* :class:`~mongoengine.StringField`
|
||||||
|
* :class:`~mongoengine.URLField`
|
||||||
|
* :class:`~mongoengine.UUIDField`
|
||||||
|
|
||||||
Field arguments
|
Field arguments
|
||||||
---------------
|
---------------
|
||||||
@@ -132,7 +135,8 @@ arguments can be set on all fields:
|
|||||||
field, will not have two documents in the collection with the same value.
|
field, will not have two documents in the collection with the same value.
|
||||||
|
|
||||||
:attr:`primary_key` (Default: False)
|
:attr:`primary_key` (Default: False)
|
||||||
When True, use this field as a primary key for the collection.
|
When True, use this field as a primary key for the collection. `DictField`
|
||||||
|
and `EmbeddedDocuments` both support being the primary key for a document.
|
||||||
|
|
||||||
:attr:`choices` (Default: None)
|
:attr:`choices` (Default: None)
|
||||||
An iterable (e.g. a list or tuple) of choices to which the value of this
|
An iterable (e.g. a list or tuple) of choices to which the value of this
|
||||||
@@ -256,6 +260,35 @@ as the constructor's argument::
|
|||||||
content = StringField()
|
content = StringField()
|
||||||
|
|
||||||
|
|
||||||
|
.. _one-to-many-with-listfields:
|
||||||
|
|
||||||
|
One to Many with ListFields
|
||||||
|
'''''''''''''''''''''''''''
|
||||||
|
|
||||||
|
If you are implementing a one to many relationship via a list of references,
|
||||||
|
then the references are stored as DBRefs and to query you need to pass an
|
||||||
|
instance of the object to the query::
|
||||||
|
|
||||||
|
class User(Document):
|
||||||
|
name = StringField()
|
||||||
|
|
||||||
|
class Page(Document):
|
||||||
|
content = StringField()
|
||||||
|
authors = ListField(ReferenceField(User))
|
||||||
|
|
||||||
|
bob = User(name="Bob Jones").save()
|
||||||
|
john = User(name="John Smith").save()
|
||||||
|
|
||||||
|
Page(content="Test Page", authors=[bob, john]).save()
|
||||||
|
Page(content="Another Page", authors=[john]).save()
|
||||||
|
|
||||||
|
# Find all pages Bob authored
|
||||||
|
Page.objects(authors__in=[bob])
|
||||||
|
|
||||||
|
# Find all pages that both Bob and John have authored
|
||||||
|
Page.objects(authors__all=[bob, john])
|
||||||
|
|
||||||
|
|
||||||
Dealing with deletion of referred documents
|
Dealing with deletion of referred documents
|
||||||
'''''''''''''''''''''''''''''''''''''''''''
|
'''''''''''''''''''''''''''''''''''''''''''
|
||||||
By default, MongoDB doesn't check the integrity of your data, so deleting
|
By default, MongoDB doesn't check the integrity of your data, so deleting
|
||||||
@@ -289,6 +322,10 @@ Its value can take any of the following constants:
|
|||||||
:const:`mongoengine.CASCADE`
|
:const:`mongoengine.CASCADE`
|
||||||
Any object containing fields that are refererring to the object being deleted
|
Any object containing fields that are refererring to the object being deleted
|
||||||
are deleted first.
|
are deleted first.
|
||||||
|
:const:`mongoengine.PULL`
|
||||||
|
Removes the reference to the object (using MongoDB's "pull" operation)
|
||||||
|
from any object's fields of
|
||||||
|
:class:`~mongoengine.ListField` (:class:`~mongoengine.ReferenceField`).
|
||||||
|
|
||||||
|
|
||||||
.. warning::
|
.. warning::
|
||||||
@@ -308,6 +345,10 @@ Its value can take any of the following constants:
|
|||||||
their :file:`models.py` in the :const:`INSTALLED_APPS` tuple.
|
their :file:`models.py` in the :const:`INSTALLED_APPS` tuple.
|
||||||
|
|
||||||
|
|
||||||
|
.. warning::
|
||||||
|
Signals are not triggered when doing cascading updates / deletes - if this
|
||||||
|
is required you must manually handle the update / delete.
|
||||||
|
|
||||||
Generic reference fields
|
Generic reference fields
|
||||||
''''''''''''''''''''''''
|
''''''''''''''''''''''''
|
||||||
A second kind of reference field also exists,
|
A second kind of reference field also exists,
|
||||||
@@ -401,6 +442,7 @@ The following example shows a :class:`Log` document that will be limited to
|
|||||||
|
|
||||||
Indexes
|
Indexes
|
||||||
=======
|
=======
|
||||||
|
|
||||||
You can specify indexes on collections to make querying faster. This is done
|
You can specify indexes on collections to make querying faster. This is done
|
||||||
by creating a list of index specifications called :attr:`indexes` in the
|
by creating a list of index specifications called :attr:`indexes` in the
|
||||||
:attr:`~mongoengine.Document.meta` dictionary, where an index specification may
|
:attr:`~mongoengine.Document.meta` dictionary, where an index specification may
|
||||||
@@ -421,25 +463,34 @@ If a dictionary is passed then the following options are available:
|
|||||||
:attr:`fields` (Default: None)
|
:attr:`fields` (Default: None)
|
||||||
The fields to index. Specified in the same format as described above.
|
The fields to index. Specified in the same format as described above.
|
||||||
|
|
||||||
:attr:`types` (Default: True)
|
:attr:`cls` (Default: True)
|
||||||
Whether the index should have the :attr:`_types` field added automatically
|
If you have polymorphic models that inherit and have
|
||||||
to the start of the index.
|
:attr:`allow_inheritance` turned on, you can configure whether the index
|
||||||
|
should have the :attr:`_cls` field added automatically to the start of the
|
||||||
|
index.
|
||||||
|
|
||||||
:attr:`sparse` (Default: False)
|
:attr:`sparse` (Default: False)
|
||||||
Whether the index should be sparse.
|
Whether the index should be sparse.
|
||||||
|
|
||||||
:attr:`unique` (Default: False)
|
:attr:`unique` (Default: False)
|
||||||
Whether the index should be sparse.
|
Whether the index should be unique.
|
||||||
|
|
||||||
.. warning::
|
.. note::
|
||||||
|
|
||||||
|
Inheritance adds extra fields indices see: :ref:`document-inheritance`.
|
||||||
|
|
||||||
Inheritance adds extra indices.
|
Compound Indexes and Indexing sub documents
|
||||||
If don't need inheritance for a document turn inheritance off - see :ref:`document-inheritance`.
|
-------------------------------------------
|
||||||
|
|
||||||
|
Compound indexes can be created by adding the Embedded field or dictionary
|
||||||
|
field name to the index definition.
|
||||||
|
|
||||||
|
Sometimes its more efficient to index parts of Embeedded / dictionary fields,
|
||||||
|
in this case use 'dot' notation to identify the value to index eg: `rank.title`
|
||||||
|
|
||||||
Geospatial indexes
|
Geospatial indexes
|
||||||
---------------------------
|
------------------
|
||||||
|
|
||||||
Geospatial indexes will be automatically created for all
|
Geospatial indexes will be automatically created for all
|
||||||
:class:`~mongoengine.GeoPointField`\ s
|
:class:`~mongoengine.GeoPointField`\ s
|
||||||
|
|
||||||
@@ -527,7 +578,9 @@ defined, you may subclass it and add any extra fields or methods you may need.

 As this new class is not a direct subclass of
 :class:`~mongoengine.Document`, it will not be stored in its own collection; it
 will use the same collection as its superclass uses. This allows for more
-convenient and efficient retrieval of related documents::
+convenient and efficient retrieval of related documents - all you need do is
+set :attr:`allow_inheritance` to True in the :attr:`meta` data for a
+document::

     # Stored in a collection named 'page'
     class Page(Document):
@@ -539,25 +592,26 @@ convenient and efficient retrieval of related documents::

     class DatedPage(Page):
         date = DateTimeField()

-.. note:: From 0.7 onwards you must declare `allow_inheritance` in the document meta.
+.. note:: From 0.8 onwards :attr:`allow_inheritance` defaults to False,
+   meaning you must set it to True to use inheritance.

 Working with existing data
 --------------------------
-To enable correct retrieval of documents involved in this kind of heirarchy,
-two extra attributes are stored on each document in the database: :attr:`_cls`
-and :attr:`_types`. These are hidden from the user through the MongoEngine
-interface, but may not be present if you are trying to use MongoEngine with
-an existing database. For this reason, you may disable this inheritance
-mechansim, removing the dependency of :attr:`_cls` and :attr:`_types`, enabling
-you to work with existing databases. To disable inheritance on a document
-class, set :attr:`allow_inheritance` to ``False`` in the :attr:`meta`
-dictionary::
+As MongoEngine no longer defaults to needing :attr:`_cls` you can quickly and
+easily get working with existing data. Just define the document to match
+the expected schema in your database::

     # Will work with data in an existing collection named 'cmsPage'
     class Page(Document):
         title = StringField(max_length=200, required=True)
         meta = {
-            'collection': 'cmsPage',
-            'allow_inheritance': False,
+            'collection': 'cmsPage'
         }

+If you have wildly varying schemas then using a
+:class:`~mongoengine.DynamicDocument` might be more appropriate, instead of
+defining all possible field types.
+
+If you use :class:`~mongoengine.Document` and the database contains data that
+isn't defined then that data will be stored in the ``document._data`` dictionary.
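A short sketch of that alternative (reusing the collection name above)::

    class Page(DynamicDocument):
        meta = {'collection': 'cmsPage'}

    # Attributes not declared on the class are persisted too:
    page = Page(title='About', legacy_field='kept as-is')
    page.save()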
@@ -38,6 +38,34 @@ already exist, then any changes will be updated atomically. For example::

 .. seealso::
     :ref:`guide-atomic-updates`

+Pre save data validation and cleaning
+-------------------------------------
+MongoEngine allows you to create custom cleaning rules for your documents when
+calling :meth:`~mongoengine.Document.save`. By providing a custom
+:meth:`~mongoengine.Document.clean` method you can do any pre validation / data
+cleaning.
+
+This might be useful if you want to ensure a default value based on other
+document values, for example::
+
+    class Essay(Document):
+        status = StringField(choices=('Published', 'Draft'), required=True)
+        pub_date = DateTimeField()
+
+        def clean(self):
+            """Ensures that only published essays have a `pub_date` and
+            automatically sets the pub_date if published and not set"""
+            if self.status == 'Draft' and self.pub_date is not None:
+                msg = 'Draft entries should not have a publication date.'
+                raise ValidationError(msg)
+            # Set the pub_date for published items if not set.
+            if self.status == 'Published' and self.pub_date is None:
+                self.pub_date = datetime.now()
+
+.. note::
+    Cleaning is only called if validation is turned on and when calling
+    :meth:`~mongoengine.Document.save`.
+
 Cascading Saves
 ---------------
 If your document contains :class:`~mongoengine.ReferenceField` or
@@ -18,20 +18,10 @@ a document is created to store details about animals, including a photo::

         family = StringField()
         photo = FileField()

-    marmot = Animal('Marmota', 'Sciuridae')
+    marmot = Animal(genus='Marmota', family='Sciuridae')

-    marmot_photo = open('marmot.jpg', 'r')      # Retrieve a photo from disk
-    marmot.photo = marmot_photo                 # Store photo in the document
-    marmot.photo.content_type = 'image/jpeg'    # Store metadata
-
-    marmot.save()
-
-Another way of writing to a :class:`~mongoengine.FileField` is to use the
-:func:`put` method. This allows for metadata to be stored in the same call as
-the file::
-
-    marmot.photo.put(marmot_photo, content_type='image/jpeg')
-
+    marmot_photo = open('marmot.jpg', 'r')
+    marmot.photo.put(marmot_photo, content_type='image/jpeg')
     marmot.save()

 Retrieval

@@ -65,12 +55,13 @@ Deleting stored files is achieved with the :func:`delete` method::

     marmot.photo.delete()

-.. note::
+.. warning::

     The FileField in a Document actually only stores the ID of a file in a
-    separate GridFS collection. This means that `Animal.drop_collection()` will
-    not delete any files. Care should be taken to manually remove associated
-    files before dropping a collection.
+    separate GridFS collection. This means that deleting a document
+    with a defined FileField does not actually delete the file. You must be
+    careful to delete any files in a Document as above before deleting the
+    Document itself.
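A minimal sketch of that cleanup, assuming the `Animal` model above::

    # Delete each stored file first; dropping the collection alone
    # would orphan the files in the GridFS collections.
    for animal in Animal.objects:
        animal.photo.delete()
    Animal.drop_collection()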
 Replacing files
@@ -92,6 +92,8 @@ may be used with :class:`~mongoengine.GeoPointField`\ s:

 * ``within_polygon`` -- filter documents to those within a given polygon (e.g.
   [(41.91,-87.69), (41.92,-87.68), (41.91,-87.65), (41.89,-87.65)]).
   .. note:: Requires Mongo Server 2.0
+* ``max_distance`` -- can be added to your location queries to set a maximum
+  distance.
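For example (a sketch; the `Place` model and coordinates are our own)::

    # Places no further than the given distance from the point
    Place.objects(point__near=[41.9, -87.6], point__max_distance=5)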
 Querying lists
@@ -179,9 +181,11 @@ Retrieving unique results
 -------------------------
 To retrieve a result that should be unique in the collection, use
 :meth:`~mongoengine.queryset.QuerySet.get`. This will raise
 :class:`~mongoengine.queryset.DoesNotExist` if no document matches the query,
 and :class:`~mongoengine.queryset.MultipleObjectsReturned` if more than one
-document matched the query.
+document matched the query. These exceptions are merged into
+your document definitions, e.g. `MyDoc.DoesNotExist`.
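For example (a sketch, assuming the `BlogPost` document used elsewhere in this
guide)::

    try:
        post = BlogPost.objects.get(title='A unique title')
    except BlogPost.DoesNotExist:
        post = None   # no document matched
    except BlogPost.MultipleObjectsReturned:
        raise         # more than one document matched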
 A variation of this method exists,
 :meth:`~mongoengine.queryset.Queryset.get_or_create`, that will create a new
@@ -232,7 +236,7 @@ custom manager methods as you like::

     BlogPost(title='test1', published=False).save()
     BlogPost(title='test2', published=True).save()
     assert len(BlogPost.objects) == 2
-    assert len(BlogPost.live_posts) == 1
+    assert len(BlogPost.live_posts()) == 1
 Custom QuerySets
 ================

@@ -243,11 +247,16 @@ a document, set ``queryset_class`` to the custom class in a
 :class:`~mongoengine.Document`\ s ``meta`` dictionary::

     class AwesomerQuerySet(QuerySet):
-        pass
+
+        def get_awesome(self):
+            return self.filter(awesome=True)

     class Page(Document):
         meta = {'queryset_class': AwesomerQuerySet}

+    # To call:
+    Page.objects.get_awesome()
+
 .. versionadded:: 0.4

 Aggregation
@@ -360,6 +369,27 @@ references to the depth of 1 level. If you have more complicated documents and

 want to dereference more of the object at once then increasing the :attr:`max_depth`
 will dereference more levels of the document.

+Turning off dereferencing
+-------------------------
+
+Sometimes for performance reasons you don't want to automatically dereference
+data. To turn off dereferencing of the results of a query use
+:func:`~mongoengine.queryset.QuerySet.no_dereference` on the queryset like so::
+
+    post = Post.objects.no_dereference().first()
+    assert(isinstance(post.author, ObjectId))
+
+You can also turn off all dereferencing for a fixed period by using the
+:class:`~mongoengine.context_managers.no_dereference` context manager::
+
+    with no_dereference(Post) as Post:
+        post = Post.objects.first()
+        assert(isinstance(post.author, ObjectId))
+
+    # Outside the context manager dereferencing occurs.
+    assert(isinstance(post.author, User))
+
 Advanced queries
 ================
 Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword
@@ -50,4 +50,11 @@ Example usage::

     signals.post_save.connect(Author.post_save, sender=Author)

+ReferenceFields and signals
+---------------------------
+
+Currently `reverse_delete_rules` do not trigger signals on the other part of
+the relationship. If this is required you must manually handle the
+reverse deletion.
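One possible workaround, sketched for hypothetical `Author` / `Book` models
where deleting an `Author` cascades to their books: hook the deleting side's
signal and do the related cleanup yourself::

    from mongoengine import signals

    def pre_delete_author(sender, document, **kwargs):
        # The cascaded deletes will not fire the books' own delete
        # signals, so perform any per-book cleanup here.
        for book in Book.objects(author=document):
            cleanup(book)  # hypothetical per-document cleanup

    signals.pre_delete.connect(pre_delete_author, sender=Author)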
 .. _blinker: http://pypi.python.org/pypi/blinker
@@ -34,10 +34,10 @@ To get help with using MongoEngine, use the `MongoEngine Users mailing list

 Contributing
 ------------

-The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_ and
+The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ and
 contributions are always encouraged. Contributions can be as simple as
 minor tweaks to this documentation. To contribute, fork the project on
-`GitHub <http://github.com/hmarr/mongoengine>`_ and send a
+`GitHub <http://github.com/MongoEngine/mongoengine>`_ and send a
 pull request.

 Also, you can join the developers' `mailing list
@@ -84,12 +84,15 @@ using* the new fields we need to support video posts. This fits with the

 Object-Oriented principle of *inheritance* nicely. We can think of
 :class:`Post` as a base class, and :class:`TextPost`, :class:`ImagePost` and
 :class:`LinkPost` as subclasses of :class:`Post`. In fact, MongoEngine supports
-this kind of modelling out of the box::
+this kind of modelling out of the box - all you need do is turn on inheritance
+by setting :attr:`allow_inheritance` to True in the :attr:`meta`::

     class Post(Document):
         title = StringField(max_length=120, required=True)
         author = ReferenceField(User)

+        meta = {'allow_inheritance': True}
+
     class TextPost(Post):
         content = StringField()
@@ -167,7 +170,7 @@ To delete all the posts if a user is deleted set the rule::

 See :class:`~mongoengine.ReferenceField` for more information.

-..note::
+.. note::
     MapFields and DictFields currently don't support automatic handling of
     deleted references
docs/upgrade.rst (213 lines changed)

@@ -2,21 +2,200 @@

 Upgrading
 =========

+0.7 to 0.8
+==========
+
+Inheritance
+-----------
+
+Data Model
+~~~~~~~~~~
+
+The inheritance model has changed: we no longer need to store an array of
+:attr:`types` with the model, we can just use the classname in :attr:`_cls`.
+This means that you will have to update your indexes for each of your
+inherited classes like so::
+
+    # 1. Declaration of the class
+    class Animal(Document):
+        name = StringField()
+        meta = {
+            'allow_inheritance': True,
+            'indexes': ['name']
+        }
+
+    # 2. Remove _types
+    collection = Animal._get_collection()
+    collection.update({}, {"$unset": {"_types": 1}}, multi=True)
+
+    # 3. Confirm extra data is removed
+    count = collection.find({'_types': {"$exists": True}}).count()
+    assert count == 0
+
+    # 4. Remove indexes
+    info = collection.index_information()
+    indexes_to_drop = [key for key, value in info.iteritems()
+                       if '_types' in dict(value['key'])]
+    for index in indexes_to_drop:
+        collection.drop_index(index)
+
+    # 5. Recreate indexes
+    Animal.ensure_indexes()
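To sanity-check the migration afterwards (a sketch, reusing the `Animal`
model from step 1)::

    # New saves should store only _cls, not _types
    Animal(name="otter").save()
    raw = Animal._get_collection().find_one({'name': 'otter'})
    assert '_types' not in raw
    assert raw.get('_cls') == 'Animal'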
+Document Definition
+~~~~~~~~~~~~~~~~~~~
+
+The default for inheritance has changed - it's now off by default and
+:attr:`_cls` will not be stored automatically with the class. So if you extend
+your :class:`~mongoengine.Document` or :class:`~mongoengine.EmbeddedDocument`\ s
+you will need to declare :attr:`allow_inheritance` in the meta data like so::
+
+    class Animal(Document):
+        name = StringField()
+
+        meta = {'allow_inheritance': True}
+
+Previously, if you had data in the database that wasn't defined in the Document
+definition, it would be set as an attribute on the document. This is no longer
+the case and the data is set only in the ``document._data`` dictionary::
+
+    >>> from mongoengine import *
+    >>> class Animal(Document):
+    ...     name = StringField()
+    ...
+    >>> cat = Animal(name="kit", size="small")
+
+    # 0.7
+    >>> cat.size
+    u'small'
+
+    # 0.8
+    >>> cat.size
+    Traceback (most recent call last):
+      File "<stdin>", line 1, in <module>
+    AttributeError: 'Animal' object has no attribute 'size'
+
+Querysets
+~~~~~~~~~
+
+Querysets now return clones and should no longer be considered editable in
+place. This brings us in line with how Django's querysets work and removes a
+long running gotcha. If you edit your querysets in place you will have to
+update your code like so::
+
+    # Old code:
+    mammals = Animal.objects(type="mammal")
+    mammals.filter(order="Carnivora")  # Returns a cloned queryset that isn't assigned to anything - so this will break in 0.8
+    [m for m in mammals]  # This will return all mammals in 0.8 as the 2nd filter returned a new queryset
+
+    # Update example a) assign queryset after a change:
+    mammals = Animal.objects(type="mammal")
+    carnivores = mammals.filter(order="Carnivora")  # Reassign the new queryset so filter can be applied
+    [m for m in carnivores]  # This will return all carnivores
+
+    # Update example b) chain the queryset:
+    mammals = Animal.objects(type="mammal").filter(order="Carnivora")  # The final queryset is assigned to mammals
+    [m for m in mammals]  # This will return all carnivores
+
+Indexes
+-------
+
+Index methods are no longer tied to querysets but rather to the document class.
+Although `QuerySet._ensure_indexes` and `QuerySet.ensure_index` still exist,
+they should be replaced with :func:`~mongoengine.Document.ensure_indexes` /
+:func:`~mongoengine.Document.ensure_index`.
+
+SequenceFields
+--------------
+
+:class:`~mongoengine.fields.SequenceField` now inherits from `BaseField` to
+allow flexible storage of the calculated value. As such MIN and MAX settings
+are no longer handled.
+0.6 to 0.7
+==========
+
+Cascade saves
+-------------
+
+Saves will raise a `FutureWarning` if they cascade and cascade hasn't been set
+to True. This is because in 0.8 it will default to False. If you require
+cascading saves then either set it in the `meta` or pass
+it via `save`, e.g.::
+
+    # At the class level:
+    class Person(Document):
+        meta = {'cascade': True}
+
+    # Or in code:
+    my_document.save(cascade=True)
+
+.. note::
+    Remember: cascading saves **do not** cascade through lists.
+
+ReferenceFields
+---------------
+
+ReferenceFields can now store references as ObjectId strings instead of DBRefs.
+This will become the default in 0.8, and if `dbref` is not set a `FutureWarning`
+will be raised.
+
+To explicitly continue to use DBRefs, change the `dbref` flag
+to True::
+
+    class Person(Document):
+        groups = ListField(ReferenceField(Group, dbref=True))
+
+To migrate to using strings instead of DBRefs you will have to manually
+migrate::
+
+    # Step 1 - Migrate the model definition
+    class Group(Document):
+        author = ReferenceField(User, dbref=False)
+        members = ListField(ReferenceField(User, dbref=False))
+
+    # Step 2 - Migrate the data
+    for g in Group.objects():
+        g.author = g.author
+        g.members = g.members
+        g.save()
+
+item_frequencies
+----------------
+
+In the 0.6 series we added support for null / zero / false values in
+item_frequencies. A side effect was to return keys in the type they are
+stored in rather than as string representations. Your code may need to be
+updated to handle native types rather than string keys for the results of
+item frequency queries.
BinaryFields
|
||||||
|
------------
|
||||||
|
|
||||||
|
Binary fields have been updated so that they are native binary types. If you
|
||||||
|
previously were doing `str` comparisons with binary field values you will have
|
||||||
|
to update and wrap the value in a `str`.
|
||||||
|
|
||||||
0.5 to 0.6
|
0.5 to 0.6
|
||||||
==========
|
==========
|
||||||
|
|
||||||
Embedded Documents - if you had a `pk` field you will have to rename it from `_id`
|
Embedded Documents - if you had a `pk` field you will have to rename it from
|
||||||
to `pk` as pk is no longer a property of Embedded Documents.
|
`_id` to `pk` as pk is no longer a property of Embedded Documents.
|
||||||
|
|
||||||
Reverse Delete Rules in Embedded Documents, MapFields and DictFields now throw
|
Reverse Delete Rules in Embedded Documents, MapFields and DictFields now throw
|
||||||
an InvalidDocument error as they aren't currently supported.
|
an InvalidDocument error as they aren't currently supported.
|
||||||
|
|
||||||
Document._get_subclasses - Is no longer used and the class method has been removed.
|
Document._get_subclasses - Is no longer used and the class method has been
|
||||||
|
removed.
|
||||||
|
|
||||||
Document.objects.with_id - now raises an InvalidQueryError if used with a filter.
|
Document.objects.with_id - now raises an InvalidQueryError if used with a
|
||||||
|
filter.
|
||||||
|
|
||||||
FutureWarning - A future warning has been added to all inherited classes that
|
FutureWarning - A future warning has been added to all inherited classes that
|
||||||
don't define `allow_inheritance` in their meta.
|
don't define :attr:`allow_inheritance` in their meta.
|
||||||
|
|
||||||
You may need to update pyMongo to 2.0 for use with Sharding.
|
You may need to update pyMongo to 2.0 for use with Sharding.
|
||||||
|
|
||||||
@@ -37,11 +216,11 @@ human-readable name for the option.

 PyMongo / MongoDB
 -----------------

 map reduce now requires pymongo 1.11+. The pymongo `merge_output` and
 `reduce_output` parameters have been deprecated.

 More methods now use map_reduce, as db.eval is not supported for sharding; as
 such the following have been changed:

 * :meth:`~mongoengine.queryset.QuerySet.sum`
 * :meth:`~mongoengine.queryset.QuerySet.average`

@@ -51,8 +230,8 @@ the following have been changed:

 Default collection naming
 -------------------------

 Previously it was just lowercase; it's now much more pythonic and readable as
 it's lowercase with underscores. Previously::

     class MyAceDocument(Document):
         pass

@@ -88,7 +267,8 @@ Alternatively, you can rename your collections eg ::

     failure = False

     collection_names = [d._get_collection_name()
                         for d in _document_registry.values()]

     for new_style_name in collection_names:
         if not new_style_name:  # embedded documents don't have collections

@@ -106,10 +286,17 @@ Alternatively, you can rename your collections eg ::

                     old_style_name, new_style_name)
             else:
                 db[old_style_name].rename(new_style_name)
                 print "Renamed: %s to %s" % (old_style_name,
                                              new_style_name)

     if failure:
         print "Upgrading collection names failed"
     else:
         print "Upgraded collection names"

+mongodb 1.8 > 2.0 +
+===================
+
+It's been reported that indexes may need to be recreated for the newer version
+of indexes. To do this, drop your indexes and call ``ensure_indexes`` on each
+model.
|
||||||
|
|||||||
@@ -8,17 +8,17 @@ import queryset
|
|||||||
from queryset import *
|
from queryset import *
|
||||||
import signals
|
import signals
|
||||||
from signals import *
|
from signals import *
|
||||||
|
import django
|
||||||
|
|
||||||
__all__ = (document.__all__ + fields.__all__ + connection.__all__ +
|
__all__ = (list(document.__all__) + fields.__all__ + connection.__all__ +
|
||||||
queryset.__all__ + signals.__all__)
|
list(queryset.__all__) + signals.__all__)
|
||||||
|
|
||||||
VERSION = (0, 6, 8)
|
VERSION = (0, 8, 0, '+')
|
||||||
|
|
||||||
|
|
||||||
def get_version():
|
def get_version():
|
||||||
version = '%s.%s' % (VERSION[0], VERSION[1])
|
if isinstance(VERSION[-1], basestring):
|
||||||
if VERSION[2]:
|
return '.'.join(map(str, VERSION[:-1])) + VERSION[-1]
|
||||||
version = '%s.%s' % (version, VERSION[2])
|
return '.'.join(map(str, VERSION))
|
||||||
return version
|
|
||||||
|
|
||||||
__version__ = get_version()
|
__version__ = get_version()
|
||||||
|
|||||||
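For reference, what the new `get_version` produces (worked by hand from the
code above)::

    >>> # VERSION = (0, 8, 0, '+') -> last element is a string
    >>> get_version()
    '0.8.0+'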
mongoengine/base.py (1382 lines changed)
File diff suppressed because it is too large.

mongoengine/base/__init__.py (new file, 5 lines)

@@ -0,0 +1,5 @@
from mongoengine.base.common import *
from mongoengine.base.datastructures import *
from mongoengine.base.document import *
from mongoengine.base.fields import *
from mongoengine.base.metaclasses import *

mongoengine/base/common.py (new file, 26 lines)

@@ -0,0 +1,26 @@
from mongoengine.errors import NotRegistered

__all__ = ('ALLOW_INHERITANCE', 'get_document', '_document_registry')

ALLOW_INHERITANCE = False

_document_registry = {}


def get_document(name):
    doc = _document_registry.get(name, None)
    if not doc:
        # Possible old style name
        single_end = name.split('.')[-1]
        compound_end = '.%s' % single_end
        possible_match = [k for k in _document_registry.keys()
                          if k.endswith(compound_end) or k == single_end]
        if len(possible_match) == 1:
            doc = _document_registry.get(possible_match.pop(), None)
    if not doc:
        raise NotRegistered("""
            `%s` has not been registered in the document registry.
            Importing the document class automatically registers it, has it
            been imported?
        """.strip() % name)
    return doc
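A usage sketch (the `BlogPost` class is our own; defining a document class
registers it automatically via the metaclass)::

    class BlogPost(Document):
        pass

    assert get_document('BlogPost') is BlogPost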
mongoengine/base/datastructures.py (new file, 142 lines)

@@ -0,0 +1,142 @@
import weakref
from mongoengine.common import _import_class

__all__ = ("BaseDict", "BaseList")


class BaseDict(dict):
    """A special dict so we can watch any changes
    """

    _dereferenced = False
    _instance = None
    _name = None

    def __init__(self, dict_items, instance, name):
        self._instance = weakref.proxy(instance)
        self._name = name
        return super(BaseDict, self).__init__(dict_items)

    def __getitem__(self, *args, **kwargs):
        value = super(BaseDict, self).__getitem__(*args, **kwargs)

        EmbeddedDocument = _import_class('EmbeddedDocument')
        if isinstance(value, EmbeddedDocument) and value._instance is None:
            value._instance = self._instance
        return value

    def __setitem__(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).__setitem__(*args, **kwargs)

    def __delete__(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).__delete__(*args, **kwargs)

    def __delitem__(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).__delitem__(*args, **kwargs)

    def __delattr__(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).__delattr__(*args, **kwargs)

    def __getstate__(self):
        self.instance = None
        self._dereferenced = False
        return self

    def __setstate__(self, state):
        self = state
        return self

    def clear(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).clear(*args, **kwargs)

    def pop(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).pop(*args, **kwargs)

    def popitem(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).popitem(*args, **kwargs)

    def update(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).update(*args, **kwargs)

    def _mark_as_changed(self):
        if hasattr(self._instance, '_mark_as_changed'):
            self._instance._mark_as_changed(self._name)


class BaseList(list):
    """A special list so we can watch any changes
    """

    _dereferenced = False
    _instance = None
    _name = None

    def __init__(self, list_items, instance, name):
        self._instance = weakref.proxy(instance)
        self._name = name
        return super(BaseList, self).__init__(list_items)

    def __getitem__(self, *args, **kwargs):
        value = super(BaseList, self).__getitem__(*args, **kwargs)

        EmbeddedDocument = _import_class('EmbeddedDocument')
        if isinstance(value, EmbeddedDocument) and value._instance is None:
            value._instance = self._instance
        return value

    def __setitem__(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).__setitem__(*args, **kwargs)

    def __delitem__(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).__delitem__(*args, **kwargs)

    def __getstate__(self):
        self.instance = None
        self._dereferenced = False
        return self

    def __setstate__(self, state):
        self = state
        return self

    def append(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).append(*args, **kwargs)

    def extend(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).extend(*args, **kwargs)

    def insert(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).insert(*args, **kwargs)

    def pop(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).pop(*args, **kwargs)

    def remove(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).remove(*args, **kwargs)

    def reverse(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).reverse(*args, **kwargs)

    def sort(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).sort(*args, **kwargs)

    def _mark_as_changed(self):
        if hasattr(self._instance, '_mark_as_changed'):
            self._instance._mark_as_changed(self._name)
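A sketch of how these containers behave from the outside (the `Post` model
and `tags` field are our own)::

    # Mutating a BaseList marks the owning document's field as changed,
    # which is what drives the partial ($set / $unset) update on save.
    post = Post.objects.first()
    post.tags.append('mongodb')   # BaseList.append -> _mark_as_changed('tags')
    assert 'tags' in post._get_changed_fields()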
mongoengine/base/document.py (new file, 801 lines)

@@ -0,0 +1,801 @@
import copy
|
||||||
|
import operator
|
||||||
|
import numbers
|
||||||
|
from functools import partial
|
||||||
|
|
||||||
|
import pymongo
|
||||||
|
from bson import json_util
|
||||||
|
from bson.dbref import DBRef
|
||||||
|
|
||||||
|
from mongoengine import signals
|
||||||
|
from mongoengine.common import _import_class
|
||||||
|
from mongoengine.errors import (ValidationError, InvalidDocumentError,
|
||||||
|
LookUpError)
|
||||||
|
from mongoengine.python_support import (PY3, UNICODE_KWARGS, txt_type,
|
||||||
|
to_str_keys_recursive)
|
||||||
|
|
||||||
|
from mongoengine.base.common import get_document, ALLOW_INHERITANCE
|
||||||
|
from mongoengine.base.datastructures import BaseDict, BaseList
|
||||||
|
from mongoengine.base.fields import ComplexBaseField
|
||||||
|
|
||||||
|
__all__ = ('BaseDocument', 'NON_FIELD_ERRORS')
|
||||||
|
|
||||||
|
NON_FIELD_ERRORS = '__all__'
|
||||||
|
|
||||||
|
|
||||||
|
class BaseDocument(object):
|
||||||
|
|
||||||
|
_dynamic = False
|
||||||
|
_created = True
|
||||||
|
_dynamic_lock = True
|
||||||
|
_initialised = False
|
||||||
|
|
||||||
|
def __init__(self, *args, **values):
|
||||||
|
"""
|
||||||
|
Initialise a document or embedded document
|
||||||
|
|
||||||
|
:param __auto_convert: Try and will cast python objects to Object types
|
||||||
|
:param values: A dictionary of values for the document
|
||||||
|
"""
|
||||||
|
if args:
|
||||||
|
# Combine positional arguments with named arguments.
|
||||||
|
# We only want named arguments.
|
||||||
|
field = iter(self._fields_ordered)
|
||||||
|
for value in args:
|
||||||
|
name = next(field)
|
||||||
|
if name in values:
|
||||||
|
raise TypeError("Multiple values for keyword argument '" + name + "'")
|
||||||
|
values[name] = value
|
||||||
|
__auto_convert = values.pop("__auto_convert", True)
|
||||||
|
signals.pre_init.send(self.__class__, document=self, values=values)
|
||||||
|
|
||||||
|
self._data = {}
|
||||||
|
|
||||||
|
# Assign default values to instance
|
||||||
|
for key, field in self._fields.iteritems():
|
||||||
|
if self._db_field_map.get(key, key) in values:
|
||||||
|
continue
|
||||||
|
value = getattr(self, key, None)
|
||||||
|
setattr(self, key, value)
|
||||||
|
|
||||||
|
# Set passed values after initialisation
|
||||||
|
if self._dynamic:
|
||||||
|
self._dynamic_fields = {}
|
||||||
|
dynamic_data = {}
|
||||||
|
for key, value in values.iteritems():
|
||||||
|
if key in self._fields or key == '_id':
|
||||||
|
setattr(self, key, value)
|
||||||
|
elif self._dynamic:
|
||||||
|
dynamic_data[key] = value
|
||||||
|
else:
|
||||||
|
FileField = _import_class('FileField')
|
||||||
|
for key, value in values.iteritems():
|
||||||
|
if key == '__auto_convert':
|
||||||
|
continue
|
||||||
|
key = self._reverse_db_field_map.get(key, key)
|
||||||
|
if key in self._fields or key in ('id', 'pk', '_cls'):
|
||||||
|
if __auto_convert and value is not None:
|
||||||
|
field = self._fields.get(key)
|
||||||
|
if field and not isinstance(field, FileField):
|
||||||
|
value = field.to_python(value)
|
||||||
|
setattr(self, key, value)
|
||||||
|
else:
|
||||||
|
self._data[key] = value
|
||||||
|
|
||||||
|
# Set any get_fieldname_display methods
|
||||||
|
self.__set_field_display()
|
||||||
|
|
||||||
|
if self._dynamic:
|
||||||
|
self._dynamic_lock = False
|
||||||
|
for key, value in dynamic_data.iteritems():
|
||||||
|
setattr(self, key, value)
|
||||||
|
|
||||||
|
# Flag initialised
|
||||||
|
self._initialised = True
|
||||||
|
signals.post_init.send(self.__class__, document=self)
|
||||||
|
|
||||||
|
def __delattr__(self, *args, **kwargs):
|
||||||
|
"""Handle deletions of fields"""
|
||||||
|
field_name = args[0]
|
||||||
|
if field_name in self._fields:
|
||||||
|
default = self._fields[field_name].default
|
||||||
|
if callable(default):
|
||||||
|
default = default()
|
||||||
|
setattr(self, field_name, default)
|
||||||
|
else:
|
||||||
|
super(BaseDocument, self).__delattr__(*args, **kwargs)
|
||||||
|
|
||||||
|
def __setattr__(self, name, value):
|
||||||
|
# Handle dynamic data only if an initialised dynamic document
|
||||||
|
if self._dynamic and not self._dynamic_lock:
|
||||||
|
|
||||||
|
field = None
|
||||||
|
if not hasattr(self, name) and not name.startswith('_'):
|
||||||
|
DynamicField = _import_class("DynamicField")
|
||||||
|
field = DynamicField(db_field=name)
|
||||||
|
field.name = name
|
||||||
|
self._dynamic_fields[name] = field
|
||||||
|
|
||||||
|
if not name.startswith('_'):
|
||||||
|
value = self.__expand_dynamic_values(name, value)
|
||||||
|
|
||||||
|
# Handle marking data as changed
|
||||||
|
if name in self._dynamic_fields:
|
||||||
|
self._data[name] = value
|
||||||
|
if hasattr(self, '_changed_fields'):
|
||||||
|
self._mark_as_changed(name)
|
||||||
|
|
||||||
|
if (self._is_document and not self._created and
|
||||||
|
name in self._meta.get('shard_key', tuple()) and
|
||||||
|
self._data.get(name) != value):
|
||||||
|
OperationError = _import_class('OperationError')
|
||||||
|
msg = "Shard Keys are immutable. Tried to update %s" % name
|
||||||
|
raise OperationError(msg)
|
||||||
|
|
||||||
|
# Check if the user has created a new instance of a class
|
||||||
|
if (self._is_document and self._initialised
|
||||||
|
and self._created and name == self._meta['id_field']):
|
||||||
|
super(BaseDocument, self).__setattr__('_created', False)
|
||||||
|
|
||||||
|
super(BaseDocument, self).__setattr__(name, value)
|
||||||
|
|
||||||
|
def __getstate__(self):
|
||||||
|
removals = ("get_%s_display" % k
|
||||||
|
for k, v in self._fields.items() if v.choices)
|
||||||
|
for k in removals:
|
||||||
|
if hasattr(self, k):
|
||||||
|
delattr(self, k)
|
||||||
|
return self.__dict__
|
||||||
|
|
||||||
|
def __setstate__(self, __dict__):
|
||||||
|
self.__dict__ = __dict__
|
||||||
|
self.__set_field_display()
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
if 'id' in self._fields and 'id' not in self._fields_ordered:
|
||||||
|
return iter(('id', ) + self._fields_ordered)
|
||||||
|
|
||||||
|
return iter(self._fields_ordered)
|
||||||
|
|
||||||
|
def __getitem__(self, name):
|
||||||
|
"""Dictionary-style field access, return a field's value if present.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
if name in self._fields:
|
||||||
|
return getattr(self, name)
|
||||||
|
except AttributeError:
|
||||||
|
pass
|
||||||
|
raise KeyError(name)
|
||||||
|
|
||||||
|
def __setitem__(self, name, value):
|
||||||
|
"""Dictionary-style field access, set a field's value.
|
||||||
|
"""
|
||||||
|
# Ensure that the field exists before settings its value
|
||||||
|
if name not in self._fields:
|
||||||
|
raise KeyError(name)
|
||||||
|
return setattr(self, name, value)
|
||||||
|
|
||||||
|
def __contains__(self, name):
|
||||||
|
try:
|
||||||
|
val = getattr(self, name)
|
||||||
|
return val is not None
|
||||||
|
except AttributeError:
|
||||||
|
return False
|
||||||
|
|
||||||
|
def __len__(self):
|
||||||
|
return len(self._data)
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
try:
|
||||||
|
u = self.__str__()
|
||||||
|
except (UnicodeEncodeError, UnicodeDecodeError):
|
||||||
|
u = '[Bad Unicode data]'
|
||||||
|
repr_type = type(u)
|
||||||
|
return repr_type('<%s: %s>' % (self.__class__.__name__, u))
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
if hasattr(self, '__unicode__'):
|
||||||
|
if PY3:
|
||||||
|
return self.__unicode__()
|
||||||
|
else:
|
||||||
|
return unicode(self).encode('utf-8')
|
||||||
|
return txt_type('%s object' % self.__class__.__name__)
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
if isinstance(other, self.__class__) and hasattr(other, 'id'):
|
||||||
|
if self.id == other.id:
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def __ne__(self, other):
|
||||||
|
return not self.__eq__(other)
|
||||||
|
|
||||||
|
def __hash__(self):
|
||||||
|
if self.pk is None:
|
||||||
|
# For new object
|
||||||
|
return super(BaseDocument, self).__hash__()
|
||||||
|
else:
|
||||||
|
return hash(self.pk)
|
||||||
|
|
||||||
|
def clean(self):
|
||||||
|
"""
|
||||||
|
Hook for doing document level data cleaning before validation is run.
|
||||||
|
|
||||||
|
Any ValidationError raised by this method will not be associated with
|
||||||
|
a particular field; it will have a special-case association with the
|
||||||
|
field defined by NON_FIELD_ERRORS.
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
def to_mongo(self):
|
||||||
|
"""Return data dictionary ready for use with MongoDB.
|
||||||
|
"""
|
||||||
|
data = {}
|
||||||
|
for field_name, field in self._fields.iteritems():
|
||||||
|
value = self._data.get(field_name, None)
|
||||||
|
if value is not None:
|
||||||
|
value = field.to_mongo(value)
|
||||||
|
|
||||||
|
# Handle self generating fields
|
||||||
|
if value is None and field._auto_gen:
|
||||||
|
value = field.generate()
|
||||||
|
self._data[field_name] = value
|
||||||
|
|
||||||
|
if value is not None:
|
||||||
|
data[field.db_field] = value
|
||||||
|
|
||||||
|
# Only add _cls if allow_inheritance is True
|
||||||
|
if (hasattr(self, '_meta') and
|
||||||
|
self._meta.get('allow_inheritance', ALLOW_INHERITANCE) == True):
|
||||||
|
data['_cls'] = self._class_name
|
||||||
|
|
||||||
|
if '_id' in data and data['_id'] is None:
|
||||||
|
del data['_id']
|
||||||
|
|
||||||
|
if not self._dynamic:
|
||||||
|
return data
|
||||||
|
|
||||||
|
for name, field in self._dynamic_fields.items():
|
||||||
|
data[name] = field.to_mongo(self._data.get(name, None))
|
||||||
|
return data
|
||||||
|
|
||||||
|
def validate(self, clean=True):
|
||||||
|
"""Ensure that all fields' values are valid and that required fields
|
||||||
|
are present.
|
||||||
|
"""
|
||||||
|
# Ensure that each field is matched to a valid value
|
||||||
|
errors = {}
|
||||||
|
if clean:
|
||||||
|
try:
|
||||||
|
self.clean()
|
||||||
|
except ValidationError, error:
|
||||||
|
errors[NON_FIELD_ERRORS] = error
|
||||||
|
|
||||||
|
# Get a list of tuples of field names and their current values
|
||||||
|
fields = [(field, self._data.get(name))
|
||||||
|
for name, field in self._fields.items()]
|
||||||
|
if self._dynamic:
|
||||||
|
fields += [(field, self._data.get(name))
|
||||||
|
for name, field in self._dynamic_fields.items()]
|
||||||
|
|
||||||
|
EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
|
||||||
|
GenericEmbeddedDocumentField = _import_class("GenericEmbeddedDocumentField")
|
||||||
|
|
||||||
|
for field, value in fields:
|
||||||
|
if value is not None:
|
||||||
|
try:
|
||||||
|
if isinstance(field, (EmbeddedDocumentField,
|
||||||
|
GenericEmbeddedDocumentField)):
|
||||||
|
field._validate(value, clean=clean)
|
||||||
|
else:
|
||||||
|
field._validate(value)
|
||||||
|
except ValidationError, error:
|
||||||
|
errors[field.name] = error.errors or error
|
||||||
|
except (ValueError, AttributeError, AssertionError), error:
|
||||||
|
errors[field.name] = error
|
||||||
|
elif field.required and not getattr(field, '_auto_gen', False):
|
||||||
|
errors[field.name] = ValidationError('Field is required',
|
||||||
|
field_name=field.name)
|
||||||
|
|
||||||
|
if errors:
|
||||||
|
pk = "None"
|
||||||
|
if hasattr(self, 'pk'):
|
||||||
|
pk = self.pk
|
||||||
|
elif self._instance:
|
||||||
|
pk = self._instance.pk
|
||||||
|
message = "ValidationError (%s:%s) " % (self._class_name, pk)
|
||||||
|
raise ValidationError(message, errors=errors)
|
||||||
|
|
||||||
|
def to_json(self):
|
||||||
|
"""Converts a document to JSON"""
|
||||||
|
return json_util.dumps(self.to_mongo())
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_json(cls, json_data):
|
||||||
|
"""Converts json data to an unsaved document instance"""
|
||||||
|
return cls._from_son(json_util.loads(json_data))
|
||||||
|
|
||||||
|
def __expand_dynamic_values(self, name, value):
|
||||||
|
"""expand any dynamic values to their correct types / values"""
|
||||||
|
if not isinstance(value, (dict, list, tuple)):
|
||||||
|
return value
|
||||||
|
|
||||||
|
is_list = False
|
||||||
|
if not hasattr(value, 'items'):
|
||||||
|
is_list = True
|
||||||
|
value = dict([(k, v) for k, v in enumerate(value)])
|
||||||
|
|
||||||
|
if not is_list and '_cls' in value:
|
||||||
|
cls = get_document(value['_cls'])
|
||||||
|
return cls(**value)
|
||||||
|
|
||||||
|
data = {}
|
||||||
|
for k, v in value.items():
|
||||||
|
key = name if is_list else k
|
||||||
|
data[k] = self.__expand_dynamic_values(key, v)
|
||||||
|
|
||||||
|
if is_list: # Convert back to a list
|
||||||
|
data_items = sorted(data.items(), key=operator.itemgetter(0))
|
||||||
|
value = [v for k, v in data_items]
|
||||||
|
else:
|
||||||
|
value = data
|
||||||
|
|
||||||
|
# Convert lists / values so we can watch for any changes on them
|
||||||
|
if (isinstance(value, (list, tuple)) and
|
||||||
|
not isinstance(value, BaseList)):
|
||||||
|
value = BaseList(value, self, name)
|
||||||
|
elif isinstance(value, dict) and not isinstance(value, BaseDict):
|
||||||
|
value = BaseDict(value, self, name)
|
||||||
|
|
||||||
|
return value
|
||||||
|
|
||||||
|
def _mark_as_changed(self, key):
|
||||||
|
"""Marks a key as explicitly changed by the user
|
||||||
|
"""
|
||||||
|
if not key:
|
||||||
|
return
|
||||||
|
key = self._db_field_map.get(key, key)
|
||||||
|
if (hasattr(self, '_changed_fields') and
|
||||||
|
key not in self._changed_fields):
|
||||||
|
self._changed_fields.append(key)
|
||||||
|
|
||||||
|
def _clear_changed_fields(self):
|
||||||
|
self._changed_fields = []
|
||||||
|
EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
|
||||||
|
for field_name, field in self._fields.iteritems():
|
||||||
|
if (isinstance(field, ComplexBaseField) and
|
||||||
|
isinstance(field.field, EmbeddedDocumentField)):
|
||||||
|
field_value = getattr(self, field_name, None)
|
||||||
|
if field_value:
|
||||||
|
for idx in (field_value if isinstance(field_value, dict)
|
||||||
|
else xrange(len(field_value))):
|
||||||
|
field_value[idx]._clear_changed_fields()
|
||||||
|
elif isinstance(field, EmbeddedDocumentField):
|
||||||
|
field_value = getattr(self, field_name, None)
|
||||||
|
if field_value:
|
||||||
|
field_value._clear_changed_fields()
|
||||||
|
|
||||||
|
def _get_changed_fields(self, key='', inspected=None):
|
||||||
|
"""Returns a list of all fields that have explicitly been changed.
|
||||||
|
"""
|
||||||
|
EmbeddedDocument = _import_class("EmbeddedDocument")
|
||||||
|
DynamicEmbeddedDocument = _import_class("DynamicEmbeddedDocument")
|
||||||
|
_changed_fields = []
|
||||||
|
_changed_fields += getattr(self, '_changed_fields', [])
|
||||||
|
|
||||||
|
inspected = inspected or set()
|
||||||
|
if hasattr(self, 'id'):
|
||||||
|
if self.id in inspected:
|
||||||
|
return _changed_fields
|
||||||
|
inspected.add(self.id)
|
||||||
|
|
||||||
|
field_list = self._fields.copy()
|
||||||
|
if self._dynamic:
|
||||||
|
field_list.update(self._dynamic_fields)
|
||||||
|
|
||||||
|
for field_name in field_list:
|
||||||
|
|
||||||
|
db_field_name = self._db_field_map.get(field_name, field_name)
|
||||||
|
key = '%s.' % db_field_name
|
||||||
|
field = self._data.get(field_name, None)
|
||||||
|
if hasattr(field, 'id'):
|
||||||
|
if field.id in inspected:
|
||||||
|
continue
|
||||||
|
inspected.add(field.id)
|
||||||
|
|
||||||
|
if (isinstance(field, (EmbeddedDocument, DynamicEmbeddedDocument))
|
||||||
|
and db_field_name not in _changed_fields):
|
||||||
|
# Find all embedded fields that have been changed
|
||||||
|
changed = field._get_changed_fields(key, inspected)
|
||||||
|
_changed_fields += ["%s%s" % (key, k) for k in changed if k]
|
||||||
|
elif (isinstance(field, (list, tuple, dict)) and
|
||||||
|
db_field_name not in _changed_fields):
|
||||||
|
# Loop list / dict fields as they contain documents
|
||||||
|
# Determine the iterator to use
|
||||||
|
if not hasattr(field, 'items'):
|
||||||
|
iterator = enumerate(field)
|
||||||
|
else:
|
||||||
|
iterator = field.iteritems()
|
||||||
|
for index, value in iterator:
|
||||||
|
if not hasattr(value, '_get_changed_fields'):
|
||||||
|
continue
|
||||||
|
list_key = "%s%s." % (key, index)
|
||||||
|
changed = value._get_changed_fields(list_key, inspected)
|
||||||
|
_changed_fields += ["%s%s" % (list_key, k)
|
||||||
|
for k in changed if k]
|
||||||
|
return _changed_fields
|
||||||
|
|
||||||
|
def _delta(self):
|
||||||
|
"""Returns the delta (set, unset) of the changes for a document.
|
||||||
|
Gets any values that have been explicitly changed.
|
||||||
|
"""
|
||||||
|
# Handles cases where not loaded from_son but has _id
|
||||||
|
doc = self.to_mongo()
|
||||||
|
|
||||||
|
set_fields = self._get_changed_fields()
|
||||||
|
set_data = {}
|
||||||
|
unset_data = {}
|
||||||
|
parts = []
|
||||||
|
if hasattr(self, '_changed_fields'):
|
||||||
|
set_data = {}
|
||||||
|
# Fetch each set item from its path
|
||||||
|
for path in set_fields:
|
||||||
|
parts = path.split('.')
|
||||||
|
d = doc
|
||||||
|
new_path = []
|
||||||
|
for p in parts:
|
||||||
|
if isinstance(d, DBRef):
|
||||||
|
break
|
||||||
|
elif isinstance(d, list) and p.isdigit():
|
||||||
|
d = d[int(p)]
|
||||||
|
elif hasattr(d, 'get'):
|
||||||
|
d = d.get(p)
|
||||||
|
new_path.append(p)
|
||||||
|
path = '.'.join(new_path)
|
||||||
|
set_data[path] = d
|
||||||
|
else:
|
||||||
|
set_data = doc
|
||||||
|
if '_id' in set_data:
|
||||||
|
del(set_data['_id'])
|
||||||
|
|
||||||
|
# Determine if any changed items were actually unset.
|
||||||
|
for path, value in set_data.items():
|
||||||
|
if value or isinstance(value, (numbers.Number, bool)):
|
||||||
|
continue
|
||||||
|
|
||||||
|
# If we've set a value that ain't the default value dont unset it.
|
||||||
|
default = None
|
||||||
|
if (self._dynamic and len(parts) and parts[0] in
|
||||||
|
self._dynamic_fields):
|
||||||
|
del(set_data[path])
|
||||||
|
unset_data[path] = 1
|
||||||
|
continue
|
||||||
|
elif path in self._fields:
|
||||||
|
default = self._fields[path].default
|
||||||
|
else: # Perform a full lookup for lists / embedded lookups
|
||||||
|
d = self
|
||||||
|
parts = path.split('.')
|
||||||
|
db_field_name = parts.pop()
|
||||||
|
for p in parts:
|
||||||
|
if isinstance(d, list) and p.isdigit():
|
||||||
|
d = d[int(p)]
|
||||||
|
elif (hasattr(d, '__getattribute__') and
|
||||||
|
not isinstance(d, dict)):
|
||||||
|
real_path = d._reverse_db_field_map.get(p, p)
|
||||||
|
d = getattr(d, real_path)
|
||||||
|
else:
|
||||||
|
d = d.get(p)
|
||||||
|
|
||||||
|
if hasattr(d, '_fields'):
|
||||||
|
field_name = d._reverse_db_field_map.get(db_field_name,
|
||||||
|
db_field_name)
|
||||||
|
if field_name in d._fields:
|
||||||
|
default = d._fields.get(field_name).default
|
||||||
|
else:
|
||||||
|
default = None
|
||||||
|
|
||||||
|
if default is not None:
|
||||||
|
if callable(default):
|
||||||
|
default = default()
|
||||||
|
|
||||||
|
if default != value:
|
||||||
|
continue
|
||||||
|
|
||||||
|
del(set_data[path])
|
||||||
|
unset_data[path] = 1
|
||||||
|
return set_data, unset_data
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _get_collection_name(cls):
|
||||||
|
"""Returns the collection name for this class.
|
||||||
|
"""
|
||||||
|
return cls._meta.get('collection', None)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _from_son(cls, son, _auto_dereference=True):
|
||||||
|
"""Create an instance of a Document (subclass) from a PyMongo SON.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# get the class name from the document, falling back to the given
|
||||||
|
# class if unavailable
|
||||||
|
class_name = son.get('_cls', cls._class_name)
|
||||||
|
data = dict(("%s" % key, value) for key, value in son.iteritems())
|
||||||
|
if not UNICODE_KWARGS:
|
||||||
|
# python 2.6.4 and lower cannot handle unicode keys
|
||||||
|
# passed to class constructor example: cls(**data)
|
||||||
|
to_str_keys_recursive(data)
|
||||||
|
|
||||||
|
# Return correct subclass for document type
|
||||||
|
if class_name != cls._class_name:
|
||||||
|
cls = get_document(class_name)
|
||||||
|
|
||||||
|
changed_fields = []
|
||||||
|
errors_dict = {}
|
||||||
|
|
||||||
|
fields = cls._fields
|
||||||
|
if not _auto_dereference:
|
||||||
|
fields = copy.copy(fields)
|
||||||
|
|
||||||
|
for field_name, field in fields.iteritems():
|
||||||
|
field._auto_dereference = _auto_dereference
|
||||||
|
if field.db_field in data:
|
||||||
|
value = data[field.db_field]
|
||||||
|
try:
|
||||||
|
data[field_name] = (value if value is None
|
||||||
|
else field.to_python(value))
|
||||||
|
if field_name != field.db_field:
|
||||||
|
del data[field.db_field]
|
||||||
|
except (AttributeError, ValueError), e:
|
||||||
|
errors_dict[field_name] = e
|
||||||
|
elif field.default:
|
||||||
|
default = field.default
|
||||||
|
if callable(default):
|
||||||
|
default = default()
|
||||||
|
if isinstance(default, BaseDocument):
|
||||||
|
changed_fields.append(field_name)
|
||||||
|
|
||||||
|
if errors_dict:
|
||||||
|
errors = "\n".join(["%s - %s" % (k, v)
|
||||||
|
for k, v in errors_dict.items()])
|
||||||
|
msg = ("Invalid data to create a `%s` instance.\n%s"
|
||||||
|
% (cls._class_name, errors))
|
||||||
|
raise InvalidDocumentError(msg)
|
||||||
|
|
||||||
|
obj = cls(__auto_convert=False, **data)
|
||||||
|
obj._changed_fields = changed_fields
|
||||||
|
obj._created = False
|
||||||
|
if not _auto_dereference:
|
||||||
|
obj._fields = fields
|
||||||
|
return obj
|
||||||
|
|
||||||
|
    @classmethod
    def _build_index_specs(cls, meta_indexes):
        """Generate and merge the full index specs
        """

        geo_indices = cls._geo_indices()
        unique_indices = cls._unique_with_indexes()
        index_specs = [cls._build_index_spec(spec)
                       for spec in meta_indexes]

        def merge_index_specs(index_specs, indices):
            if not indices:
                return index_specs

            spec_fields = [v['fields']
                           for k, v in enumerate(index_specs)]
            # Merge unique_indexes with existing specs
            for k, v in enumerate(indices):
                if v['fields'] in spec_fields:
                    index_specs[spec_fields.index(v['fields'])].update(v)
                else:
                    index_specs.append(v)
            return index_specs

        index_specs = merge_index_specs(index_specs, geo_indices)
        index_specs = merge_index_specs(index_specs, unique_indices)
        return index_specs
    @classmethod
    def _build_index_spec(cls, spec):
        """Build a PyMongo index spec from a MongoEngine index spec.
        """
        if isinstance(spec, basestring):
            spec = {'fields': [spec]}
        elif isinstance(spec, (list, tuple)):
            spec = {'fields': list(spec)}
        elif isinstance(spec, dict):
            spec = dict(spec)

        index_list = []
        direction = None

        # Check to see if we need to include _cls
        allow_inheritance = cls._meta.get('allow_inheritance',
                                          ALLOW_INHERITANCE)
        include_cls = allow_inheritance and not spec.get('sparse', False)

        for key in spec['fields']:
            # If inherited spec continue
            if isinstance(key, (list, tuple)):
                continue

            # ASCENDING from +,
            # DESCENDING from -
            # GEO2D from *
            direction = pymongo.ASCENDING
            if key.startswith("-"):
                direction = pymongo.DESCENDING
            elif key.startswith("*"):
                direction = pymongo.GEO2D
            if key.startswith(("+", "-", "*")):
                key = key[1:]

            # Use real field name, do it manually because we need field
            # objects for the next part (list field checking)
            parts = key.split('.')
            if parts in (['pk'], ['id'], ['_id']):
                key = '_id'
                fields = []
            else:
                fields = cls._lookup_field(parts)
                parts = [field if field == '_id' else field.db_field
                         for field in fields]
                key = '.'.join(parts)
            index_list.append((key, direction))

        # Don't add cls to a geo index
        if include_cls and direction is not pymongo.GEO2D:
            index_list.insert(0, ('_cls', 1))

        spec['fields'] = index_list
        if spec.get('sparse', False) and len(spec['fields']) > 1:
            raise ValueError(
                'Sparse indexes can only have one field in them. '
                'See https://jira.mongodb.org/browse/SERVER-2193')

        return spec
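
    # Illustrative sketch, not part of the diff: with inheritance allowed,
    # a hypothetical BlogPost class would have its index spec expanded so
    # that '+'/'-' prefixes become sort directions and _cls is prepended.
    #
    #     BlogPost._build_index_spec(('+title', '-date'))
    #     # => {'fields': [('_cls', 1), ('title', 1), ('date', -1)]}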
    @classmethod
    def _unique_with_indexes(cls, namespace=""):
        """
        Find and set unique indexes
        """
        unique_indexes = []
        for field_name, field in cls._fields.items():
            sparse = False
            # Generate a list of indexes needed by uniqueness constraints
            if field.unique:
                field.required = True
                unique_fields = [field.db_field]

                # Add any unique_with fields to the back of the index spec
                if field.unique_with:
                    if isinstance(field.unique_with, basestring):
                        field.unique_with = [field.unique_with]

                    # Convert unique_with field names to real field names
                    unique_with = []
                    for other_name in field.unique_with:
                        parts = other_name.split('.')
                        # Lookup real name
                        parts = cls._lookup_field(parts)
                        name_parts = [part.db_field for part in parts]
                        unique_with.append('.'.join(name_parts))
                        # Unique field should be required
                        parts[-1].required = True
                        sparse = (not sparse and
                                  parts[-1].name not in cls.__dict__)
                    unique_fields += unique_with

                # Add the new index to the list
                fields = [("%s%s" % (namespace, f), pymongo.ASCENDING)
                          for f in unique_fields]
                index = {'fields': fields, 'unique': True, 'sparse': sparse}
                unique_indexes.append(index)

            # Grab any embedded document field unique indexes
            if (field.__class__.__name__ == "EmbeddedDocumentField" and
               field.document_type != cls):
                field_namespace = "%s." % field_name
                doc_cls = field.document_type
                unique_indexes += doc_cls._unique_with_indexes(field_namespace)

        return unique_indexes
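
    # Illustrative sketch, not part of the diff: a unique_with constraint
    # on a hypothetical User class yields a single compound unique index.
    #
    #     class User(Document):
    #         username = StringField(unique_with='email')
    #         email = StringField()
    #
    #     User._unique_with_indexes()
    #     # => [{'fields': [('username', 1), ('email', 1)],
    #     #      'unique': True, 'sparse': False}]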
    @classmethod
    def _geo_indices(cls, inspected=None):
        inspected = inspected or []
        geo_indices = []
        inspected.append(cls)

        EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
        GeoPointField = _import_class("GeoPointField")

        for field in cls._fields.values():
            if not isinstance(field, (EmbeddedDocumentField, GeoPointField)):
                continue
            if hasattr(field, 'document_type'):
                field_cls = field.document_type
                if field_cls in inspected:
                    continue
                if hasattr(field_cls, '_geo_indices'):
                    geo_indices += field_cls._geo_indices(inspected)
            elif field._geo_index:
                geo_indices.append({'fields':
                                    [(field.db_field, pymongo.GEO2D)]})
        return geo_indices
    @classmethod
    def _lookup_field(cls, parts):
        """Lookup a field based on its attribute and return a list containing
        the field's parents and the field.
        """
        if not isinstance(parts, (list, tuple)):
            parts = [parts]
        fields = []
        field = None

        for field_name in parts:
            # Handle ListField indexing:
            if field_name.isdigit():
                new_field = field.field
                fields.append(field_name)
                continue

            if field is None:
                # Look up first field from the document
                if field_name == 'pk':
                    # Deal with "primary key" alias
                    field_name = cls._meta['id_field']
                if field_name in cls._fields:
                    field = cls._fields[field_name]
                elif cls._dynamic:
                    DynamicField = _import_class('DynamicField')
                    field = DynamicField(db_field=field_name)
                else:
                    raise LookUpError('Cannot resolve field "%s"'
                                      % field_name)
            else:
                ReferenceField = _import_class('ReferenceField')
                GenericReferenceField = _import_class('GenericReferenceField')
                if isinstance(field, (ReferenceField, GenericReferenceField)):
                    raise LookUpError('Cannot perform join in mongoDB: %s' %
                                      '__'.join(parts))
                if hasattr(getattr(field, 'field', None), 'lookup_member'):
                    new_field = field.field.lookup_member(field_name)
                else:
                    # Look up subfield on the previous field
                    new_field = field.lookup_member(field_name)
                if not new_field and isinstance(field, ComplexBaseField):
                    fields.append(field_name)
                    continue
                elif not new_field:
                    raise LookUpError('Cannot resolve field "%s"'
                                      % field_name)
                field = new_field  # update field to the new field type
            fields.append(field)
        return fields
    @classmethod
    def _translate_field_name(cls, field, sep='.'):
        """Translate a field attribute name to a database field name.
        """
        parts = field.split(sep)
        parts = [f.db_field for f in cls._lookup_field(parts)]
        return '.'.join(parts)
    def __set_field_display(self):
        """Dynamically set the display value for a field with choices"""
        for attr_name, field in self._fields.items():
            if field.choices:
                setattr(self,
                        'get_%s_display' % attr_name,
                        partial(self.__get_field_display, field=field))

    def __get_field_display(self, field):
        """Returns the display value for a choice field"""
        value = getattr(self, field.name)
        if field.choices and isinstance(field.choices[0], (list, tuple)):
            return dict(field.choices).get(value, value)
        return value
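
    # Illustrative sketch, not part of the diff: a field with choices on a
    # hypothetical Shirt class gains a dynamic get_<name>_display method,
    # mirroring Django's behaviour.
    #
    #     class Shirt(Document):
    #         size = StringField(choices=(('S', 'Small'), ('L', 'Large')))
    #
    #     Shirt(size='S').get_size_display()  # => 'Small'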

mongoengine/base/fields.py | 395 lines (new file)
@@ -0,0 +1,395 @@
import operator
import warnings
import weakref

from bson import DBRef, ObjectId

from mongoengine.common import _import_class
from mongoengine.errors import ValidationError

from mongoengine.base.common import ALLOW_INHERITANCE
from mongoengine.base.datastructures import BaseDict, BaseList

__all__ = ("BaseField", "ComplexBaseField", "ObjectIdField")


class BaseField(object):
    """A base class for fields in a MongoDB document. Instances of this class
    may be added to subclasses of `Document` to define a document's schema.

    .. versionchanged:: 0.5 - added verbose and help text
    """

    name = None
    _geo_index = False
    _auto_gen = False  # Call `generate` to generate a value
    _auto_dereference = True

    # These track each time a Field instance is created. Used to retain order.
    # The auto_creation_counter is used for fields that MongoEngine implicitly
    # creates, creation_counter is used for all user-specified fields.
    creation_counter = 0
    auto_creation_counter = -1

    def __init__(self, db_field=None, name=None, required=False, default=None,
                 unique=False, unique_with=None, primary_key=False,
                 validation=None, choices=None, verbose_name=None,
                 help_text=None):
        self.db_field = (db_field or name) if not primary_key else '_id'
        if name:
            msg = "Fields' 'name' attribute deprecated in favour of 'db_field'"
            warnings.warn(msg, DeprecationWarning)
        self.required = required or primary_key
        self.default = default
        self.unique = bool(unique or unique_with)
        self.unique_with = unique_with
        self.primary_key = primary_key
        self.validation = validation
        self.choices = choices
        self.verbose_name = verbose_name
        self.help_text = help_text

        # Adjust the appropriate creation counter, and save our local copy.
        if self.db_field == '_id':
            self.creation_counter = BaseField.auto_creation_counter
            BaseField.auto_creation_counter -= 1
        else:
            self.creation_counter = BaseField.creation_counter
            BaseField.creation_counter += 1

    def __get__(self, instance, owner):
        """Descriptor for retrieving a value from a field in a document. Do
        any necessary conversion between Python and MongoDB types.
        """
        if instance is None:
            # Document class being used rather than a document object
            return self
        # Get value from document instance if available, if not use default
        value = instance._data.get(self.name)

        if value is None:
            value = self.default
            # Allow callable default values
            if callable(value):
                value = value()

        EmbeddedDocument = _import_class('EmbeddedDocument')
        if isinstance(value, EmbeddedDocument) and value._instance is None:
            value._instance = weakref.proxy(instance)
        return value

    def __set__(self, instance, value):
        """Descriptor for assigning a value to a field in a document.
        """
        changed = False
        if (self.name not in instance._data or
           instance._data[self.name] != value):
            changed = True
        instance._data[self.name] = value
        if changed and instance._initialised:
            instance._mark_as_changed(self.name)

    def error(self, message="", errors=None, field_name=None):
        """Raises a ValidationError.
        """
        field_name = field_name if field_name else self.name
        raise ValidationError(message, errors=errors, field_name=field_name)

    def to_python(self, value):
        """Convert a MongoDB-compatible type to a Python type.
        """
        return value

    def to_mongo(self, value):
        """Convert a Python type to a MongoDB-compatible type.
        """
        return self.to_python(value)

    def prepare_query_value(self, op, value):
        """Prepare a value that is being used in a query for PyMongo.
        """
        return value

    def validate(self, value, clean=True):
        """Perform validation on a value.
        """
        pass

    def _validate(self, value, **kwargs):
        Document = _import_class('Document')
        EmbeddedDocument = _import_class('EmbeddedDocument')
        # check choices
        if self.choices:
            is_cls = isinstance(value, (Document, EmbeddedDocument))
            value_to_check = value.__class__ if is_cls else value
            err_msg = 'an instance' if is_cls else 'one'
            if isinstance(self.choices[0], (list, tuple)):
                option_keys = [k for k, v in self.choices]
                if value_to_check not in option_keys:
                    msg = ('Value must be %s of %s' %
                           (err_msg, unicode(option_keys)))
                    self.error(msg)
            elif value_to_check not in self.choices:
                msg = ('Value must be %s of %s' %
                       (err_msg, unicode(self.choices)))
                self.error(msg)

        # check validation argument
        if self.validation is not None:
            if callable(self.validation):
                if not self.validation(value):
                    self.error('Value does not match custom validation method')
            else:
                raise ValueError('validation argument for "%s" must be a '
                                 'callable.' % self.name)

        self.validate(value, **kwargs)
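
# Illustrative sketch, not part of the diff: the minimal contract for a
# custom field built on BaseField is to override to_python / to_mongo /
# validate. UpperStringField is a hypothetical example.
#
#     class UpperStringField(BaseField):
#
#         def to_python(self, value):
#             return unicode(value)
#
#         def to_mongo(self, value):
#             return unicode(value).upper()
#
#         def validate(self, value, clean=True):
#             if not isinstance(value, basestring):
#                 self.error('UpperStringField only accepts strings')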

class ComplexBaseField(BaseField):
    """Handles complex fields, such as lists / dictionaries.

    Allows for nesting of embedded documents inside complex types.
    Handles the lazy dereferencing of a queryset by lazily dereferencing all
    items in a list / dict rather than one at a time.

    .. versionadded:: 0.5
    """

    field = None
    __dereference = False

    def __get__(self, instance, owner):
        """Descriptor to automatically dereference references.
        """
        if instance is None:
            # Document class being used rather than a document object
            return self

        ReferenceField = _import_class('ReferenceField')
        GenericReferenceField = _import_class('GenericReferenceField')
        dereference = (self._auto_dereference and
                       (self.field is None or isinstance(self.field,
                        (GenericReferenceField, ReferenceField))))

        self._auto_dereference = instance._fields[self.name]._auto_dereference
        if not self.__dereference and instance._initialised and dereference:
            instance._data[self.name] = self._dereference(
                instance._data.get(self.name), max_depth=1, instance=instance,
                name=self.name
            )

        value = super(ComplexBaseField, self).__get__(instance, owner)

        # Convert lists / values so we can watch for any changes on them
        if (isinstance(value, (list, tuple)) and
           not isinstance(value, BaseList)):
            value = BaseList(value, instance, self.name)
            instance._data[self.name] = value
        elif isinstance(value, dict) and not isinstance(value, BaseDict):
            value = BaseDict(value, instance, self.name)
            instance._data[self.name] = value

        if (self._auto_dereference and instance._initialised and
           isinstance(value, (BaseList, BaseDict))
           and not value._dereferenced):
            value = self._dereference(
                value, max_depth=1, instance=instance, name=self.name
            )
            value._dereferenced = True
            instance._data[self.name] = value

        return value

    def __set__(self, instance, value):
        """Descriptor for assigning a value to a field in a document.
        """
        instance._data[self.name] = value
        instance._mark_as_changed(self.name)

    def to_python(self, value):
        """Convert a MongoDB-compatible type to a Python type.
        """
        Document = _import_class('Document')

        if isinstance(value, basestring):
            return value

        if hasattr(value, 'to_python'):
            return value.to_python()

        is_list = False
        if not hasattr(value, 'items'):
            try:
                is_list = True
                value = dict([(k, v) for k, v in enumerate(value)])
            except TypeError:  # Not iterable return the value
                return value

        if self.field:
            value_dict = dict([(key, self.field.to_python(item))
                               for key, item in value.items()])
        else:
            value_dict = {}
            for k, v in value.items():
                if isinstance(v, Document):
                    # We need the id from the saved object to create the DBRef
                    if v.pk is None:
                        self.error('You can only reference documents once they'
                                   ' have been saved to the database')
                    collection = v._get_collection_name()
                    value_dict[k] = DBRef(collection, v.pk)
                elif hasattr(v, 'to_python'):
                    value_dict[k] = v.to_python()
                else:
                    value_dict[k] = self.to_python(v)

        if is_list:  # Convert back to a list
            return [v for k, v in sorted(value_dict.items(),
                                         key=operator.itemgetter(0))]
        return value_dict

    def to_mongo(self, value):
        """Convert a Python type to a MongoDB-compatible type.
        """
        Document = _import_class("Document")
        EmbeddedDocument = _import_class("EmbeddedDocument")
        GenericReferenceField = _import_class("GenericReferenceField")

        if isinstance(value, basestring):
            return value

        if hasattr(value, 'to_mongo'):
            if isinstance(value, Document):
                return GenericReferenceField().to_mongo(value)
            cls = value.__class__
            val = value.to_mongo()
            # If it's a document that's not inherited add _cls
            if (isinstance(value, EmbeddedDocument)):
                val['_cls'] = cls.__name__
            return val

        is_list = False
        if not hasattr(value, 'items'):
            try:
                is_list = True
                value = dict([(k, v) for k, v in enumerate(value)])
            except TypeError:  # Not iterable return the value
                return value

        if self.field:
            value_dict = dict([(key, self.field.to_mongo(item))
                               for key, item in value.iteritems()])
        else:
            value_dict = {}
            for k, v in value.iteritems():
                if isinstance(v, Document):
                    # We need the id from the saved object to create the DBRef
                    if v.pk is None:
                        self.error('You can only reference documents once they'
                                   ' have been saved to the database')

                    # If it's a document that is not inheritable it won't have
                    # any _cls data, so making it a generic reference allows
                    # us to dereference
                    meta = getattr(v, '_meta', {})
                    allow_inheritance = (
                        meta.get('allow_inheritance', ALLOW_INHERITANCE)
                        == True)
                    if not allow_inheritance and not self.field:
                        value_dict[k] = GenericReferenceField().to_mongo(v)
                    else:
                        collection = v._get_collection_name()
                        value_dict[k] = DBRef(collection, v.pk)
                elif hasattr(v, 'to_mongo'):
                    cls = v.__class__
                    val = v.to_mongo()
                    # If it's a document that's not inherited add _cls
                    if (isinstance(v, (Document, EmbeddedDocument))):
                        val['_cls'] = cls.__name__
                    value_dict[k] = val
                else:
                    value_dict[k] = self.to_mongo(v)

        if is_list:  # Convert back to a list
            return [v for k, v in sorted(value_dict.items(),
                                         key=operator.itemgetter(0))]
        return value_dict

    def validate(self, value):
        """If field is provided ensure the value is valid.
        """
        errors = {}
        if self.field:
            if hasattr(value, 'iteritems') or hasattr(value, 'items'):
                sequence = value.iteritems()
            else:
                sequence = enumerate(value)
            for k, v in sequence:
                try:
                    self.field._validate(v)
                except ValidationError, error:
                    errors[k] = error.errors or error
                except (ValueError, AssertionError), error:
                    errors[k] = error

            if errors:
                field_class = self.field.__class__.__name__
                self.error('Invalid %s item (%s)' % (field_class, value),
                           errors=errors)
        # Don't allow empty values if required
        if self.required and not value:
            self.error('Field is required and cannot be empty')

    def prepare_query_value(self, op, value):
        return self.to_mongo(value)

    def lookup_member(self, member_name):
        if self.field:
            return self.field.lookup_member(member_name)
        return None

    def _set_owner_document(self, owner_document):
        if self.field:
            self.field.owner_document = owner_document
        self._owner_document = owner_document

    def _get_owner_document(self, owner_document):
        self._owner_document = owner_document

    owner_document = property(_get_owner_document, _set_owner_document)

    @property
    def _dereference(self):
        if not self.__dereference:
            DeReference = _import_class("DeReference")
            self.__dereference = DeReference()  # Cached
        return self.__dereference


class ObjectIdField(BaseField):
    """A field wrapper around MongoDB's ObjectIds.
    """

    def to_python(self, value):
        if not isinstance(value, ObjectId):
            value = ObjectId(value)
        return value

    def to_mongo(self, value):
        if not isinstance(value, ObjectId):
            try:
                return ObjectId(unicode(value))
            except Exception, e:
                # e.message attribute has been deprecated since Python 2.6
                self.error(unicode(e))
        return value

    def prepare_query_value(self, op, value):
        return self.to_mongo(value)

    def validate(self, value):
        try:
            ObjectId(unicode(value))
        except:
            self.error('Invalid Object ID')
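
# Illustrative sketch, not part of the diff: ObjectIdField converts
# transparently between strings and bson.ObjectId, and rejects malformed
# ids through validate().
#
#     field = ObjectIdField()
#     oid = field.to_mongo('50d4e1e01a4f3f0d7cbd4f7d')  # => ObjectId(...)
#     field.validate(oid)                 # passes
#     field.validate('not-an-object-id')  # raises ValidationError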

mongoengine/base/metaclasses.py | 396 lines (new file)
@@ -0,0 +1,396 @@
import warnings

import pymongo

from mongoengine.common import _import_class
from mongoengine.errors import InvalidDocumentError
from mongoengine.python_support import PY3
from mongoengine.queryset import (DO_NOTHING, DoesNotExist,
                                  MultipleObjectsReturned,
                                  QuerySet, QuerySetManager)

from mongoengine.base.common import _document_registry, ALLOW_INHERITANCE
from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField

__all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass')


class DocumentMetaclass(type):
    """Metaclass for all documents.
    """

    def __new__(cls, name, bases, attrs):
        flattened_bases = cls._get_bases(bases)
        super_new = super(DocumentMetaclass, cls).__new__

        # If a base class just call super
        metaclass = attrs.get('my_metaclass')
        if metaclass and issubclass(metaclass, DocumentMetaclass):
            return super_new(cls, name, bases, attrs)

        attrs['_is_document'] = attrs.get('_is_document', False)

        # EmbeddedDocuments could have meta data for inheritance
        if 'meta' in attrs:
            attrs['_meta'] = attrs.pop('meta')

        # EmbeddedDocuments should inherit meta data
        if '_meta' not in attrs:
            meta = MetaDict()
            for base in flattened_bases[::-1]:
                # Add any mixin metadata from plain objects
                if hasattr(base, 'meta'):
                    meta.merge(base.meta)
                elif hasattr(base, '_meta'):
                    meta.merge(base._meta)
            attrs['_meta'] = meta

        # Handle document Fields

        # Merge all fields from subclasses
        doc_fields = {}
        for base in flattened_bases[::-1]:
            if hasattr(base, '_fields'):
                doc_fields.update(base._fields)

            # Standard object mixin - merge in any Fields
            if not hasattr(base, '_meta'):
                base_fields = {}
                for attr_name, attr_value in base.__dict__.iteritems():
                    if not isinstance(attr_value, BaseField):
                        continue
                    attr_value.name = attr_name
                    if not attr_value.db_field:
                        attr_value.db_field = attr_name
                    base_fields[attr_name] = attr_value

                doc_fields.update(base_fields)

        # Discover any document fields
        field_names = {}
        for attr_name, attr_value in attrs.iteritems():
            if not isinstance(attr_value, BaseField):
                continue
            attr_value.name = attr_name
            if not attr_value.db_field:
                attr_value.db_field = attr_name
            doc_fields[attr_name] = attr_value

            # Count names to ensure no db_field redefinitions
            field_names[attr_value.db_field] = field_names.get(
                attr_value.db_field, 0) + 1

        # Ensure no duplicate db_fields
        duplicate_db_fields = [k for k, v in field_names.items() if v > 1]
        if duplicate_db_fields:
            msg = ("Multiple db_fields defined for: %s " %
                   ", ".join(duplicate_db_fields))
            raise InvalidDocumentError(msg)

        # Set _fields and db_field maps
        attrs['_fields'] = doc_fields
        attrs['_db_field_map'] = dict([(k, getattr(v, 'db_field', k))
                                       for k, v in doc_fields.iteritems()])
        attrs['_fields_ordered'] = tuple(i[1] for i in sorted(
                                         (v.creation_counter, v.name)
                                         for v in doc_fields.itervalues()))
        attrs['_reverse_db_field_map'] = dict(
            (v, k) for k, v in attrs['_db_field_map'].iteritems())

        #
        # Set document hierarchy
        #
        superclasses = ()
        class_name = [name]
        for base in flattened_bases:
            if (not getattr(base, '_is_base_cls', True) and
               not getattr(base, '_meta', {}).get('abstract', True)):
                # Collate hierarchy for _cls and _subclasses
                class_name.append(base.__name__)

            if hasattr(base, '_meta'):
                # Warn if allow_inheritance isn't set and prevent
                # inheritance of classes where inheritance is set to False
                allow_inheritance = base._meta.get('allow_inheritance',
                                                   ALLOW_INHERITANCE)
                if (allow_inheritance is not True and
                   not base._meta.get('abstract')):
                    raise ValueError('Document %s may not be subclassed' %
                                     base.__name__)

        # Get superclasses from last base superclass
        document_bases = [b for b in flattened_bases
                          if hasattr(b, '_class_name')]
        if document_bases:
            superclasses = document_bases[0]._superclasses
            superclasses += (document_bases[0]._class_name, )

        _cls = '.'.join(reversed(class_name))
        attrs['_class_name'] = _cls
        attrs['_superclasses'] = superclasses
        attrs['_subclasses'] = (_cls, )
        attrs['_types'] = attrs['_subclasses']  # TODO deprecate _types

        # Create the new_class
        new_class = super_new(cls, name, bases, attrs)

        # Set _subclasses
        for base in document_bases:
            if _cls not in base._subclasses:
                base._subclasses += (_cls,)
                base._types = base._subclasses  # TODO deprecate _types

        # Handle delete rules
        Document, EmbeddedDocument, DictField = cls._import_classes()
        for field in new_class._fields.itervalues():
            f = field
            f.owner_document = new_class
            delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING)
            if isinstance(f, ComplexBaseField) and hasattr(f, 'field'):
                delete_rule = getattr(f.field,
                                      'reverse_delete_rule',
                                      DO_NOTHING)
                if isinstance(f, DictField) and delete_rule != DO_NOTHING:
                    msg = ("Reverse delete rules are not supported "
                           "for %s (field: %s)" %
                           (field.__class__.__name__, field.name))
                    raise InvalidDocumentError(msg)

                f = field.field

            if delete_rule != DO_NOTHING:
                if issubclass(new_class, EmbeddedDocument):
                    msg = ("Reverse delete rules are not supported for "
                           "EmbeddedDocuments (field: %s)" % field.name)
                    raise InvalidDocumentError(msg)
                f.document_type.register_delete_rule(new_class,
                                                     field.name, delete_rule)

            if (field.name and hasattr(Document, field.name) and
               EmbeddedDocument not in new_class.mro()):
                msg = ("%s is a document method and not a valid "
                       "field name" % field.name)
                raise InvalidDocumentError(msg)

        if issubclass(new_class, Document):
            new_class._collection = None

        # Add class to the _document_registry
        _document_registry[new_class._class_name] = new_class

        # In Python 2, user-defined method objects have special read-only
        # attributes 'im_func' and 'im_self' which contain the function obj
        # and class instance object respectively. With Python 3 these special
        # attributes have been replaced by __func__ and __self__. The Blinker
        # module continues to use im_func and im_self, so the code below
        # copies __func__ into im_func and __self__ into im_self for
        # classmethod objects in Document derived classes.
        if PY3:
            for key, val in new_class.__dict__.items():
                if isinstance(val, classmethod):
                    f = val.__get__(new_class)
                    if hasattr(f, '__func__') and not hasattr(f, 'im_func'):
                        f.__dict__.update({'im_func': getattr(f, '__func__')})
                    if hasattr(f, '__self__') and not hasattr(f, 'im_self'):
                        f.__dict__.update({'im_self': getattr(f, '__self__')})

        return new_class

    def add_to_class(self, name, value):
        setattr(self, name, value)

    @classmethod
    def _get_bases(cls, bases):
        if isinstance(bases, BasesTuple):
            return bases
        seen = []
        bases = cls.__get_bases(bases)
        unique_bases = (b for b in bases if not (b in seen or seen.append(b)))
        return BasesTuple(unique_bases)

    @classmethod
    def __get_bases(cls, bases):
        for base in bases:
            if base is object:
                continue
            yield base
            for child_base in cls.__get_bases(base.__bases__):
                yield child_base

    @classmethod
    def _import_classes(cls):
        Document = _import_class('Document')
        EmbeddedDocument = _import_class('EmbeddedDocument')
        DictField = _import_class('DictField')
        return (Document, EmbeddedDocument, DictField)

class TopLevelDocumentMetaclass(DocumentMetaclass):
    """Metaclass for top-level documents (i.e. documents that have their own
    collection in the database).
    """

    def __new__(cls, name, bases, attrs):
        flattened_bases = cls._get_bases(bases)
        super_new = super(TopLevelDocumentMetaclass, cls).__new__

        # Set default _meta data if base class, otherwise get user defined meta
        if (attrs.get('my_metaclass') == TopLevelDocumentMetaclass):
            # defaults
            attrs['_meta'] = {
                'abstract': True,
                'max_documents': None,
                'max_size': None,
                'ordering': [],  # default ordering applied at runtime
                'indexes': [],  # indexes to be ensured at runtime
                'id_field': None,
                'index_background': False,
                'index_drop_dups': False,
                'index_opts': None,
                'delete_rules': None,
                'allow_inheritance': None,
            }
            attrs['_is_base_cls'] = True
            attrs['_meta'].update(attrs.get('meta', {}))
        else:
            attrs['_meta'] = attrs.get('meta', {})
            # Explicitly set abstract to false unless set
            attrs['_meta']['abstract'] = attrs['_meta'].get('abstract', False)
            attrs['_is_base_cls'] = False

        # Set flag marking as document class - as opposed to an object mixin
        attrs['_is_document'] = True

        # Ensure queryset_class is inherited
        if 'objects' in attrs:
            manager = attrs['objects']
            if hasattr(manager, 'queryset_class'):
                attrs['_meta']['queryset_class'] = manager.queryset_class

        # Clean up top level meta
        if 'meta' in attrs:
            del(attrs['meta'])

        # Find the parent document class
        parent_doc_cls = [b for b in flattened_bases
                          if b.__class__ == TopLevelDocumentMetaclass]
        parent_doc_cls = None if not parent_doc_cls else parent_doc_cls[0]

        # Prevent classes setting collection different to their parents
        # If parent wasn't an abstract class
        if (parent_doc_cls and 'collection' in attrs.get('_meta', {})
            and not parent_doc_cls._meta.get('abstract', True)):
            msg = "Trying to set a collection on a subclass (%s)" % name
            warnings.warn(msg, SyntaxWarning)
            del(attrs['_meta']['collection'])

        # Ensure abstract documents have abstract bases
        if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'):
            if (parent_doc_cls and
               not parent_doc_cls._meta.get('abstract', False)):
                msg = "Abstract document cannot have non-abstract base"
                raise ValueError(msg)
            return super_new(cls, name, bases, attrs)

        # Merge base class metas.
        # Uses a special MetaDict that handles various merging rules
        meta = MetaDict()
        for base in flattened_bases[::-1]:
            # Add any mixin metadata from plain objects
            if hasattr(base, 'meta'):
                meta.merge(base.meta)
            elif hasattr(base, '_meta'):
                meta.merge(base._meta)

            # Set collection in the meta if its callable
            if (getattr(base, '_is_document', False) and
               not base._meta.get('abstract')):
                collection = meta.get('collection', None)
                if callable(collection):
                    meta['collection'] = collection(base)

        meta.merge(attrs.get('_meta', {}))  # Top level meta

        # Only simple classes (direct subclasses of Document)
        # may set allow_inheritance to False
        simple_class = all([b._meta.get('abstract')
                            for b in flattened_bases if hasattr(b, '_meta')])
        if (not simple_class and meta['allow_inheritance'] == False and
           not meta['abstract']):
            raise ValueError('Only direct subclasses of Document may set '
                             '"allow_inheritance" to False')

        # Set default collection name
        if 'collection' not in meta:
            meta['collection'] = ''.join('_%s' % c if c.isupper() else c
                                         for c in name).strip('_').lower()
        attrs['_meta'] = meta

        # Call super and get the new class
        new_class = super_new(cls, name, bases, attrs)

        meta = new_class._meta

        # Set index specifications
        meta['index_specs'] = new_class._build_index_specs(meta['indexes'])

        # If collection is a callable - call it and set the value
        collection = meta.get('collection')
        if callable(collection):
            new_class._meta['collection'] = collection(new_class)

        # Provide a default queryset unless one has been set
        manager = attrs.get('objects', QuerySetManager())
        new_class.objects = manager

        # Validate the fields and set primary key if needed
        for field_name, field in new_class._fields.iteritems():
            if field.primary_key:
                # Ensure only one primary key is set
                current_pk = new_class._meta.get('id_field')
                if current_pk and current_pk != field_name:
                    raise ValueError('Cannot override primary key field')

                # Set primary key
                if not current_pk:
                    new_class._meta['id_field'] = field_name
                    new_class.id = field

        # Set primary key if not defined by the document
        if not new_class._meta.get('id_field'):
            new_class._meta['id_field'] = 'id'
            new_class._fields['id'] = ObjectIdField(db_field='_id')
            new_class._fields['id'].name = 'id'
            new_class.id = new_class._fields['id']

        # Merge in exceptions with parent hierarchy
        exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned)
        module = attrs.get('__module__')
        for exc in exceptions_to_merge:
            name = exc.__name__
            parents = tuple(getattr(base, name) for base in flattened_bases
                            if hasattr(base, name)) or (exc,)
            # Create new exception and set to new_class
            exception = type(name, parents, {'__module__': module})
            setattr(new_class, name, exception)

        return new_class
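
# Illustrative sketch, not part of the diff: when no collection name is
# given in meta, the metaclass derives one by snake_casing the class name.
# BlogPost is a hypothetical example.
#
#     class BlogPost(Document):
#         pass
#
#     BlogPost._get_collection_name()  # => 'blog_post'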

class MetaDict(dict):
    """Custom dictionary for meta classes.
    Handles the merging of set indexes
    """
    _merge_options = ('indexes',)

    def merge(self, new_options):
        for k, v in new_options.iteritems():
            if k in self._merge_options:
                self[k] = self.get(k, []) + v
            else:
                self[k] = v


class BasesTuple(tuple):
    """Special class to handle introspection of bases tuple in __new__"""
    pass
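
# Illustrative sketch, not part of the diff: MetaDict concatenates the
# 'indexes' option on merge, while any other key is simply overwritten.
#
#     meta = MetaDict({'indexes': ['title'], 'ordering': ['-date']})
#     meta.merge({'indexes': ['date'], 'ordering': ['+date']})
#     # meta['indexes'] == ['title', 'date'], meta['ordering'] == ['+date']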

mongoengine/common.py | 36 lines (new file)
@@ -0,0 +1,36 @@
_class_registry_cache = {}


def _import_class(cls_name):
    """Cached mechanism for imports"""
    if cls_name in _class_registry_cache:
        return _class_registry_cache.get(cls_name)

    doc_classes = ('Document', 'DynamicEmbeddedDocument', 'EmbeddedDocument',
                   'MapReduceDocument')
    field_classes = ('DictField', 'DynamicField', 'EmbeddedDocumentField',
                     'FileField', 'GenericReferenceField',
                     'GenericEmbeddedDocumentField', 'GeoPointField',
                     'ReferenceField', 'StringField', 'ComplexBaseField')
    queryset_classes = ('OperationError',)
    deref_classes = ('DeReference',)

    if cls_name in doc_classes:
        from mongoengine import document as module
        import_classes = doc_classes
    elif cls_name in field_classes:
        from mongoengine import fields as module
        import_classes = field_classes
    elif cls_name in queryset_classes:
        from mongoengine import queryset as module
        import_classes = queryset_classes
    elif cls_name in deref_classes:
        from mongoengine import dereference as module
        import_classes = deref_classes
    else:
        raise ValueError('No import set for: %s' % cls_name)

    for cls in import_classes:
        _class_registry_cache[cls] = getattr(module, cls)

    return _class_registry_cache.get(cls_name)
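
# Illustrative sketch, not part of the diff: _import_class exists to break
# circular imports between the document, field, and queryset modules; the
# first lookup imports and caches the whole group, later lookups hit the
# cache.
#
#     Document = _import_class('Document')   # imported, group cached
#     assert _import_class('Document') is Document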
@@ -28,8 +28,10 @@ def register_connection(alias, name, host='localhost', port=27017,
     :param name: the name of the specific database to use
     :param host: the host name of the :program:`mongod` instance to connect to
     :param port: the port that the :program:`mongod` instance is running on
-    :param is_slave: whether the connection can act as a slave ** Depreciated pymongo 2.0.1+
-    :param read_preference: The read preference for the collection ** Added pymongo 2.1
+    :param is_slave: whether the connection can act as a slave
+      ** Deprecated pymongo 2.0.1+
+    :param read_preference: The read preference for the collection
+      ** Added pymongo 2.1
     :param slaves: a list of aliases of slave connections; each of these must
        be a registered connection that has :attr:`is_slave` set to ``True``
     :param username: username to authenticate with
@@ -161,6 +163,7 @@ def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs):
 
     return get_connection(alias)
 
+
 # Support old naming convention
 _get_connection = get_connection
 _get_db = get_db

mongoengine/context_managers.py | 194 lines (new file)
@@ -0,0 +1,194 @@
from mongoengine.common import _import_class
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
from mongoengine.queryset import OperationError, QuerySet

__all__ = ("switch_db", "switch_collection", "no_dereference", "query_counter")


class switch_db(object):
    """ switch_db alias context manager.

    Example ::

        # Register connections
        register_connection('default', 'mongoenginetest')
        register_connection('testdb-1', 'mongoenginetest2')

        class Group(Document):
            name = StringField()

        Group(name="test").save()  # Saves in the default db

        with switch_db(Group, 'testdb-1') as Group:
            Group(name="hello testdb!").save()  # Saves in testdb-1

    """

    def __init__(self, cls, db_alias):
        """ Construct the switch_db context manager

        :param cls: the class to change the registered db
        :param db_alias: the name of the specific database to use
        """
        self.cls = cls
        self.collection = cls._get_collection()
        self.db_alias = db_alias
        self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME)

    def __enter__(self):
        """ change the db_alias and clear the cached collection """
        self.cls._meta["db_alias"] = self.db_alias
        self.cls._collection = None
        return self.cls

    def __exit__(self, t, value, traceback):
        """ Reset the db_alias and collection """
        self.cls._meta["db_alias"] = self.ori_db_alias
        self.cls._collection = self.collection

class switch_collection(object):
    """ switch_collection alias context manager.

    Example ::

        class Group(Document):
            name = StringField()

        Group(name="test").save()  # Saves in the default db

        with switch_collection(Group, 'group1') as Group:
            Group(name="hello testdb!").save()  # Saves in group1 collection

    """

    def __init__(self, cls, collection_name):
        """ Construct the switch_collection context manager

        :param cls: the class to change the registered db
        :param collection_name: the name of the collection to use
        """
        self.cls = cls
        self.ori_collection = cls._get_collection()
        self.ori_get_collection_name = cls._get_collection_name
        self.collection_name = collection_name

    def __enter__(self):
        """ change the _get_collection_name and clear the cached collection """

        @classmethod
        def _get_collection_name(cls):
            return self.collection_name

        self.cls._get_collection_name = _get_collection_name
        self.cls._collection = None
        return self.cls

    def __exit__(self, t, value, traceback):
        """ Reset the collection """
        self.cls._collection = self.ori_collection
        self.cls._get_collection_name = self.ori_get_collection_name
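
# Illustrative sketch, not part of the diff: because _get_collection_name
# is patched, reads inside the block also resolve against the temporary
# collection (Group as in the docstring example above).
#
#     with switch_collection(Group, 'group_archive') as GroupArchive:
#         archived = GroupArchive.objects.first()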

class no_dereference(object):
    """ no_dereference context manager.

    Turns off all dereferencing in Documents for the duration of the context
    manager::

        with no_dereference(Group) as Group:
            Group.objects.find()

    """

    def __init__(self, cls):
        """ Construct the no_dereference context manager.

        :param cls: the class to turn dereferencing off on
        """
        self.cls = cls

        ReferenceField = _import_class('ReferenceField')
        GenericReferenceField = _import_class('GenericReferenceField')
        ComplexBaseField = _import_class('ComplexBaseField')

        self.deref_fields = [k for k, v in self.cls._fields.iteritems()
                             if isinstance(v, (ReferenceField,
                                               GenericReferenceField,
                                               ComplexBaseField))]

    def __enter__(self):
        """ change the objects default and _auto_dereference values"""
        for field in self.deref_fields:
            self.cls._fields[field]._auto_dereference = False
        return self.cls

    def __exit__(self, t, value, traceback):
        """ Reset the default and _auto_dereference values"""
        for field in self.deref_fields:
            self.cls._fields[field]._auto_dereference = True
        return self.cls
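
# Illustrative sketch, not part of the diff: inside the block, reference
# fields come back as raw DBRefs instead of fetched documents (assuming a
# hypothetical Group.member reference field).
#
#     with no_dereference(Group) as Group:
#         group = Group.objects.first()
#         group.member  # => DBRef; no extra query is issued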

class QuerySetNoDeRef(QuerySet):
    """Special no_dereference QuerySet"""
    def __dereference(items, max_depth=1, instance=None, name=None):
        return items


class query_counter(object):
    """ Query_counter context manager to get the number of queries. """

    def __init__(self):
        """ Construct the query_counter. """
        self.counter = 0
        self.db = get_db()

    def __enter__(self):
        """ On every with block we need to drop the profile collection. """
        self.db.set_profiling_level(0)
        self.db.system.profile.drop()
        self.db.set_profiling_level(2)
        return self

    def __exit__(self, t, value, traceback):
        """ Reset the profiling level. """
        self.db.set_profiling_level(0)

    def __eq__(self, value):
        """ == Compare querycounter. """
        return value == self._get_count()

    def __ne__(self, value):
        """ != Compare querycounter. """
        return not self.__eq__(value)

    def __lt__(self, value):
        """ < Compare querycounter. """
        return self._get_count() < value

    def __le__(self, value):
        """ <= Compare querycounter. """
        return self._get_count() <= value

    def __gt__(self, value):
        """ > Compare querycounter. """
        return self._get_count() > value

    def __ge__(self, value):
        """ >= Compare querycounter. """
        return self._get_count() >= value

    def __int__(self):
        """ int representation. """
        return self._get_count()

    def __repr__(self):
        """ repr query_counter as the number of queries. """
        return u"%s" % self._get_count()

    def _get_count(self):
        """ Get the number of queries. """
        count = self.db.system.profile.find().count() - self.counter
        self.counter += 1
        return count
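
# Illustrative sketch, not part of the diff: query_counter compares against
# plain ints through the rich comparison operators defined above (the
# profiled find() issued by each comparison is why counter increments).
#
#     with query_counter() as q:
#         assert q == 0
#         Group.objects.first()  # issues one query
#         assert q == 1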
@@ -31,15 +31,34 @@ class DeReference(object):
             items = [i for i in items]
 
         self.max_depth = max_depth
 
         doc_type = None
-        if instance and instance._fields:
-            doc_type = instance._fields[name].field
+        if instance and isinstance(instance, (Document, TopLevelDocumentMetaclass)):
+            doc_type = instance._fields.get(name)
+            if hasattr(doc_type, 'field'):
+                doc_type = doc_type.field
 
             if isinstance(doc_type, ReferenceField):
+                field = doc_type
                 doc_type = doc_type.document_type
-                if all([i.__class__ == doc_type for i in items]):
+                is_list = not hasattr(items, 'items')
+
+                if is_list and all([i.__class__ == doc_type for i in items]):
                     return items
+                elif not is_list and all([i.__class__ == doc_type
+                                         for i in items.values()]):
+                    return items
+                elif not field.dbref:
+                    if not hasattr(items, 'items'):
+                        items = [field.to_python(v)
+                                 if not isinstance(v, (DBRef, Document)) else v
+                                 for v in items]
+                    else:
+                        items = dict([
+                            (k, field.to_python(v))
+                            if not isinstance(v, (DBRef, Document)) else (k, v)
+                            for k, v in items.iteritems()]
+                        )
 
         self.reference_map = self._find_references(items)
         self.object_map = self._fetch_objects(doc_type=doc_type)
@@ -65,7 +84,7 @@ class DeReference(object):
         # Recursively find dbreferences
         depth += 1
         for k, item in iterator:
-            if hasattr(item, '_fields'):
+            if isinstance(item, Document):
                 for field_name, field in item._fields.iteritems():
                     v = item._data.get(field_name, None)
                     if isinstance(v, (DBRef)):
@@ -96,13 +115,16 @@
         object_map = {}
         for col, dbrefs in self.reference_map.iteritems():
             keys = object_map.keys()
-            refs = list(set([dbref for dbref in dbrefs if str(dbref) not in keys]))
+            refs = list(set([dbref for dbref in dbrefs if unicode(dbref).encode('utf-8') not in keys]))
             if hasattr(col, 'objects'):  # We have a document class for the refs
                 references = col.objects.in_bulk(refs)
                 for key, doc in references.iteritems():
                     object_map[key] = doc
             else:  # Generic reference: use the refs data to convert to document
-                if doc_type and not isinstance(doc_type, (ListField, DictField, MapField,) ):
+                if isinstance(doc_type, (ListField, DictField, MapField,)):
+                    continue
+
+                if doc_type:
                     references = doc_type._get_db()[col].find({'_id': {'$in': refs}})
                     for ref in references:
                         doc = doc_type._from_son(ref)
@@ -145,13 +167,14 @@
         if isinstance(items, (dict, SON)):
             if '_ref' in items:
                 return self.object_map.get(items['_ref'].id, items)
-            elif '_types' in items and '_cls' in items:
+            elif '_cls' in items:
                 doc = get_document(items['_cls'])._from_son(items)
-                doc._data = self._attach_objects(doc._data, depth, doc, name)
+                doc._data = self._attach_objects(doc._data, depth, doc, None)
                 return doc
 
         if not hasattr(items, 'items'):
             is_list = True
+            as_tuple = isinstance(items, tuple)
             iterator = enumerate(items)
             data = []
         else:
@@ -166,9 +189,9 @@
             else:
                 data[k] = v
 
-            if k in self.object_map:
+            if k in self.object_map and not is_list:
                 data[k] = self.object_map[k]
-            elif hasattr(v, '_fields'):
+            elif isinstance(v, Document):
                 for field_name, field in v._fields.iteritems():
                     v = data[k]._data.get(field_name, None)
                     if isinstance(v, (DBRef)):
@@ -186,7 +209,7 @@
 
         if instance and name:
             if is_list:
-                return BaseList(data, instance, name)
+                return tuple(data) if as_tuple else BaseList(data, instance, name)
             return BaseDict(data, instance, name)
         depth += 1
         return data
@@ -1,8 +1,11 @@
|
|||||||
import datetime
|
|
||||||
|
|
||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
|
|
||||||
from django.utils.encoding import smart_str
|
from django.utils.encoding import smart_str
|
||||||
|
from django.contrib.auth.models import _user_get_all_permissions
|
||||||
|
from django.contrib.auth.models import _user_has_perm
|
||||||
|
from django.db import models
|
||||||
|
from django.contrib.contenttypes.models import ContentTypeManager
|
||||||
|
from django.contrib import auth
|
||||||
from django.contrib.auth.models import AnonymousUser
|
from django.contrib.auth.models import AnonymousUser
|
||||||
from django.utils.translation import ugettext_lazy as _
|
from django.utils.translation import ugettext_lazy as _
|
||||||
|
|
||||||
@@ -31,9 +34,150 @@ except ImportError:
|
|||||||
hash = get_hexdigest(algo, salt, raw_password)
|
hash = get_hexdigest(algo, salt, raw_password)
|
||||||
return '%s$%s$%s' % (algo, salt, hash)
|
return '%s$%s$%s' % (algo, salt, hash)
|
||||||
|
|
||||||
|
from .utils import datetime_now
|
||||||
|
|
||||||
REDIRECT_FIELD_NAME = 'next'
|
REDIRECT_FIELD_NAME = 'next'
|
||||||
|
|
||||||
|
class ContentType(Document):
|
||||||
|
name = StringField(max_length=100)
|
||||||
|
app_label = StringField(max_length=100)
|
||||||
|
model = StringField(max_length=100, verbose_name=_('python model class name'),
|
||||||
|
unique_with='app_label')
|
||||||
|
objects = ContentTypeManager()
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
verbose_name = _('content type')
|
||||||
|
verbose_name_plural = _('content types')
|
||||||
|
# db_table = 'django_content_type'
|
||||||
|
# ordering = ('name',)
|
||||||
|
# unique_together = (('app_label', 'model'),)
|
||||||
|
|
||||||
|
def __unicode__(self):
|
||||||
|
return self.name
|
||||||
|
|
||||||
|
def model_class(self):
|
||||||
|
"Returns the Python model class for this type of content."
|
||||||
|
from django.db import models
|
||||||
|
return models.get_model(self.app_label, self.model)
|
||||||
|
|
||||||
|
def get_object_for_this_type(self, **kwargs):
|
||||||
|
"""
|
||||||
|
Returns an object of this type for the keyword arguments given.
|
||||||
|
Basically, this is a proxy around this object_type's get_object() model
|
||||||
|
method. The ObjectNotExist exception, if thrown, will not be caught,
|
||||||
|
so code that calls this method should catch it.
|
||||||
|
"""
|
||||||
|
return self.model_class()._default_manager.using(self._state.db).get(**kwargs)
|
||||||
|
|
||||||
|
def natural_key(self):
|
||||||
|
return (self.app_label, self.model)
|
||||||
|
|
||||||
|
class SiteProfileNotAvailable(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class PermissionManager(models.Manager):
|
||||||
|
def get_by_natural_key(self, codename, app_label, model):
|
||||||
|
return self.get(
|
||||||
|
codename=codename,
|
||||||
|
content_type=ContentType.objects.get_by_natural_key(app_label, model)
|
||||||
|
)
|
||||||
|
|
||||||
|
class Permission(Document):
|
||||||
|
"""The permissions system provides a way to assign permissions to specific users and groups of users.
|
||||||
|
|
||||||
|
The permission system is used by the Django admin site, but may also be useful in your own code. The Django admin site uses permissions as follows:
|
||||||
|
|
||||||
|
- The "add" permission limits the user's ability to view the "add" form and add an object.
|
||||||
|
- The "change" permission limits a user's ability to view the change list, view the "change" form and change an object.
|
||||||
|
- The "delete" permission limits the ability to delete an object.
|
||||||
|
|
||||||
|
Permissions are set globally per type of object, not per specific object instance. It is possible to say "Mary may change news stories," but it's not currently possible to say "Mary may change news stories, but only the ones she created herself" or "Mary may only change news stories that have a certain status or publication date."
|
||||||
|
|
||||||
|
Three basic permissions -- add, change and delete -- are automatically created for each Django model.
|
||||||
|
"""
|
||||||
|
name = StringField(max_length=50, verbose_name=_('username'))
|
||||||
|
content_type = ReferenceField(ContentType)
|
||||||
|
codename = StringField(max_length=100, verbose_name=_('codename'))
|
||||||
|
# FIXME: don't access field of the other class
|
||||||
|
# unique_with=['content_type__app_label', 'content_type__model'])
|
||||||
|
|
||||||
|
objects = PermissionManager()
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
verbose_name = _('permission')
|
||||||
|
verbose_name_plural = _('permissions')
|
||||||
|
# unique_together = (('content_type', 'codename'),)
|
||||||
|
# ordering = ('content_type__app_label', 'content_type__model', 'codename')
|
||||||
|
|
||||||
|
def __unicode__(self):
|
||||||
|
return u"%s | %s | %s" % (
|
||||||
|
unicode(self.content_type.app_label),
|
||||||
|
unicode(self.content_type),
|
||||||
|
unicode(self.name))
|
||||||
|
|
||||||
|
def natural_key(self):
|
||||||
|
return (self.codename,) + self.content_type.natural_key()
|
||||||
|
natural_key.dependencies = ['contenttypes.contenttype']
|
||||||
|
|
||||||
|
class Group(Document):
|
||||||
|
"""Groups are a generic way of categorizing users to apply permissions, or some other label, to those users. A user can belong to any number of groups.
|
||||||
|
|
||||||
|
A user in a group automatically has all the permissions granted to that group. For example, if the group Site editors has the permission can_edit_home_page, any user in that group will have that permission.
|
||||||
|
|
||||||
|
Beyond permissions, groups are a convenient way to categorize users to apply some label, or extended functionality, to them. For example, you could create a group 'Special users', and you could write code that would do special things to those users -- such as giving them access to a members-only portion of your site, or sending them members-only e-mail messages.
|
||||||
|
"""
|
||||||
|
name = StringField(max_length=80, unique=True, verbose_name=_('name'))
|
||||||
|
# permissions = models.ManyToManyField(Permission, verbose_name=_('permissions'), blank=True)
|
||||||
|
permissions = ListField(ReferenceField(Permission, verbose_name=_('permissions'), required=False))
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
verbose_name = _('group')
|
||||||
|
verbose_name_plural = _('groups')
|
||||||
|
|
||||||
|
def __unicode__(self):
|
||||||
|
return self.name
|
||||||
|
|
||||||
|
class UserManager(models.Manager):
|
||||||
|
def create_user(self, username, email, password=None):
|
||||||
|
"""
|
||||||
|
Creates and saves a User with the given username, e-mail and password.
|
||||||
|
"""
|
||||||
|
now = datetime_now()
|
||||||
|
|
||||||
|
# Normalize the address by lowercasing the domain part of the email
|
||||||
|
# address.
|
||||||
|
try:
|
||||||
|
email_name, domain_part = email.strip().split('@', 1)
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
email = '@'.join([email_name, domain_part.lower()])
|
||||||
|
|
||||||
|
user = self.model(username=username, email=email, is_staff=False,
|
||||||
|
is_active=True, is_superuser=False, last_login=now,
|
||||||
|
date_joined=now)
|
||||||
|
|
||||||
|
user.set_password(password)
|
||||||
|
user.save(using=self._db)
|
||||||
|
return user
|
||||||
|
|
||||||
|
def create_superuser(self, username, email, password):
|
||||||
|
u = self.create_user(username, email, password)
|
||||||
|
u.is_staff = True
|
||||||
|
u.is_active = True
|
||||||
|
u.is_superuser = True
|
||||||
|
u.save(using=self._db)
|
||||||
|
return u
|
||||||
|
|
||||||
|
def make_random_password(self, length=10, allowed_chars='abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23456789'):
|
||||||
|
"Generates a random password with the given length and given allowed_chars"
|
||||||
|
# Note that default value of allowed_chars does not have "I" or letters
|
||||||
|
# that look like it -- just to avoid confusion.
|
||||||
|
from random import choice
|
||||||
|
return ''.join([choice(allowed_chars) for i in range(length)])
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class User(Document):
|
class User(Document):
|
||||||
"""A User document that aims to mirror most of the API specified by Django
|
"""A User document that aims to mirror most of the API specified by Django
|
||||||
at http://docs.djangoproject.com/en/dev/topics/auth/#users
|
at http://docs.djangoproject.com/en/dev/topics/auth/#users
|
||||||
@@ -60,15 +204,15 @@ class User(Document):
|
|||||||
is_superuser = BooleanField(default=False,
|
is_superuser = BooleanField(default=False,
|
||||||
verbose_name=_('superuser status'),
|
verbose_name=_('superuser status'),
|
||||||
help_text=_("Designates that this user has all permissions without explicitly assigning them."))
|
help_text=_("Designates that this user has all permissions without explicitly assigning them."))
|
||||||
last_login = DateTimeField(default=datetime.datetime.now,
|
last_login = DateTimeField(default=datetime_now,
|
||||||
verbose_name=_('last login'))
|
verbose_name=_('last login'))
|
||||||
date_joined = DateTimeField(default=datetime.datetime.now,
|
date_joined = DateTimeField(default=datetime_now,
|
||||||
verbose_name=_('date joined'))
|
verbose_name=_('date joined'))
|
||||||
|
|
||||||
meta = {
|
meta = {
|
||||||
'allow_inheritance': True,
|
'allow_inheritance': True,
|
||||||
'indexes': [
|
'indexes': [
|
||||||
{'fields': ['username'], 'unique': True}
|
{'fields': ['username'], 'unique': True, 'sparse': True}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
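The `sparse: True` addition means documents without a username no longer collide on the unique index. Roughly the index MongoEngine will now create, shown as the equivalent pymongo call for illustration only (collection name assumed):

    db.user.ensure_index([('username', 1)], unique=True, sparse=True)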
@@ -104,12 +248,31 @@ class User(Document):
         """
         return check_password(raw_password, self.password)

+    def get_all_permissions(self, obj=None):
+        return _user_get_all_permissions(self, obj)
+
+    def has_perm(self, perm, obj=None):
+        """
+        Returns True if the user has the specified permission. This method
+        queries all available auth backends, but returns immediately if any
+        backend returns True. Thus, a user who has permission from a single
+        auth backend is assumed to have permission in general. If an object is
+        provided, permissions for this specific object are checked.
+        """
+
+        # Active superusers have all permissions.
+        if self.is_active and self.is_superuser:
+            return True
+
+        # Otherwise we need to check the backends.
+        return _user_has_perm(self, perm, obj)
+
     @classmethod
     def create_user(cls, username, password, email=None):
         """Create (and save) a new user with the given username, password and
         email address.
         """
-        now = datetime.datetime.now()
+        now = datetime_now()

         # Normalize the address by lowercasing the domain part of the email
         # address.
@@ -126,9 +289,111 @@ class User(Document):
         user.save()
         return user

+    def get_all_permissions(self, obj=None):
+        permissions = set()
+        anon = self.is_anonymous()
+        for backend in auth.get_backends():
+            if not anon or backend.supports_anonymous_user:
+                if hasattr(backend, "get_all_permissions"):
+                    if obj is not None:
+                        if backend.supports_object_permissions:
+                            permissions.update(
+                                backend.get_all_permissions(user, obj)
+                            )
+                    else:
+                        permissions.update(backend.get_all_permissions(self))
+        return permissions
+
     def get_and_delete_messages(self):
         return []

+    def has_perm(self, perm, obj=None):
+        anon = self.is_anonymous()
+        active = self.is_active
+        for backend in auth.get_backends():
+            if (not active and not anon and backend.supports_inactive_user) or \
+                    (not anon or backend.supports_anonymous_user):
+                if hasattr(backend, "has_perm"):
+                    if obj is not None:
+                        if (backend.supports_object_permissions and
+                                backend.has_perm(self, perm, obj)):
+                            return True
+                    else:
+                        if backend.has_perm(self, perm):
+                            return True
+        return False
+
+    def has_perms(self, perm_list, obj=None):
+        """
+        Returns True if the user has each of the specified permissions.
+        If object is passed, it checks if the user has all required perms
+        for this object.
+        """
+        for perm in perm_list:
+            if not self.has_perm(perm, obj):
+                return False
+        return True
+
+    def has_module_perms(self, app_label):
+        anon = self.is_anonymous()
+        active = self.is_active
+        for backend in auth.get_backends():
+            if (not active and not anon and backend.supports_inactive_user) or \
+                    (not anon or backend.supports_anonymous_user):
+                if hasattr(backend, "has_module_perms"):
+                    if backend.has_module_perms(self, app_label):
+                        return True
+        return False
+
+    def get_and_delete_messages(self):
+        messages = []
+        for m in self.message_set.all():
+            messages.append(m.message)
+            m.delete()
+        return messages
+
+    def email_user(self, subject, message, from_email=None):
+        "Sends an e-mail to this User."
+        from django.core.mail import send_mail
+        send_mail(subject, message, from_email, [self.email])
+
+    def get_profile(self):
+        """
+        Returns site-specific profile for this user. Raises
+        SiteProfileNotAvailable if this site does not allow profiles.
+        """
+        if not hasattr(self, '_profile_cache'):
+            from django.conf import settings
+            if not getattr(settings, 'AUTH_PROFILE_MODULE', False):
+                raise SiteProfileNotAvailable('You need to set AUTH_PROFILE_MO'
+                                              'DULE in your project settings')
+            try:
+                app_label, model_name = settings.AUTH_PROFILE_MODULE.split('.')
+            except ValueError:
+                raise SiteProfileNotAvailable('app_label and model_name should'
+                                              ' be separated by a dot in the AUTH_PROFILE_MODULE set'
+                                              'ting')
+
+            try:
+                model = models.get_model(app_label, model_name)
+                if model is None:
+                    raise SiteProfileNotAvailable('Unable to load the profile '
+                                                  'model, check AUTH_PROFILE_MODULE in your project sett'
+                                                  'ings')
+                self._profile_cache = model._default_manager.using(self._state.db).get(user__id__exact=self.id)
+                self._profile_cache.user = self
+            except (ImportError, ImproperlyConfigured):
+                raise SiteProfileNotAvailable
+        return self._profile_cache
+
+    def _get_message_set(self):
+        import warnings
+        warnings.warn('The user messaging API is deprecated. Please update'
+                      ' your code to use the new messages framework.',
+                      category=DeprecationWarning)
+        return self._message_set
+    message_set = property(_get_message_set)
+
+
 class MongoEngineBackend(object):
     """Authenticate using MongoEngine and mongoengine.django.auth.User.
@@ -142,6 +407,8 @@ class MongoEngineBackend(object):
         user = User.objects(username=username).first()
         if user:
             if password and user.check_password(password):
+                backend = auth.get_backends()[0]
+                user.backend = "%s.%s" % (backend.__module__, backend.__class__.__name__)
                 return user
         return None
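The authenticate() change stamps the chosen backend path onto the returned user, which django.contrib.auth.login() expects. A sketch of the wiring, assuming the module is importable as mongoengine.django.auth:

    # settings.py
    AUTHENTICATION_BACKENDS = ('mongoengine.django.auth.MongoEngineBackend',)

    # in a view
    from django.contrib.auth import authenticate, login

    user = authenticate(username='bob', password='secret')
    if user is not None:
        login(request, user)   # relies on user.backend having been set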
mongoengine/django/sessions.py
@@ -1,5 +1,3 @@
-from datetime import datetime
-
 from django.conf import settings
 from django.contrib.sessions.backends.base import SessionBase, CreateError
 from django.core.exceptions import SuspiciousOperation
@@ -10,20 +8,44 @@ from mongoengine import fields
 from mongoengine.queryset import OperationError
 from mongoengine.connection import DEFAULT_CONNECTION_NAME

+from .utils import datetime_now
+
+
 MONGOENGINE_SESSION_DB_ALIAS = getattr(
     settings, 'MONGOENGINE_SESSION_DB_ALIAS',
     DEFAULT_CONNECTION_NAME)

+# a setting for the name of the collection used to store sessions
+MONGOENGINE_SESSION_COLLECTION = getattr(
+    settings, 'MONGOENGINE_SESSION_COLLECTION',
+    'django_session')
+
+# a setting for whether session data is stored encoded or not
+MONGOENGINE_SESSION_DATA_ENCODE = getattr(
+    settings, 'MONGOENGINE_SESSION_DATA_ENCODE',
+    True)
+
+
 class MongoSession(Document):
     session_key = fields.StringField(primary_key=True, max_length=40)
-    session_data = fields.StringField()
+    session_data = fields.StringField() if MONGOENGINE_SESSION_DATA_ENCODE \
+                                        else fields.DictField()
     expire_date = fields.DateTimeField()

-    meta = {'collection': 'django_session',
+    meta = {
+        'collection': MONGOENGINE_SESSION_COLLECTION,
         'db_alias': MONGOENGINE_SESSION_DB_ALIAS,
-        'allow_inheritance': False}
+        'allow_inheritance': False,
+        'indexes': [
+            {
+                'fields': ['expire_date'],
+                'expireAfterSeconds': settings.SESSION_COOKIE_AGE
+            }
+        ]
+    }
+
+    def get_decoded(self):
+        return SessionStore().decode(self.session_data)
+
+
 class SessionStore(SessionBase):
@@ -33,8 +55,11 @@ class SessionStore(SessionBase):
     def load(self):
         try:
             s = MongoSession.objects(session_key=self.session_key,
-                                     expire_date__gt=datetime.now())[0]
+                                     expire_date__gt=datetime_now)[0]
+            if MONGOENGINE_SESSION_DATA_ENCODE:
                 return self.decode(force_unicode(s.session_data))
+            else:
+                return s.session_data
         except (IndexError, SuspiciousOperation):
             self.create()
             return {}
@@ -57,7 +82,10 @@ class SessionStore(SessionBase):
         if self.session_key is None:
             self._session_key = self._get_new_session_key()
         s = MongoSession(session_key=self.session_key)
+        if MONGOENGINE_SESSION_DATA_ENCODE:
             s.session_data = self.encode(self._get_session(no_load=must_create))
+        else:
+            s.session_data = self._get_session(no_load=must_create)
         s.expire_date = self.get_expiry_date()
         try:
             s.save(force_insert=must_create, safe=True)
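With the collection name and encoding now read from settings, and a TTL index (expireAfterSeconds) letting MongoDB itself purge expired sessions, configuration moves entirely into Django settings. A minimal sketch; the SESSION_ENGINE path assumes this module is importable as mongoengine.django.sessions:

    # settings.py
    SESSION_ENGINE = 'mongoengine.django.sessions'
    MONGOENGINE_SESSION_COLLECTION = 'my_sessions'   # defaults to 'django_session'
    MONGOENGINE_SESSION_DATA_ENCODE = False          # keep the session dict unencoded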
mongoengine/django/shortcuts.py
@@ -1,7 +1,6 @@
-from django.http import Http404
 from mongoengine.queryset import QuerySet
 from mongoengine.base import BaseDocument
-from mongoengine.base import ValidationError
+from mongoengine.errors import ValidationError

 def _get_queryset(cls):
     """Inspired by django.shortcuts.*"""
@@ -27,6 +26,7 @@ def get_document_or_404(cls, *args, **kwargs):
     try:
         return queryset.get(*args, **kwargs)
     except (queryset._document.DoesNotExist, ValidationError):
+        from django.http import Http404
         raise Http404('No %s matches the given query.' % queryset._document._class_name)

 def get_list_or_404(cls, *args, **kwargs):
@@ -42,5 +42,6 @@ def get_list_or_404(cls, *args, **kwargs):
     queryset = _get_queryset(cls)
     obj_list = list(queryset.filter(*args, **kwargs))
     if not obj_list:
+        from django.http import Http404
         raise Http404('No %s matches the given query.' % queryset._document._class_name)
     return obj_list
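Deferring the Http404 import to the failure path keeps the module usable without pulling Django in at import time. Usage is unchanged; a sketch with a hypothetical Post document:

    from mongoengine.django.shortcuts import get_document_or_404

    def post_detail(request, pk):
        post = get_document_or_404(Post, pk=pk)   # raises Http404 on a miss or invalid id
        ...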
mongoengine/django/tests.py
@@ -1,16 +1,34 @@
 #coding: utf-8
-from django.test import TestCase
-from django.conf import settings
+from nose.plugins.skip import SkipTest

+from mongoengine.python_support import PY3
 from mongoengine import connect

+try:
+    from django.test import TestCase
+    from django.conf import settings
+except Exception as err:
+    if PY3:
+        from unittest import TestCase
+        # Dummy value so no error
+        class settings:
+            MONGO_DATABASE_NAME = 'dummy'
+    else:
+        raise err
+
+
 class MongoTestCase(TestCase):

+    def setUp(self):
+        if PY3:
+            raise SkipTest('django does not have Python 3 support')
+
     """
     TestCase class that clear the collection between the tests
     """
     db_name = 'test_%s' % settings.MONGO_DATABASE_NAME
     def __init__(self, methodName='runtest'):
-        self.db = connect(self.db_name)
+        self.db = connect(self.db_name).get_db()
         super(MongoTestCase, self).__init__(methodName)

     def _post_teardown(self):
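The try/except import dance keeps this module importable under Python 3, where Django is unavailable, and setUp() then skips every test. Subclassing is unchanged; a sketch assuming the module path mongoengine.django.tests:

    from mongoengine.django.tests import MongoTestCase

    class PostTests(MongoTestCase):
        def test_create(self):
            ...   # collections are cleared between tests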
mongoengine/django/utils.py (new file, 6 lines)
@@ -0,0 +1,6 @@
+try:
+    # django >= 1.4
+    from django.utils.timezone import now as datetime_now
+except ImportError:
+    from datetime import datetime
+    datetime_now = datetime.now
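datetime_now resolves to Django's timezone-aware now() on Django >= 1.4 (so projects with USE_TZ=True get aware datetimes in last_login, sessions and so on) and falls back to the naive datetime.now otherwise. For illustration:

    from mongoengine.django.utils import datetime_now

    stamp = datetime_now()   # aware under django>=1.4 with USE_TZ=True, naive otherwise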
mongoengine/document.py
@@ -1,14 +1,21 @@
+from __future__ import with_statement
+import warnings
+
 import pymongo
+import re

 from bson.dbref import DBRef

 from mongoengine import signals
-from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument,
-                  BaseDict, BaseList)
-from queryset import OperationError
-from connection import get_db, DEFAULT_CONNECTION_NAME
+from mongoengine.base import (DocumentMetaclass, TopLevelDocumentMetaclass,
+                              BaseDocument, BaseDict, BaseList,
+                              ALLOW_INHERITANCE, get_document)
+from mongoengine.queryset import OperationError, NotUniqueError, QuerySet
+from mongoengine.connection import get_db, DEFAULT_CONNECTION_NAME
+from mongoengine.context_managers import switch_db, switch_collection

-__all__ = ['Document', 'EmbeddedDocument', 'DynamicDocument',
-           'DynamicEmbeddedDocument', 'OperationError', 'InvalidCollectionError']
+__all__ = ('Document', 'EmbeddedDocument', 'DynamicDocument',
+           'DynamicEmbeddedDocument', 'OperationError',
+           'InvalidCollectionError', 'NotUniqueError', 'MapReduceDocument')


 class InvalidCollectionError(Exception):
@@ -20,24 +27,31 @@ class EmbeddedDocument(BaseDocument):
     collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as
     fields on :class:`~mongoengine.Document`\ s through the
     :class:`~mongoengine.EmbeddedDocumentField` field type.
+
+    A :class:`~mongoengine.EmbeddedDocument` subclass may be itself subclassed,
+    to create a specialised version of the embedded document that will be
+    stored in the same collection. To facilitate this behaviour a `_cls`
+    field is added to documents (hidden though the MongoEngine interface).
+    To disable this behaviour and remove the dependence on the presence of
+    `_cls` set :attr:`allow_inheritance` to ``False`` in the :attr:`meta`
+    dictionary.
     """

+    # The __metaclass__ attribute is removed by 2to3 when running with Python3
+    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
+    my_metaclass = DocumentMetaclass
     __metaclass__ = DocumentMetaclass

+    _instance = None
+
     def __init__(self, *args, **kwargs):
         super(EmbeddedDocument, self).__init__(*args, **kwargs)
         self._changed_fields = []

-    def __delattr__(self, *args, **kwargs):
-        """Handle deletions of fields"""
-        field_name = args[0]
-        if field_name in self._fields:
-            default = self._fields[field_name].default
-            if callable(default):
-                default = default()
-            setattr(self, field_name, default)
-        else:
-            super(EmbeddedDocument, self).__delattr__(*args, **kwargs)
+    def __eq__(self, other):
+        if isinstance(other, self.__class__):
+            return self._data == other._data
+        return False


 class Document(BaseDocument):
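EmbeddedDocument instances are now compared by their data rather than by identity, and the field-deletion handling moves down to DynamicEmbeddedDocument (see the last hunk of this file). A quick sketch with a hypothetical embedded document:

    class Address(EmbeddedDocument):
        city = StringField()

    Address(city='Oslo') == Address(city='Oslo')    # True: same _data
    Address(city='Oslo') == Address(city='Bergen')  # False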
@@ -55,11 +69,11 @@ class Document(BaseDocument):

     A :class:`~mongoengine.Document` subclass may be itself subclassed, to
     create a specialised version of the document that will be stored in the
-    same collection. To facilitate this behaviour, `_cls` and `_types`
-    fields are added to documents (hidden though the MongoEngine interface
-    though). To disable this behaviour and remove the dependence on the
-    presence of `_cls` and `_types`, set :attr:`allow_inheritance` to
-    ``False`` in the :attr:`meta` dictionary.
+    same collection. To facilitate this behaviour a `_cls`
+    field is added to documents (hidden though the MongoEngine interface).
+    To disable this behaviour and remove the dependence on the presence of
+    `_cls` set :attr:`allow_inheritance` to ``False`` in the :attr:`meta`
+    dictionary.

     A :class:`~mongoengine.Document` may use a **Capped Collection** by
     specifying :attr:`max_documents` and :attr:`max_size` in the :attr:`meta`
@@ -77,30 +91,35 @@ class Document(BaseDocument):
     Automatic index creation can be disabled by specifying
     attr:`auto_create_index` in the :attr:`meta` dictionary. If this is set to
     False then indexes will not be created by MongoEngine. This is useful in
-    production systems where index creation is performed as part of a deployment
-    system.
+    production systems where index creation is performed as part of a
+    deployment system.

-    By default, _types will be added to the start of every index (that
-    doesn't contain a list) if allow_inheritence is True. This can be
-    disabled by either setting types to False on the specific index or
-    by setting index_types to False on the meta dictionary for the document.
+    By default, _cls will be added to the start of every index (that
+    doesn't contain a list) if allow_inheritance is True. This can be
+    disabled by either setting cls to False on the specific index or
+    by setting index_cls to False on the meta dictionary for the document.
     """

+    # The __metaclass__ attribute is removed by 2to3 when running with Python3
+    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
+    my_metaclass = TopLevelDocumentMetaclass
     __metaclass__ = TopLevelDocumentMetaclass

-    @apply
     def pk():
         """Primary key alias
         """
         def fget(self):
             return getattr(self, self._meta['id_field'])

         def fset(self, value):
             return setattr(self, self._meta['id_field'], value)
         return property(fget, fset)
+    pk = pk()

     @classmethod
     def _get_db(cls):
         """Some Model using other db_alias"""
-        return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME ))
+        return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME))

     @classmethod
     def _get_collection(cls):
@@ -121,8 +140,9 @@ class Document(BaseDocument):
                 options = cls._collection.options()
                 if options.get('max') != max_documents or \
                         options.get('size') != max_size:
-                    msg = ('Cannot create collection "%s" as a capped '
-                           'collection as it already exists') % cls._collection
+                    msg = (('Cannot create collection "%s" as a capped '
+                            'collection as it already exists')
+                           % cls._collection)
                     raise InvalidCollectionError(msg)
             else:
                 # Create the collection as a capped collection
@@ -134,10 +154,13 @@ class Document(BaseDocument):
                 )
             else:
                 cls._collection = db[collection_name]
+            if cls._meta.get('auto_create_index', True):
+                cls.ensure_indexes()
         return cls._collection

-    def save(self, safe=True, force_insert=False, validate=True, write_options=None,
-             cascade=None, cascade_kwargs=None, _refs=None):
+    def save(self, safe=True, force_insert=False, validate=True, clean=True,
+             write_options=None, cascade=None, cascade_kwargs=None,
+             _refs=None, **kwargs):
         """Save the :class:`~mongoengine.Document` to the database. If the
         document already exists, it will be updated, otherwise it will be
         created.
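_get_collection() now triggers ensure_indexes() on first access unless the document opts out. A sketch of opting out when index creation belongs to deploy tooling (LogEntry is hypothetical):

    class LogEntry(Document):
        meta = {'auto_create_index': False}   # no implicit ensure_index calls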
@@ -149,52 +172,58 @@ class Document(BaseDocument):
         :param force_insert: only try to create a new document, don't allow
             updates of existing documents
         :param validate: validates the document; set to ``False`` to skip.
+        :param clean: call the document clean method, requires `validate` to be
+            True.
         :param write_options: Extra keyword arguments are passed down to
             :meth:`~pymongo.collection.Collection.save` OR
             :meth:`~pymongo.collection.Collection.insert`
-            which will be used as options for the resultant ``getLastError`` command.
-            For example, ``save(..., write_options={w: 2, fsync: True}, ...)`` will
-            wait until at least two servers have recorded the write and will force an
-            fsync on each server being written to.
-        :param cascade: Sets the flag for cascading saves. You can set a default by setting
-            "cascade" in the document __meta__
-        :param cascade_kwargs: optional kwargs dictionary to be passed throw to cascading saves
+            which will be used as options for the resultant
+            ``getLastError`` command. For example,
+            ``save(..., write_options={w: 2, fsync: True}, ...)`` will
+            wait until at least two servers have recorded the write and
+            will force an fsync on the primary server.
+        :param cascade: Sets the flag for cascading saves. You can set a
+            default by setting "cascade" in the document __meta__
+        :param cascade_kwargs: optional kwargs dictionary to be passed throw
+            to cascading saves
         :param _refs: A list of processed references used in cascading saves

         .. versionchanged:: 0.5
-            In existing documents it only saves changed fields using set / unset
-            Saves are cascaded and any :class:`~bson.dbref.DBRef` objects
-            that have changes are saved as well.
+            In existing documents it only saves changed fields using
+            set / unset. Saves are cascaded and any
+            :class:`~bson.dbref.DBRef` objects that have changes are
+            saved as well.
         .. versionchanged:: 0.6
-            Cascade saves are optional = defaults to True, if you want fine grain
-            control then you can turn off using document meta['cascade'] = False
-            Also you can pass different kwargs to the cascade save using cascade_kwargs
-            which overwrites the existing kwargs with custom values
+            Cascade saves are optional = defaults to True, if you want
+            fine grain control then you can turn off using document
+            meta['cascade'] = False  Also you can pass different kwargs to
+            the cascade save using cascade_kwargs which overwrites the
+            existing kwargs with custom values
         """
         signals.pre_save.send(self.__class__, document=self)

         if validate:
-            self.validate()
+            self.validate(clean=clean)

         if not write_options:
             write_options = {}

         doc = self.to_mongo()

-        created = force_insert or '_id' not in doc
+        created = ('_id' not in doc or self._created or force_insert)

         try:
-            collection = self.__class__.objects._collection
+            collection = self._get_collection()
             if created:
                 if force_insert:
-                    object_id = collection.insert(doc, safe=safe, **write_options)
+                    object_id = collection.insert(doc, safe=safe,
+                                                  **write_options)
                 else:
-                    object_id = collection.save(doc, safe=safe, **write_options)
+                    object_id = collection.save(doc, safe=safe,
+                                                **write_options)
             else:
                 object_id = doc['_id']
                 updates, removals = self._delta()

                 # Need to add shard key to query, or you get an error
                 select_dict = {'_id': object_id}
                 shard_key = self.__class__._meta.get('shard_key', tuple())
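A sketch of the replica-acknowledged save the reworded docstring describes (post is any Document instance with unsaved changes):

    post.save(write_options={'w': 2, 'fsync': True})   # ack from two members, fsync on primary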
@@ -202,13 +231,28 @@ class Document(BaseDocument):
                     actual_key = self._db_field_map.get(k, k)
                     select_dict[actual_key] = doc[actual_key]

-                upsert = self._created
-                if updates:
-                    collection.update(select_dict, {"$set": updates}, upsert=upsert, safe=safe, **write_options)
-                if removals:
-                    collection.update(select_dict, {"$unset": removals}, upsert=upsert, safe=safe, **write_options)
+                def is_new_object(last_error):
+                    if last_error is not None:
+                        updated = last_error.get("updatedExisting")
+                        if updated is not None:
+                            return not updated
+                    return created
+
+                upsert = self._created
+                update_query = {}
+
+                if updates:
+                    update_query["$set"] = updates
+                if removals:
+                    update_query["$unset"] = removals
+                if updates or removals:
+                    last_error = collection.update(select_dict, update_query,
+                                                   upsert=upsert, safe=safe, **write_options)
+                    created = is_new_object(last_error)

-            cascade = self._meta.get('cascade', True) if cascade is None else cascade
+            warn_cascade = not cascade and 'cascade' not in self._meta
+            cascade = (self._meta.get('cascade', True)
+                       if cascade is None else cascade)
             if cascade:
                 kwargs = {
                     "safe": safe,
@@ -220,38 +264,73 @@ class Document(BaseDocument):
                 if cascade_kwargs:  # Allow granular control over cascades
                     kwargs.update(cascade_kwargs)
                 kwargs['_refs'] = _refs
-                self.cascade_save(**kwargs)
+                self.cascade_save(warn_cascade=warn_cascade, **kwargs)

         except pymongo.errors.OperationFailure, err:
             message = 'Could not save document (%s)'
-            if u'duplicate key' in unicode(err):
+            if re.match('^E1100[01] duplicate key', unicode(err)):
+                # E11000 - duplicate key error index
+                # E11001 - duplicate key on update
                 message = u'Tried to save duplicate unique keys (%s)'
+                raise NotUniqueError(message % unicode(err))
             raise OperationError(message % unicode(err))
         id_field = self._meta['id_field']
+        if id_field not in self._meta.get('shard_key', []):
             self[id_field] = self._fields[id_field].to_python(object_id)

-        self._changed_fields = []
+        self._clear_changed_fields()
         self._created = False
         signals.post_save.send(self.__class__, document=self, created=created)
         return self

-    def cascade_save(self, *args, **kwargs):
-        """Recursively saves any references / generic references on an object"""
-        from fields import ReferenceField, GenericReferenceField
+    def cascade_save(self, warn_cascade=None, *args, **kwargs):
+        """Recursively saves any references /
+        generic references on an objects"""
+        import fields
         _refs = kwargs.get('_refs', []) or []

         for name, cls in self._fields.items():
-            if not isinstance(cls, (ReferenceField, GenericReferenceField)):
+            if not isinstance(cls, (fields.ReferenceField,
+                                    fields.GenericReferenceField)):
                 continue

             ref = getattr(self, name)
-            if not ref:
+            if not ref or isinstance(ref, DBRef):
                 continue

+            if not getattr(ref, '_changed_fields', True):
+                continue
+
             ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data))
             if ref and ref_id not in _refs:
+                if warn_cascade:
+                    msg = ("Cascading saves will default to off in 0.8, "
+                           "please explicitly set `.save(cascade=True)`")
+                    warnings.warn(msg, FutureWarning)
                 _refs.append(ref_id)
                 kwargs["_refs"] = _refs
                 ref.save(**kwargs)
                 ref._changed_fields = []

+    @property
+    def _qs(self):
+        """
+        Returns the queryset to use for updating / reloading / deletions
+        """
+        if not hasattr(self, '__objects'):
+            self.__objects = QuerySet(self, self._get_collection())
+        return self.__objects
+
+    @property
+    def _object_key(self):
+        """Dict to identify object in collection
+        """
+        select_dict = {'pk': self.pk}
+        shard_key = self.__class__._meta.get('shard_key', tuple())
+        for k in shard_key:
+            select_dict[k] = getattr(self, k)
+        return select_dict
+
     def update(self, **kwargs):
         """Performs an update on the :class:`~mongoengine.Document`
         A convenience wrapper to :meth:`~mongoengine.QuerySet.update`.
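Duplicate-key failures are now matched by error code (E11000/E11001) and raised as the more specific NotUniqueError, a subclass of OperationError defined in the new errors.py at the end of this diff. Existing `except OperationError` handlers keep working; new code can be more precise, assuming the exception is re-exported at package level as the __all__ change suggests:

    from mongoengine import NotUniqueError

    try:
        User(username='bob').save()
    except NotUniqueError:
        ...   # username already taken (unique index)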
@@ -263,11 +342,7 @@ class Document(BaseDocument):
             raise OperationError('attempt to update a document not yet saved')

         # Need to add shard key to query, or you get an error
-        select_dict = {'pk': self.pk}
-        shard_key = self.__class__._meta.get('shard_key', tuple())
-        for k in shard_key:
-            select_dict[k] = getattr(self, k)
-        return self.__class__.objects(**select_dict).update_one(**kwargs)
+        return self._qs.filter(**self._object_key).update_one(**kwargs)

     def delete(self, safe=False):
         """Delete the :class:`~mongoengine.Document` from the database. This
@@ -278,26 +353,72 @@ class Document(BaseDocument):
         signals.pre_delete.send(self.__class__, document=self)

         try:
-            for field_name in self._meta['proxy_fields']:
-                proxy_class = self._meta['proxy_fields'][field_name]
-                if hasattr(proxy_class, 'delete'):
-                    proxy = getattr(self, field_name)
-                    proxy.delete()
-            self.__class__.objects(pk=self.pk).delete(safe=safe)
+            self._qs.filter(**self._object_key).delete(safe=safe)
         except pymongo.errors.OperationFailure, err:
             message = u'Could not delete document (%s)' % err.message
             raise OperationError(message)

         signals.post_delete.send(self.__class__, document=self)

+    def switch_db(self, db_alias):
+        """
+        Temporarily switch the database for a document instance.
+
+        Only really useful for archiving off data and calling `save()`::
+
+            user = User.objects.get(id=user_id)
+            user.switch_db('archive-db')
+            user.save()
+
+        If you need to read from another database see
+        :class:`~mongoengine.context_managers.switch_db`
+
+        :param db_alias: The database alias to use for saving the document
+        """
+        with switch_db(self.__class__, db_alias) as cls:
+            collection = cls._get_collection()
+            db = cls._get_db
+        self._get_collection = lambda: collection
+        self._get_db = lambda: db
+        self._collection = collection
+        self._created = True
+        self.__objects = self._qs
+        self.__objects._collection_obj = collection
+        return self
+
+    def switch_collection(self, collection_name):
+        """
+        Temporarily switch the collection for a document instance.
+
+        Only really useful for archiving off data and calling `save()`::
+
+            user = User.objects.get(id=user_id)
+            user.switch_collection('old-users')
+            user.save()
+
+        If you need to read from another database see
+        :class:`~mongoengine.context_managers.switch_collection`
+
+        :param collection_name: The database alias to use for saving the
+            document
+        """
+        with switch_collection(self.__class__, collection_name) as cls:
+            collection = cls._get_collection()
+        self._get_collection = lambda: collection
+        self._collection = collection
+        self._created = True
+        self.__objects = self._qs
+        self.__objects._collection_obj = collection
+        return self
+
     def select_related(self, max_depth=1):
         """Handles dereferencing of :class:`~bson.dbref.DBRef` objects to
         a maximum depth in order to cut down the number queries to mongodb.

         .. versionadded:: 0.5
         """
-        from dereference import DeReference
-        self._data = DeReference()(self._data, max_depth)
+        import dereference
+        self._data = dereference.DeReference()(self._data, max_depth)
         return self

     def reload(self, max_depth=1):
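The instance-level switch_db() complements the switch_db context manager: the context manager rebinds the class for reads, while the method rebinds one instance for a subsequent save(). A sketch, assuming both aliases were registered with connect():

    from mongoengine import connect

    connect('app', alias='default')
    connect('archive', alias='archive-db')

    user = User.objects.get(id=user_id)
    user.switch_db('archive-db')   # save() now writes to the archive alias
    user.save()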
@@ -307,15 +428,21 @@ class Document(BaseDocument):
         .. versionchanged:: 0.6  Now chainable
         """
         id_field = self._meta['id_field']
-        obj = self.__class__.objects(
+        obj = self._qs.filter(
             **{id_field: self[id_field]}
-        ).first().select_related(max_depth=max_depth)
+        ).limit(1).select_related(max_depth=max_depth)
+        if obj:
+            obj = obj[0]
+        else:
+            msg = "Reloaded document has been deleted"
+            raise OperationError(msg)
         for field in self._fields:
             setattr(self, field, self._reload(field, obj[field]))
         if self._dynamic:
             for name in self._dynamic_fields.keys():
                 setattr(self, name, self._reload(name, obj._data[name]))
         self._changed_fields = obj._changed_fields
+        self._created = False
         return obj

     def _reload(self, key, value):
@@ -345,24 +472,93 @@ class Document(BaseDocument):
         """This method registers the delete rules to apply when removing this
         object.
         """
-        cls._meta['delete_rules'][(document_cls, field_name)] = rule
+        classes = [get_document(class_name)
+                   for class_name in cls._subclasses
+                   if class_name != cls.__name__] + [cls]
+        documents = [get_document(class_name)
+                     for class_name in document_cls._subclasses
+                     if class_name != document_cls.__name__] + [document_cls]

-    @classmethod
-    def register_proxy_field(cls, field_name, proxy_class):
-        """This method registers fields with proxy classes to delete them when
-        removing this object.
-        """
-        cls._meta['proxy_fields'][field_name] = proxy_class
+        for cls in classes:
+            for document_cls in documents:
+                delete_rules = cls._meta.get('delete_rules') or {}
+                delete_rules[(document_cls, field_name)] = rule
+                cls._meta['delete_rules'] = delete_rules

     @classmethod
     def drop_collection(cls):
         """Drops the entire collection associated with this
         :class:`~mongoengine.Document` type from the database.
         """
-        from mongoengine.queryset import QuerySet
+        cls._collection = None
         db = cls._get_db()
         db.drop_collection(cls._get_collection_name())
-        QuerySet._reset_already_indexed(cls)
+
+    @classmethod
+    def ensure_index(cls, key_or_list, drop_dups=False, background=False,
+                     **kwargs):
+        """Ensure that the given indexes are in place.
+
+        :param key_or_list: a single index key or a list of index keys (to
+            construct a multi-field index); keys may be prefixed with a **+**
+            or a **-** to determine the index ordering
+        """
+        index_spec = cls._build_index_spec(key_or_list)
+        index_spec = index_spec.copy()
+        fields = index_spec.pop('fields')
+        index_spec['drop_dups'] = drop_dups
+        index_spec['background'] = background
+        index_spec.update(kwargs)
+
+        return cls._get_collection().ensure_index(fields, **index_spec)
+
+    @classmethod
+    def ensure_indexes(cls):
+        """Checks the document meta data and ensures all the indexes exist.
+
+        .. note:: You can disable automatic index creation by setting
+            `auto_create_index` to False in the documents meta data
+        """
+        background = cls._meta.get('index_background', False)
+        drop_dups = cls._meta.get('index_drop_dups', False)
+        index_opts = cls._meta.get('index_opts') or {}
+        index_cls = cls._meta.get('index_cls', True)
+
+        collection = cls._get_collection()
+
+        # determine if an index which we are creating includes
+        # _cls as its first field; if so, we can avoid creating
+        # an extra index on _cls, as mongodb will use the existing
+        # index to service queries against _cls
+        cls_indexed = False
+
+        def includes_cls(fields):
+            first_field = None
+            if len(fields):
+                if isinstance(fields[0], basestring):
+                    first_field = fields[0]
+                elif isinstance(fields[0], (list, tuple)) and len(fields[0]):
+                    first_field = fields[0][0]
+            return first_field == '_cls'
+
+        # Ensure document-defined indexes are created
+        if cls._meta['index_specs']:
+            index_spec = cls._meta['index_specs']
+            for spec in index_spec:
+                spec = spec.copy()
+                fields = spec.pop('fields')
+                cls_indexed = cls_indexed or includes_cls(fields)
+                opts = index_opts.copy()
+                opts.update(spec)
+                collection.ensure_index(fields, background=background,
+                                        drop_dups=drop_dups, **opts)
+
+        # If _cls is being used (for polymorphism), it needs an index,
+        # only if another index doesn't begin with _cls
+        if (index_cls and not cls_indexed and
+                cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) == True):
+            collection.ensure_index('_cls', background=background,
+                                    **index_opts)


 class DynamicDocument(Document):
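The new class-level ensure_index() accepts the same '+'/'-' ordering prefixes the docstring describes. A sketch with a hypothetical BlogPost document:

    class BlogPost(Document):
        title = StringField()
        created = DateTimeField()

    BlogPost.ensure_index(['+title', '-created'], background=True)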
@@ -371,14 +567,19 @@ class DynamicDocument(Document):
     way as an ordinary document but has expando style properties. Any data
     passed or set against the :class:`~mongoengine.DynamicDocument` that is
     not a field is automatically converted into a
-    :class:`~mongoengine.BaseDynamicField` and data can be attributed to that
+    :class:`~mongoengine.DynamicField` and data can be attributed to that
     field.

-    ..note::
+    .. note::
+
         There is one caveat on Dynamic Documents: fields cannot start with `_`
     """

+    # The __metaclass__ attribute is removed by 2to3 when running with Python3
+    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
+    my_metaclass = TopLevelDocumentMetaclass
     __metaclass__ = TopLevelDocumentMetaclass

     _dynamic = True

     def __delattr__(self, *args, **kwargs):
@@ -397,13 +598,23 @@ class DynamicEmbeddedDocument(EmbeddedDocument):
     information about dynamic documents.
     """

+    # The __metaclass__ attribute is removed by 2to3 when running with Python3
+    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
+    my_metaclass = DocumentMetaclass
     __metaclass__ = DocumentMetaclass

     _dynamic = True

     def __delattr__(self, *args, **kwargs):
         """Deletes the attribute by setting to None and allowing _delta to unset
         it"""
         field_name = args[0]
+        if field_name in self._fields:
+            default = self._fields[field_name].default
+            if callable(default):
+                default = default()
+            setattr(self, field_name, default)
+        else:
             setattr(self, field_name, None)
mongoengine/errors.py (new file, 124 lines)
@@ -0,0 +1,124 @@
+from collections import defaultdict
+
+from mongoengine.python_support import txt_type
+
+
+__all__ = ('NotRegistered', 'InvalidDocumentError', 'ValidationError')
+
+
+class NotRegistered(Exception):
+    pass
+
+
+class InvalidDocumentError(Exception):
+    pass
+
+
+class LookUpError(AttributeError):
+    pass
+
+
+class DoesNotExist(Exception):
+    pass
+
+
+class MultipleObjectsReturned(Exception):
+    pass
+
+
+class InvalidQueryError(Exception):
+    pass
+
+
+class OperationError(Exception):
+    pass
+
+
+class NotUniqueError(OperationError):
+    pass
+
+
+class ValidationError(AssertionError):
+    """Validation exception.
+
+    May represent an error validating a field or a
+    document containing fields with validation errors.
+
+    :ivar errors: A dictionary of errors for fields within this
+        document or list, or None if the error is for an
+        individual field.
+    """
+
+    errors = {}
+    field_name = None
+    _message = None
+
+    def __init__(self, message="", **kwargs):
+        self.errors = kwargs.get('errors', {})
+        self.field_name = kwargs.get('field_name')
+        self.message = message
+
+    def __str__(self):
+        return txt_type(self.message)
+
+    def __repr__(self):
+        return '%s(%s,)' % (self.__class__.__name__, self.message)
+
+    def __getattribute__(self, name):
+        message = super(ValidationError, self).__getattribute__(name)
+        if name == 'message':
+            if self.field_name:
+                message = '%s' % message
+            if self.errors:
+                message = '%s(%s)' % (message, self._format_errors())
+        return message
+
+    def _get_message(self):
+        return self._message
+
+    def _set_message(self, message):
+        self._message = message
+
+    message = property(_get_message, _set_message)
+
+    def to_dict(self):
+        """Returns a dictionary of all errors within a document
+
+        Keys are field names or list indices and values are the
+        validation error messages, or a nested dictionary of
+        errors for an embedded document or list.
+        """
+
+        def build_dict(source):
+            errors_dict = {}
+            if not source:
+                return errors_dict
+            if isinstance(source, dict):
+                for field_name, error in source.iteritems():
+                    errors_dict[field_name] = build_dict(error)
+            elif isinstance(source, ValidationError) and source.errors:
+                return build_dict(source.errors)
+            else:
+                return unicode(source)
+            return errors_dict
+
+        if not self.errors:
+            return {}
+        return build_dict(self.errors)
+
+    def _format_errors(self):
+        """Returns a string listing all errors within a document"""
+
+        def generate_key(value, prefix=''):
+            if isinstance(value, list):
+                value = ' '.join([generate_key(k) for k in value])
+            if isinstance(value, dict):
+                value = ' '.join(
+                    [generate_key(v, k) for k, v in value.iteritems()])
+
+            results = "%s.%s" % (prefix, value) if prefix else value
+            return results
+
+        error_dict = defaultdict(list)
+        for k, v in self.to_dict().iteritems():
+            error_dict[generate_key(v)].append(k)
+        return ' '.join(["%s: %s" % (k, v) for k, v in error_dict.iteritems()])
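A sketch of how the new errors module is typically consumed; the printed message is illustrative rather than guaranteed:

    from mongoengine import Document, StringField
    from mongoengine.errors import ValidationError

    class User(Document):
        name = StringField(required=True, max_length=3)

    try:
        User(name='too long').validate()
    except ValidationError, e:
        print e.to_dict()  # e.g. {'name': 'String value is too long'}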
@@ -1,19 +1,25 @@
 import datetime
-import time
 import decimal
-import gridfs
+import itertools
 import re
+import time
+import urllib2
+import urlparse
 import uuid
+import warnings
+from operator import itemgetter
+
+import gridfs
 from bson import Binary, DBRef, SON, ObjectId
+
+from mongoengine.errors import ValidationError
+from mongoengine.python_support import (PY3, bin_type, txt_type,
+                                        str_types, StringIO)
 from base import (BaseField, ComplexBaseField, ObjectIdField,
-                  ValidationError, get_document, BaseDocument)
+                  get_document, BaseDocument, ALLOW_INHERITANCE)
 from queryset import DO_NOTHING, QuerySet
 from document import Document, EmbeddedDocument
 from connection import get_db, DEFAULT_CONNECTION_NAME
-from operator import itemgetter
 
 
 try:
     from PIL import Image, ImageOps
@@ -21,16 +27,10 @@ except ImportError:
     Image = None
     ImageOps = None
 
-try:
-    from cStringIO import StringIO
-except ImportError:
-    from StringIO import StringIO
-
-
-__all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField',
+__all__ = ['StringField', 'IntField', 'LongField', 'FloatField', 'BooleanField',
            'DateTimeField', 'EmbeddedDocumentField', 'ListField', 'DictField',
            'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField',
-           'DecimalField', 'ComplexDateTimeField', 'URLField',
+           'DecimalField', 'ComplexDateTimeField', 'URLField', 'DynamicField',
            'GenericReferenceField', 'FileField', 'BinaryField',
            'SortedListField', 'EmailField', 'GeoPointField', 'ImageField',
            'SequenceField', 'UUIDField', 'GenericEmbeddedDocumentField']
@@ -49,10 +49,16 @@ class StringField(BaseField):
         super(StringField, self).__init__(**kwargs)
 
     def to_python(self, value):
-        return unicode(value)
+        if isinstance(value, unicode):
+            return value
+        try:
+            value = value.decode('utf-8')
+        except:
+            pass
+        return value
 
     def validate(self, value):
-        if not isinstance(value, (str, unicode)):
+        if not isinstance(value, basestring):
             self.error('StringField only accepts string values')
 
         if self.max_length is not None and len(value) > self.max_length:
@@ -97,25 +103,30 @@ class URLField(StringField):
     .. versionadded:: 0.3
     """
 
-    URL_REGEX = re.compile(
-        r'^https?://'
-        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?|'
-        r'localhost|'
-        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'
-        r'(?::\d+)?'
-        r'(?:/?|[/?]\S+)$', re.IGNORECASE
-    )
+    _URL_REGEX = re.compile(
+        r'^(?:http|ftp)s?://'  # http:// or https://
+        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain...
+        r'localhost|'  # localhost...
+        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # ...or ip
+        r'(?::\d+)?'  # optional port
+        r'(?:/?|[/?]\S+)$', re.IGNORECASE)
 
-    def __init__(self, verify_exists=False, **kwargs):
+    def __init__(self, verify_exists=False, url_regex=None, **kwargs):
         self.verify_exists = verify_exists
+        self.url_regex = url_regex or self._URL_REGEX
         super(URLField, self).__init__(**kwargs)
 
     def validate(self, value):
-        if not URLField.URL_REGEX.match(value):
+        if not self.url_regex.match(value):
             self.error('Invalid URL: %s' % value)
+            return
 
         if self.verify_exists:
-            import urllib2
+            warnings.warn(
+                "The URLField verify_exists argument has intractable security "
+                "and performance issues. Accordingly, it has been deprecated.",
+                DeprecationWarning
+            )
             try:
                 request = urllib2.Request(value)
                 urllib2.urlopen(request)
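The new `url_regex` argument makes the validation pattern pluggable; a minimal sketch with a hypothetical https-only pattern:

    import re
    from mongoengine import Document, URLField

    class Bookmark(Document):
        link = URLField(url_regex=re.compile(r'^https://\S+$',
                                             re.IGNORECASE))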
@@ -132,16 +143,17 @@ class EmailField(StringField):
     EMAIL_REGEX = re.compile(
         r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*"  # dot-atom
         r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-011\013\014\016-\177])*"'  # quoted-string
-        r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?$', re.IGNORECASE  # domain
+        r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,253}[A-Z0-9])?\.)+[A-Z]{2,6}\.?$', re.IGNORECASE  # domain
     )
 
     def validate(self, value):
         if not EmailField.EMAIL_REGEX.match(value):
             self.error('Invalid Mail-address: %s' % value)
+        super(EmailField, self).validate(value)
 
 
 class IntField(BaseField):
-    """An integer field.
+    """A 32-bit integer field.
     """
 
     def __init__(self, min_value=None, max_value=None, **kwargs):
@@ -149,7 +161,11 @@ class IntField(BaseField):
         super(IntField, self).__init__(**kwargs)
 
     def to_python(self, value):
-        return int(value)
+        try:
+            value = int(value)
+        except ValueError:
+            pass
+        return value
 
     def validate(self, value):
         try:
@@ -164,9 +180,46 @@ class IntField(BaseField):
             self.error('Integer value is too large')
 
     def prepare_query_value(self, op, value):
+        if value is None:
+            return value
+
         return int(value)
 
 
+class LongField(BaseField):
+    """A 64-bit integer field.
+    """
+
+    def __init__(self, min_value=None, max_value=None, **kwargs):
+        self.min_value, self.max_value = min_value, max_value
+        super(LongField, self).__init__(**kwargs)
+
+    def to_python(self, value):
+        try:
+            value = long(value)
+        except ValueError:
+            pass
+        return value
+
+    def validate(self, value):
+        try:
+            value = long(value)
+        except:
+            self.error('%s could not be converted to long' % value)
+
+        if self.min_value is not None and value < self.min_value:
+            self.error('Long value is too small')
+
+        if self.max_value is not None and value > self.max_value:
+            self.error('Long value is too large')
+
+    def prepare_query_value(self, op, value):
+        if value is None:
+            return value
+
+        return long(value)
+
+
 class FloatField(BaseField):
     """A floating point number field.
     """
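A quick sketch of the new LongField (hypothetical model; values are coerced through long() and bounded like IntField):

    from mongoengine import Document, LongField

    class Download(Document):
        bytes_served = LongField(min_value=0)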
@@ -176,13 +229,17 @@ class FloatField(BaseField):
         super(FloatField, self).__init__(**kwargs)
 
     def to_python(self, value):
-        return float(value)
+        try:
+            value = float(value)
+        except ValueError:
+            pass
+        return value
 
     def validate(self, value):
         if isinstance(value, int):
             value = float(value)
         if not isinstance(value, float):
-            self.error('FoatField only accepts float values')
+            self.error('FloatField only accepts float values')
 
         if self.min_value is not None and value < self.min_value:
             self.error('Float value is too small')
@@ -191,6 +248,9 @@ class FloatField(BaseField):
             self.error('Float value is too large')
 
     def prepare_query_value(self, op, value):
+        if value is None:
+            return value
+
         return float(value)
@@ -205,9 +265,14 @@ class DecimalField(BaseField):
         super(DecimalField, self).__init__(**kwargs)
 
     def to_python(self, value):
+        original_value = value
         if not isinstance(value, basestring):
             value = unicode(value)
-        return decimal.Decimal(value)
+        try:
+            value = decimal.Decimal(value)
+        except ValueError:
+            return original_value
+        return value
 
     def to_mongo(self, value):
         return unicode(value)
@@ -235,7 +300,11 @@ class BooleanField(BaseField):
     """
 
     def to_python(self, value):
-        return bool(value)
+        try:
+            value = bool(value)
+        except ValueError:
+            pass
+        return value
 
     def validate(self, value):
         if not isinstance(value, bool):
@@ -265,6 +334,8 @@ class DateTimeField(BaseField):
             return value
         if isinstance(value, datetime.date):
             return datetime.datetime(value.year, value.month, value.day)
+        if callable(value):
+            return value()
 
         # Attempt to parse a datetime:
         # value = smart_str(value)
@@ -279,16 +350,16 @@ class DateTimeField(BaseField):
             usecs = 0
         kwargs = {'microsecond': usecs}
         try:  # Seconds are optional, so try converting seconds first.
-            return datetime.datetime(*time.strptime(value, '%Y-%m-%d %H:%M:%S')[:6],
-                                     **kwargs)
+            return datetime.datetime(*time.strptime(value,
+                                     '%Y-%m-%d %H:%M:%S')[:6], **kwargs)
         except ValueError:
             try:  # Try without seconds.
-                return datetime.datetime(*time.strptime(value, '%Y-%m-%d %H:%M')[:5],
-                                         **kwargs)
+                return datetime.datetime(*time.strptime(value,
+                                         '%Y-%m-%d %H:%M')[:5], **kwargs)
             except ValueError:  # Try without hour/minutes/seconds.
                 try:
-                    return datetime.datetime(*time.strptime(value, '%Y-%m-%d')[:3],
-                                             **kwargs)
+                    return datetime.datetime(*time.strptime(value,
+                                             '%Y-%m-%d')[:3], **kwargs)
                 except ValueError:
                     return None
@@ -366,21 +437,29 @@ class ComplexDateTimeField(StringField):
         data = super(ComplexDateTimeField, self).__get__(instance, owner)
         if data == None:
             return datetime.datetime.now()
+        if isinstance(data, datetime.datetime):
+            return data
         return self._convert_from_string(data)
 
     def __set__(self, instance, value):
-        value = self._convert_from_datetime(value)
+        value = self._convert_from_datetime(value) if value else value
         return super(ComplexDateTimeField, self).__set__(instance, value)
 
     def validate(self, value):
+        value = self.to_python(value)
         if not isinstance(value, datetime.datetime):
             self.error('Only datetime objects may be used in a '
                        'ComplexDateTimeField')
 
     def to_python(self, value):
+        original_value = value
+        try:
-        return self._convert_from_string(value)
+            return self._convert_from_string(value)
+        except:
+            return original_value
 
     def to_mongo(self, value):
+        value = self.to_python(value)
         return self._convert_from_datetime(value)
 
     def prepare_query_value(self, op, value):
@@ -419,7 +498,7 @@ class EmbeddedDocumentField(BaseField):
             return value
         return self.document_type.to_mongo(value)
 
-    def validate(self, value):
+    def validate(self, value, clean=True):
         """Make sure that the document instance is an instance of the
         EmbeddedDocument subclass provided when the document was defined.
         """
@@ -427,7 +506,7 @@ class EmbeddedDocumentField(BaseField):
         if not isinstance(value, self.document_type):
             self.error('Invalid embedded document instance provided to an '
                        'EmbeddedDocumentField')
-        self.document_type.validate(value)
+        self.document_type.validate(value, clean)
 
     def lookup_member(self, member_name):
         return self.document_type._fields.get(member_name)
@@ -442,7 +521,8 @@ class GenericEmbeddedDocumentField(BaseField):
 
     Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`.
 
-    ..note :: You can use the choices param to limit the acceptable
+    .. note ::
+        You can use the choices param to limit the acceptable
         EmbeddedDocument types
     """
 
@@ -456,12 +536,12 @@ class GenericEmbeddedDocumentField(BaseField):
 
         return value
 
-    def validate(self, value):
+    def validate(self, value, clean=True):
         if not isinstance(value, EmbeddedDocument):
             self.error('Invalid embedded document instance provided to an '
                        'GenericEmbeddedDocumentField')
 
-        value.validate()
+        value.validate(clean=clean)
 
     def to_mongo(self, document):
         if document is None:
@@ -473,17 +553,68 @@ class GenericEmbeddedDocumentField(BaseField):
         return data
 
 
+class DynamicField(BaseField):
+    """A truly dynamic field type capable of handling different and varying
+    types of data.
+
+    Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data"""
+
+    def to_mongo(self, value):
+        """Convert a Python type to a MongoDB-compatible type.
+        """
+
+        if isinstance(value, basestring):
+            return value
+
+        if hasattr(value, 'to_mongo'):
+            cls = value.__class__
+            val = value.to_mongo()
+            # If it's a document that's not inherited, add _cls
+            if (isinstance(value, (Document, EmbeddedDocument))):
+                val['_cls'] = cls.__name__
+            return val
+
+        if not isinstance(value, (dict, list, tuple)):
+            return value
+
+        is_list = False
+        if not hasattr(value, 'items'):
+            is_list = True
+            value = dict([(k, v) for k, v in enumerate(value)])
+
+        data = {}
+        for k, v in value.iteritems():
+            data[k] = self.to_mongo(v)
+
+        value = data
+        if is_list:  # Convert back to a list
+            value = [v for k, v in sorted(data.iteritems(), key=itemgetter(0))]
+        return value
+
+    def lookup_member(self, member_name):
+        return member_name
+
+    def prepare_query_value(self, op, value):
+        if isinstance(value, basestring):
+            from mongoengine.fields import StringField
+            return StringField().prepare_query_value(op, value)
+        return self.to_mongo(value)
+
+    def validate(self, value, clean=True):
+        if hasattr(value, "validate"):
+            value.validate(clean=clean)
+
+
 class ListField(ComplexBaseField):
     """A list field that wraps a standard field, allowing multiple instances
     of the field to be used as a list in the database.
 
+    If using with ReferenceFields see: :ref:`one-to-many-with-listfields`
+
     .. note::
         Required means it cannot be empty - as the default for ListFields is []
     """
 
-    # ListFields cannot be indexed with _types - MongoDB doesn't support this
-    _index_with_types = False
-
     def __init__(self, field=None, **kwargs):
         self.field = field
         kwargs.setdefault('default', lambda: [])
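The DynamicField added above can also be declared explicitly when one attribute must accept varying types; a sketch with hypothetical documents:

    from mongoengine import Document, DynamicField, StringField

    class Setting(Document):
        key = StringField(required=True)
        value = DynamicField()  # string, number, list, dict, document, ...

    Setting(key='retries', value=3).save()
    Setting(key='hosts', value=['db1', 'db2']).save()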
@@ -535,7 +666,8 @@ class SortedListField(ListField):
     def to_mongo(self, value):
         value = super(SortedListField, self).to_mongo(value)
         if self._ordering is not None:
-            return sorted(value, key=itemgetter(self._ordering), reverse=self._order_reverse)
+            return sorted(value, key=itemgetter(self._ordering),
+                          reverse=self._order_reverse)
         return sorted(value, reverse=self._order_reverse)
@@ -565,7 +697,9 @@ class DictField(ComplexBaseField):
             self.error('Only dictionaries may be used in a DictField')
 
         if any(k for k in value.keys() if not isinstance(k, basestring)):
-            self.error('Invalid dictionary key - documents must have only string keys')
+            msg = ("Invalid dictionary key - documents must "
+                   "have only string keys")
+            self.error(msg)
         if any(('.' in k or '$' in k) for k in value.keys()):
             self.error('Invalid dictionary key name - keys may not contain "."'
                        ' or "$" characters')
@@ -581,7 +715,6 @@ class DictField(ComplexBaseField):
 
         if op in match_operators and isinstance(value, basestring):
             return StringField().prepare_query_value(op, value)
-
         return super(DictField, self).prepare_query_value(op, value)
@@ -615,6 +748,8 @@ class ReferenceField(BaseField):
       * NULLIFY  - Updates the reference to null.
       * CASCADE  - Deletes the documents associated with the reference.
      * DENY     - Prevent the deletion of the reference object.
+      * PULL     - Pull the reference from a :class:`~mongoengine.ListField`
+                   of references
 
     Alternative syntax for registering delete rules (useful when implementing
     bi-directional delete rules)
@@ -627,12 +762,19 @@ class ReferenceField(BaseField):
 
         Bar.register_delete_rule(Foo, 'bar', NULLIFY)
 
+    .. note ::
+        `reverse_delete_rules` do not trigger pre / post delete signals.
+
     .. versionchanged:: 0.5 added `reverse_delete_rule`
     """
 
-    def __init__(self, document_type, reverse_delete_rule=DO_NOTHING, **kwargs):
+    def __init__(self, document_type, dbref=None,
+                 reverse_delete_rule=DO_NOTHING, **kwargs):
         """Initialises the Reference Field.
 
+        :param dbref: Store the reference as :class:`~pymongo.dbref.DBRef`
+            or as the :class:`~pymongo.objectid.ObjectId`.id .
         :param reverse_delete_rule: Determines what to do when the referring
           object is deleted
         """
@@ -640,6 +782,13 @@ class ReferenceField(BaseField):
         if not issubclass(document_type, (Document, basestring)):
             self.error('Argument to ReferenceField constructor must be a '
                        'document class or a string')
+
+        if dbref is None:
+            msg = ("ReferenceFields will default to using ObjectId "
+                   "in 0.8, set dbref=True if this isn't desired")
+            warnings.warn(msg, FutureWarning)
+
+        self.dbref = dbref if dbref is not None else True  # To change in 0.8
         self.document_type_obj = document_type
         self.reverse_delete_rule = reverse_delete_rule
         super(ReferenceField, self).__init__(**kwargs)
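Passing `dbref` explicitly opts in to the 0.8 storage format and avoids the FutureWarning added above; a sketch with hypothetical models:

    from mongoengine import Document, ReferenceField, StringField

    class Org(Document):
        name = StringField()

    class Employee(Document):
        org = ReferenceField(Org, dbref=False)  # store a bare ObjectId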
@@ -662,8 +811,9 @@ class ReferenceField(BaseField):
 
         # Get value from document instance if available
         value = instance._data.get(self.name)
+        self._auto_dereference = instance._fields[self.name]._auto_dereference
         # Dereference DBRefs
-        if isinstance(value, (DBRef)):
+        if self._auto_dereference and isinstance(value, DBRef):
             value = self.document_type._get_db().dereference(value)
             if value is not None:
                 instance._data[self.name] = self.document_type._from_son(value)
@@ -672,6 +822,10 @@ class ReferenceField(BaseField):
 
     def to_mongo(self, document):
         if isinstance(document, DBRef):
+            if not self.dbref:
+                return document.id
+            return document
+        elif not self.dbref and isinstance(document, basestring):
             return document
 
         id_field_name = self.document_type._meta['id_field']
@@ -679,7 +833,7 @@ class ReferenceField(BaseField):
 
         if isinstance(document, Document):
             # We need the id from the saved object to create the DBRef
-            id_ = document.id
+            id_ = document.pk
             if id_ is None:
                 self.error('You can only reference documents once they have'
                            ' been saved to the database')
@@ -687,18 +841,30 @@ class ReferenceField(BaseField):
             id_ = document
 
         id_ = id_field.to_mongo(id_)
-        collection = self.document_type._get_collection_name()
-        return DBRef(collection, id_)
+        if self.dbref:
+            collection = self.document_type._get_collection_name()
+            return DBRef(collection, id_)
+
+        return id_
+
+    def to_python(self, value):
+        """Convert a MongoDB-compatible type to a Python type.
+        """
+        if (not self.dbref and
+                not isinstance(value, (DBRef, Document, EmbeddedDocument))):
+            collection = self.document_type._get_collection_name()
+            value = DBRef(collection, self.document_type.id.to_python(value))
+        return value
 
     def prepare_query_value(self, op, value):
         if value is None:
             return None
 
         return self.to_mongo(value)
 
     def validate(self, value):
-
         if not isinstance(value, (self.document_type, DBRef)):
-            self.error('A ReferenceField only accepts DBRef')
+            self.error("A ReferenceField only accepts DBRef or documents")
 
         if isinstance(value, Document) and value.id is None:
             self.error('You can only reference documents once they have been '
@@ -712,10 +878,12 @@ class GenericReferenceField(BaseField):
     """A reference to *any* :class:`~mongoengine.document.Document` subclass
     that will be automatically dereferenced on access (lazily).
 
-    ..note :: Any documents used as a generic reference must be registered in the
-        document registry. Importing the model will automatically register it.
+    .. note ::
+        * Any documents used as a generic reference must be registered in the
+          document registry. Importing the model will automatically register
+          it.
 
-    ..note :: You can use the choices param to limit the acceptable Document types
+        * You can use the choices param to limit the acceptable Document types
 
     .. versionadded:: 0.3
     """
@@ -725,17 +893,22 @@ class GenericReferenceField(BaseField):
             return self
 
         value = instance._data.get(self.name)
-        if isinstance(value, (dict, SON)):
+        self._auto_dereference = instance._fields[self.name]._auto_dereference
+        if self._auto_dereference and isinstance(value, (dict, SON)):
             instance._data[self.name] = self.dereference(value)
 
         return super(GenericReferenceField, self).__get__(instance, owner)
 
     def validate(self, value):
-        if not isinstance(value, (Document, DBRef)):
-            self.error('GenericReferences can only contain documents')
+        if not isinstance(value, (Document, DBRef, dict, SON)):
+            self.error('GenericReferences can only contain documents')
+
+        if isinstance(value, (dict, SON)):
+            if '_ref' not in value or '_cls' not in value:
+                self.error('GenericReferences can only contain documents')
 
         # We need the id from the saved object to create the DBRef
-        if isinstance(value, Document) and value.id is None:
+        elif isinstance(value, Document) and value.id is None:
             self.error('You can only reference documents once they have been'
                        ' saved to the database')
@@ -786,16 +959,20 @@ class BinaryField(BaseField):
         self.max_bytes = max_bytes
         super(BinaryField, self).__init__(**kwargs)
 
+    def __set__(self, instance, value):
+        """Handle bytearrays in python 3.1"""
+        if PY3 and isinstance(value, bytearray):
+            value = bin_type(value)
+        return super(BinaryField, self).__set__(instance, value)
+
     def to_mongo(self, value):
         return Binary(value)
 
-    def to_python(self, value):
-        # Returns str not unicode as this is binary data
-        return str(value)
-
     def validate(self, value):
-        if not isinstance(value, str):
-            self.error('BinaryField only accepts string values')
+        if not isinstance(value, (bin_type, txt_type, Binary)):
+            self.error("BinaryField only accepts instances of "
+                       "(%s, %s, Binary)" % (
+                       bin_type.__name__, txt_type.__name__))
 
         if self.max_bytes is not None and len(value) > self.max_bytes:
             self.error('Binary value is too long')
@@ -833,7 +1010,7 @@ class GridFSProxy(object):
         if name in attrs:
             return self.__getattribute__(name)
         obj = self.get()
-        if name in dir(obj):
+        if hasattr(obj, name):
             return getattr(obj, name)
         raise AttributeError
@@ -848,6 +1025,25 @@ class GridFSProxy(object):
         self_dict['_fs'] = None
         return self_dict
 
+    def __copy__(self):
+        copied = GridFSProxy()
+        copied.__dict__.update(self.__getstate__())
+        return copied
+
+    def __deepcopy__(self, memo):
+        return self.__copy__()
+
+    def __repr__(self):
+        return '<%s: %s>' % (self.__class__.__name__, self.grid_id)
+
+    def __eq__(self, other):
+        if isinstance(other, GridFSProxy):
+            return ((self.grid_id == other.grid_id) and
+                    (self.collection_name == other.collection_name) and
+                    (self.db_alias == other.db_alias))
+        else:
+            return False
+
     @property
     def fs(self):
         if not self._fs:
@@ -959,7 +1155,8 @@ class FileField(BaseField):
 
     def __set__(self, instance, value):
         key = self.name
-        if (hasattr(value, 'read') and not isinstance(value, GridFSProxy)) or isinstance(value, str):
+        if ((hasattr(value, 'read') and not
+             isinstance(value, GridFSProxy)) or isinstance(value, str_types)):
             # using "FileField() = file/string" notation
             grid_file = instance._data.get(self.name)
             # If a file already exists, delete it
@@ -1015,6 +1212,7 @@ class ImageGridFsProxy(GridFSProxy):
 
         try:
             img = Image.open(file_obj)
+            img_format = img.format
         except:
             raise ValidationError('Invalid image')
@@ -1049,20 +1247,20 @@ class ImageGridFsProxy(GridFSProxy):
 
         if thumbnail:
             thumb_id = self._put_thumbnail(thumbnail,
-                                           img.format)
+                                           img_format)
         else:
             thumb_id = None
 
         w, h = img.size
 
         io = StringIO()
-        img.save(io, img.format)
+        img.save(io, img_format)
         io.seek(0)
 
         return super(ImageGridFsProxy, self).put(io,
                                                  width=w,
                                                  height=h,
-                                                 format=img.format,
+                                                 format=img_format,
                                                  thumbnail_id=thumb_id,
                                                  **kwargs)
@@ -1148,11 +1346,15 @@ class ImageField(FileField):
         params_size = ('width', 'height', 'force')
         extra_args = dict(size=size, thumbnail_size=thumbnail_size)
         for att_name, att in extra_args.items():
-            if att and (isinstance(att, tuple) or isinstance(att, list)):
-                setattr(self, att_name, dict(
-                        map(None, params_size, att)))
-            else:
-                setattr(self, att_name, None)
+            value = None
+            if isinstance(att, (tuple, list)):
+                if PY3:
+                    value = dict(itertools.zip_longest(params_size, att,
+                                                       fillvalue=None))
+                else:
+                    value = dict(map(None, params_size, att))
+
+            setattr(self, att_name, value)
 
         super(ImageField, self).__init__(
             collection_name=collection_name,
@@ -1181,8 +1383,9 @@ class GeoPointField(BaseField):
             self.error('Both values in point must be float or int')
 
 
-class SequenceField(IntField):
-    """Provides a sequental counter (see http://www.mongodb.org/display/DOCS/Object+IDs#ObjectIDs-SequenceNumbers)
+class SequenceField(BaseField):
+    """Provides a sequential counter, see:
+    http://www.mongodb.org/display/DOCS/Object+IDs#ObjectIDs-SequenceNumbers
 
     .. note::
@@ -1192,38 +1395,55 @@ class SequenceField(IntField):
     cluster of machines, it is easier to create an object ID than have
     global, uniformly increasing sequence numbers.
 
+    Use any callable as `value_decorator` to transform calculated counter into
+    any value suitable for your needs, e.g. string or hexadecimal
+    representation of the default integer counter value.
+
     .. versionadded:: 0.5
+
+    .. versionchanged:: 0.8 added `value_decorator`
     """
-    def __init__(self, collection_name=None, db_alias = None, *args, **kwargs):
-        self.collection_name = collection_name or 'mongoengine.counters'
+
+    _auto_gen = True
+    COLLECTION_NAME = 'mongoengine.counters'
+    VALUE_DECORATOR = int
+
+    def __init__(self, collection_name=None, db_alias=None,
+                 sequence_name=None, value_decorator=None, *args, **kwargs):
+        self.collection_name = collection_name or self.COLLECTION_NAME
         self.db_alias = db_alias or DEFAULT_CONNECTION_NAME
+        self.sequence_name = sequence_name
+        self.value_decorator = (callable(value_decorator) and
+                                value_decorator or self.VALUE_DECORATOR)
         return super(SequenceField, self).__init__(*args, **kwargs)
 
-    def generate_new_value(self):
+    def generate(self):
         """
         Generate and Increment the counter
         """
-        sequence_id = "{0}.{1}".format(self.owner_document._get_collection_name(),
-                                       self.name)
-        collection = get_db(alias = self.db_alias )[self.collection_name]
+        sequence_name = self.get_sequence_name()
+        sequence_id = "%s.%s" % (sequence_name, self.name)
+        collection = get_db(alias=self.db_alias)[self.collection_name]
         counter = collection.find_and_modify(query={"_id": sequence_id},
                                              update={"$inc": {"next": 1}},
                                              new=True,
                                              upsert=True)
-        return counter['next']
+        return self.value_decorator(counter['next'])
+
+    def get_sequence_name(self):
+        if self.sequence_name:
+            return self.sequence_name
+        owner = self.owner_document
+        if issubclass(owner, Document):
+            return owner._get_collection_name()
+        else:
+            return ''.join('_%s' % c if c.isupper() else c
+                           for c in owner._class_name).strip('_').lower()
 
     def __get__(self, instance, owner):
-        if instance is None:
-            return self
-
-        if not instance._data:
-            return
-
-        value = instance._data.get(self.name)
-
-        if not value and instance._initialised:
-            value = self.generate_new_value()
+        value = super(SequenceField, self).__get__(instance, owner)
+        if value is None and instance._initialised:
+            value = self.generate()
             instance._data[self.name] = value
             instance._mark_as_changed(self.name)
 
@@ -1232,13 +1452,13 @@ class SequenceField(IntField):
     def __set__(self, instance, value):
 
         if value is None and instance._initialised:
-            value = self.generate_new_value()
+            value = self.generate()
 
         return super(SequenceField, self).__set__(instance, value)
 
     def to_python(self, value):
         if value is None:
-            value = self.generate_new_value()
+            value = self.generate()
         return value
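A sketch of the new knobs: `sequence_name` selects the shared counter document and `value_decorator` post-processes the generated integer (names and format hypothetical):

    from mongoengine import Document, SequenceField

    class Ticket(Document):
        number = SequenceField(sequence_name='ticket',
                               value_decorator=lambda n: '%04d' % n)

    Ticket().save()  # number becomes e.g. '0001'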
@@ -1247,17 +1467,44 @@ class UUIDField(BaseField):
 
     .. versionadded:: 0.6
     """
+    _binary = None
 
-    def __init__(self, **kwargs):
+    def __init__(self, binary=None, **kwargs):
+        """
+        Store UUID data in the database
+
+        :param binary: (optional) boolean store as binary.
+
+        .. versionchanged:: 0.6.19
+        """
+        if binary is None:
+            binary = False
+            msg = ("UUIDFields will soon default to store as binary, please "
+                   "configure binary=False if you wish to store as a string")
+            warnings.warn(msg, FutureWarning)
+        self._binary = binary
         super(UUIDField, self).__init__(**kwargs)
 
     def to_python(self, value):
-        if not isinstance(value, basestring):
-            value = unicode(value)
-        return uuid.UUID(value)
+        if not self._binary:
+            original_value = value
+            try:
+                if not isinstance(value, basestring):
+                    value = unicode(value)
+                return uuid.UUID(value)
+            except:
+                return original_value
+        return value
 
     def to_mongo(self, value):
-        return unicode(value)
+        if not self._binary:
+            return unicode(value)
+        return value
+
+    def prepare_query_value(self, op, value):
+        if value is None:
+            return None
+        return self.to_mongo(value)
 
     def validate(self, value):
         if not isinstance(value, uuid.UUID):
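To keep the existing string storage without the FutureWarning, pass `binary` explicitly; a sketch:

    from mongoengine import Document, UUIDField

    class Session(Document):
        token = UUIDField(binary=False)  # stored as unicode, as before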
mongoengine/python_support.py (new file, 61 lines)
@@ -0,0 +1,61 @@
"""Helper functions and types to aid with Python 2.5 - 3 support."""
|
||||||
|
|
||||||
|
import sys
|
||||||
|
|
||||||
|
PY3 = sys.version_info[0] == 3
|
||||||
|
PY25 = sys.version_info[:2] == (2, 5)
|
||||||
|
UNICODE_KWARGS = int(''.join([str(x) for x in sys.version_info[:3]])) > 264
|
||||||
|
|
||||||
|
if PY3:
|
||||||
|
import codecs
|
||||||
|
from io import BytesIO as StringIO
|
||||||
|
# return s converted to binary. b('test') should be equivalent to b'test'
|
||||||
|
def b(s):
|
||||||
|
return codecs.latin_1_encode(s)[0]
|
||||||
|
|
||||||
|
bin_type = bytes
|
||||||
|
txt_type = str
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
from cStringIO import StringIO
|
||||||
|
except ImportError:
|
||||||
|
from StringIO import StringIO
|
||||||
|
|
||||||
|
# Conversion to binary only necessary in Python 3
|
||||||
|
def b(s):
|
||||||
|
return s
|
||||||
|
|
||||||
|
bin_type = str
|
||||||
|
txt_type = unicode
|
||||||
|
|
||||||
|
str_types = (bin_type, txt_type)
|
||||||
|
|
||||||
|
if PY25:
|
||||||
|
def product(*args, **kwds):
|
||||||
|
pools = map(tuple, args) * kwds.get('repeat', 1)
|
||||||
|
result = [[]]
|
||||||
|
for pool in pools:
|
||||||
|
result = [x + [y] for x in result for y in pool]
|
||||||
|
for prod in result:
|
||||||
|
yield tuple(prod)
|
||||||
|
reduce = reduce
|
||||||
|
else:
|
||||||
|
from itertools import product
|
||||||
|
from functools import reduce
|
||||||
|
|
||||||
|
|
||||||
|
# For use with Python 2.5
|
||||||
|
# converts all keys from unicode to str for d and all nested dictionaries
|
||||||
|
def to_str_keys_recursive(d):
|
||||||
|
if isinstance(d, list):
|
||||||
|
for val in d:
|
||||||
|
if isinstance(val, (dict, list)):
|
||||||
|
to_str_keys_recursive(val)
|
||||||
|
elif isinstance(d, dict):
|
||||||
|
for key, val in d.items():
|
||||||
|
if isinstance(val, (dict, list)):
|
||||||
|
to_str_keys_recursive(val)
|
||||||
|
if isinstance(key, unicode):
|
||||||
|
d[str(key)] = d.pop(key)
|
||||||
|
else:
|
||||||
|
raise ValueError("non list/dict parameter not allowed")
|
||||||
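A small sketch of how the helpers above are meant to be used elsewhere in the codebase:

    from mongoengine.python_support import b, bin_type, str_types

    raw = b('payload')            # bytes on Python 3, str on Python 2
    assert isinstance(raw, bin_type)
    assert isinstance(raw, str_types)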
File diff suppressed because it is too large

mongoengine/queryset/__init__.py (new file, 11 lines)
@@ -0,0 +1,11 @@
+from mongoengine.errors import (DoesNotExist, MultipleObjectsReturned,
+                                InvalidQueryError, OperationError,
+                                NotUniqueError)
+from mongoengine.queryset.field_list import *
+from mongoengine.queryset.manager import *
+from mongoengine.queryset.queryset import *
+from mongoengine.queryset.transform import *
+from mongoengine.queryset.visitor import *
+
+__all__ = (field_list.__all__ + manager.__all__ + queryset.__all__ +
+           transform.__all__ + visitor.__all__)
mongoengine/queryset/field_list.py (new file, 70 lines)
@@ -0,0 +1,70 @@
+__all__ = ('QueryFieldList',)
+
+
+class QueryFieldList(object):
+    """Object that handles combinations of .only() and .exclude() calls"""
+    ONLY = 1
+    EXCLUDE = 0
+
+    def __init__(self, fields=[], value=ONLY, always_include=[]):
+        self.value = value
+        self.fields = set(fields)
+        self.always_include = set(always_include)
+        self._id = None
+        self.slice = {}
+
+    def __add__(self, f):
+        if isinstance(f.value, dict):
+            for field in f.fields:
+                self.slice[field] = f.value
+            if not self.fields:
+                self.fields = f.fields
+        elif not self.fields:
+            self.fields = f.fields
+            self.value = f.value
+            self.slice = {}
+        elif self.value is self.ONLY and f.value is self.ONLY:
+            self._clean_slice()
+            self.fields = self.fields.intersection(f.fields)
+        elif self.value is self.EXCLUDE and f.value is self.EXCLUDE:
+            self.fields = self.fields.union(f.fields)
+            self._clean_slice()
+        elif self.value is self.ONLY and f.value is self.EXCLUDE:
+            self.fields -= f.fields
+            self._clean_slice()
+        elif self.value is self.EXCLUDE and f.value is self.ONLY:
+            self.value = self.ONLY
+            self.fields = f.fields - self.fields
+            self._clean_slice()
+
+        if '_id' in f.fields:
+            self._id = f.value
+
+        if self.always_include:
+            if self.value is self.ONLY and self.fields:
+                self.fields = self.fields.union(self.always_include)
+            else:
+                self.fields -= self.always_include
+        return self
+
+    def __nonzero__(self):
+        return bool(self.fields)
+
+    def as_dict(self):
+        field_list = dict((field, self.value) for field in self.fields)
+        if self.slice:
+            field_list.update(self.slice)
+        if self._id is not None:
+            field_list['_id'] = self._id
+        return field_list
+
+    def reset(self):
+        self.fields = set([])
+        self.slice = {}
+        self.value = self.ONLY
+
+    def _clean_slice(self):
+        if self.slice:
+            for field in set(self.slice.keys()) - self.fields:
+                del self.slice[field]
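QueryFieldList.__add__ encodes the merge rules for chained .only() / .exclude() calls; for example, combining two ONLY lists intersects them. A direct sketch:

    from mongoengine.queryset.field_list import QueryFieldList

    fields = QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY)
    fields += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
    print fields.as_dict()  # {'b': 1}, the intersection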
mongoengine/queryset/manager.py (new file, 57 lines)
@@ -0,0 +1,57 @@
from functools import partial
|
||||||
|
from mongoengine.queryset.queryset import QuerySet
|
||||||
|
|
||||||
|
__all__ = ('queryset_manager', 'QuerySetManager')
|
||||||
|
|
||||||
|
|
||||||
|
class QuerySetManager(object):
|
||||||
|
"""
|
||||||
|
The default QuerySet Manager.
|
||||||
|
|
||||||
|
Custom QuerySet Manager functions can extend this class and users can
|
||||||
|
add extra queryset functionality. Any custom manager methods must accept a
|
||||||
|
:class:`~mongoengine.Document` class as its first argument, and a
|
||||||
|
:class:`~mongoengine.queryset.QuerySet` as its second argument.
|
||||||
|
|
||||||
|
The method function should return a :class:`~mongoengine.queryset.QuerySet`
|
||||||
|
, probably the same one that was passed in, but modified in some way.
|
||||||
|
"""
|
||||||
|
|
||||||
|
get_queryset = None
|
||||||
|
default = QuerySet
|
||||||
|
|
||||||
|
def __init__(self, queryset_func=None):
|
||||||
|
if queryset_func:
|
||||||
|
self.get_queryset = queryset_func
|
||||||
|
|
||||||
|
def __get__(self, instance, owner):
|
||||||
|
"""Descriptor for instantiating a new QuerySet object when
|
||||||
|
Document.objects is accessed.
|
||||||
|
"""
|
||||||
|
if instance is not None:
|
||||||
|
# Document class being used rather than a document object
|
||||||
|
return self
|
||||||
|
|
||||||
|
# owner is the document that contains the QuerySetManager
|
||||||
|
queryset_class = owner._meta.get('queryset_class', self.default)
|
||||||
|
queryset = queryset_class(owner, owner._get_collection())
|
||||||
|
if self.get_queryset:
|
||||||
|
arg_count = self.get_queryset.func_code.co_argcount
|
||||||
|
if arg_count == 1:
|
||||||
|
queryset = self.get_queryset(queryset)
|
||||||
|
elif arg_count == 2:
|
||||||
|
queryset = self.get_queryset(owner, queryset)
|
||||||
|
else:
|
||||||
|
queryset = partial(self.get_queryset, owner, queryset)
|
||||||
|
return queryset
|
||||||
|
|
||||||
|
|
||||||
|
def queryset_manager(func):
|
||||||
|
"""Decorator that allows you to define custom QuerySet managers on
|
||||||
|
:class:`~mongoengine.Document` classes. The manager must be a function that
|
||||||
|
accepts a :class:`~mongoengine.Document` class as its first argument, and a
|
||||||
|
:class:`~mongoengine.queryset.QuerySet` as its second argument. The method
|
||||||
|
function should return a :class:`~mongoengine.queryset.QuerySet`, probably
|
||||||
|
the same one that was passed in, but modified in some way.
|
||||||
|
"""
|
||||||
|
return QuerySetManager(func)
|
||||||
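Taken together, the descriptor and decorator give the usual pattern for custom managers. A minimal sketch (BlogPost and the filter condition are illustrative, not part of this diff):

    from mongoengine import Document, StringField, BooleanField, queryset_manager

    class BlogPost(Document):
        title = StringField()
        published = BooleanField(default=False)

        @queryset_manager
        def live_posts(doc_cls, queryset):
            # Two-argument form: the arg_count == 2 branch in __get__ above
            return queryset.filter(published=True)

    # Attribute access goes through QuerySetManager.__get__, so
    # BlogPost.live_posts is already a filtered QuerySet, not a callable.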
1412  mongoengine/queryset/queryset.py  Normal file
File diff suppressed because it is too large. Load Diff
252  mongoengine/queryset/transform.py  Normal file
@@ -0,0 +1,252 @@
from collections import defaultdict

from bson import SON

from mongoengine.common import _import_class
from mongoengine.errors import InvalidQueryError, LookUpError

__all__ = ('query', 'update')


COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
                        'all', 'size', 'exists', 'not')
GEO_OPERATORS = ('within_distance', 'within_spherical_distance',
                 'within_box', 'within_polygon', 'near', 'near_sphere',
                 'max_distance')
STRING_OPERATORS = ('contains', 'icontains', 'startswith',
                    'istartswith', 'endswith', 'iendswith',
                    'exact', 'iexact')
CUSTOM_OPERATORS = ('match',)
MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS +
                   STRING_OPERATORS + CUSTOM_OPERATORS)

UPDATE_OPERATORS = ('set', 'unset', 'inc', 'dec', 'pop', 'push',
                    'push_all', 'pull', 'pull_all', 'add_to_set')


def query(_doc_cls=None, _field_operation=False, **query):
    """Transform a query from Django-style format to Mongo format.
    """
    mongo_query = {}
    merge_query = defaultdict(list)
    for key, value in sorted(query.items()):
        if key == "__raw__":
            mongo_query.update(value)
            continue

        parts = key.split('__')
        indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()]
        parts = [part for part in parts if not part.isdigit()]
        # Check for an operator and transform to mongo-style if there is one
        op = None
        if parts[-1] in MATCH_OPERATORS:
            op = parts.pop()

        negate = False
        if parts[-1] == 'not':
            parts.pop()
            negate = True

        if _doc_cls:
            # Switch field names to proper names [set in Field(name='foo')]
            try:
                fields = _doc_cls._lookup_field(parts)
            except Exception, e:
                raise InvalidQueryError(e)
            parts = []

            cleaned_fields = []
            for field in fields:
                append_field = True
                if isinstance(field, basestring):
                    parts.append(field)
                    append_field = False
                else:
                    parts.append(field.db_field)
                if append_field:
                    cleaned_fields.append(field)

            # Convert value to its proper query value
            field = cleaned_fields[-1]

            singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not']
            singular_ops += STRING_OPERATORS
            if op in singular_ops:
                if isinstance(field, basestring):
                    if (op in STRING_OPERATORS and
                       isinstance(value, basestring)):
                        StringField = _import_class('StringField')
                        value = StringField.prepare_query_value(op, value)
                    else:
                        value = field
                else:
                    value = field.prepare_query_value(op, value)
            elif op in ('in', 'nin', 'all', 'near'):
                # 'in', 'nin' and 'all' require a list of values
                value = [field.prepare_query_value(op, v) for v in value]

        # if op and op not in COMPARISON_OPERATORS:
        if op:
            if op in GEO_OPERATORS:
                if op == "within_distance":
                    value = {'$within': {'$center': value}}
                elif op == "within_spherical_distance":
                    value = {'$within': {'$centerSphere': value}}
                elif op == "within_polygon":
                    value = {'$within': {'$polygon': value}}
                elif op == "near":
                    value = {'$near': value}
                elif op == "near_sphere":
                    value = {'$nearSphere': value}
                elif op == 'within_box':
                    value = {'$within': {'$box': value}}
                elif op == "max_distance":
                    value = {'$maxDistance': value}
                else:
                    raise NotImplementedError("Geo method '%s' has not "
                                              "been implemented" % op)
            elif op in CUSTOM_OPERATORS:
                if op == 'match':
                    value = {"$elemMatch": value}
                else:
                    raise NotImplementedError("Custom method '%s' has not "
                                              "been implemented" % op)
            elif op not in STRING_OPERATORS:
                value = {'$' + op: value}

        if negate:
            value = {'$not': value}

        for i, part in indices:
            parts.insert(i, part)
        key = '.'.join(parts)
        if op is None or key not in mongo_query:
            mongo_query[key] = value
        elif key in mongo_query:
            if key in mongo_query and isinstance(mongo_query[key], dict):
                mongo_query[key].update(value)
                # $maxDistance needs to come last - convert to SON
                if '$maxDistance' in mongo_query[key]:
                    value_dict = mongo_query[key]
                    value_son = SON()
                    for k, v in value_dict.iteritems():
                        if k == '$maxDistance':
                            continue
                        value_son[k] = v
                    value_son['$maxDistance'] = value_dict['$maxDistance']
                    mongo_query[key] = value_son
            else:
                # Store for manually merging later
                merge_query[key].append(value)

    # The queryset has been filtered in such a way that we must manually merge
    for k, v in merge_query.items():
        merge_query[k].append(mongo_query[k])
        del mongo_query[k]
        if isinstance(v, list):
            value = [{k: val} for val in v]
            if '$and' in mongo_query.keys():
                mongo_query['$and'].append(value)
            else:
                mongo_query['$and'] = value

    return mongo_query


def update(_doc_cls=None, **update):
    """Transform an update spec from Django-style format to Mongo format.
    """
    mongo_update = {}
    for key, value in update.items():
        if key == "__raw__":
            mongo_update.update(value)
            continue
        parts = key.split('__')
        # Check for an operator and transform to mongo-style if there is one
        op = None
        if parts[0] in UPDATE_OPERATORS:
            op = parts.pop(0)
            # Convert Pythonic names to Mongo equivalents
            if op in ('push_all', 'pull_all'):
                op = op.replace('_all', 'All')
            elif op == 'dec':
                # Support decrement by flipping a positive value's sign
                # and using 'inc'
                op = 'inc'
                if value > 0:
                    value = -value
            elif op == 'add_to_set':
                op = op.replace('_to_set', 'ToSet')

        match = None
        if parts[-1] in COMPARISON_OPERATORS:
            match = parts.pop()

        if _doc_cls:
            # Switch field names to proper names [set in Field(name='foo')]
            try:
                fields = _doc_cls._lookup_field(parts)
            except Exception, e:
                raise InvalidQueryError(e)
            parts = []

            cleaned_fields = []
            for field in fields:
                append_field = True
                if isinstance(field, basestring):
                    # Convert the S operator to $
                    if field == 'S':
                        field = '$'
                    parts.append(field)
                    append_field = False
                else:
                    parts.append(field.db_field)
                if append_field:
                    cleaned_fields.append(field)

            # Convert value to its proper query value
            field = cleaned_fields[-1]

            if op in (None, 'set', 'push', 'pull'):
                if field.required or value is not None:
                    value = field.prepare_query_value(op, value)
            elif op in ('pushAll', 'pullAll'):
                value = [field.prepare_query_value(op, v) for v in value]
            elif op == 'addToSet':
                if isinstance(value, (list, tuple, set)):
                    value = [field.prepare_query_value(op, v) for v in value]
                elif field.required or value is not None:
                    value = field.prepare_query_value(op, value)

        if match:
            match = '$' + match
            value = {match: value}

        key = '.'.join(parts)

        if not op:
            raise InvalidQueryError("Updates must supply an operation "
                                    "eg: set__FIELD=value")

        if 'pull' in op and '.' in key:
            # Dot syntax doesn't work with pull operations;
            # use nested dicts instead
            if op == 'pullAll':
                raise InvalidQueryError("pullAll operations only support "
                                        "a single field depth")

            parts.reverse()
            for key in parts:
                value = {key: value}
        elif op == 'addToSet' and isinstance(value, list):
            value = {key: {"$each": value}}
        else:
            value = {key: value}
        key = '$' + op

        if key not in mongo_update:
            mongo_update[key] = value
        elif key in mongo_update and isinstance(mongo_update[key], dict):
            mongo_update[key].update(value)

    return mongo_update
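A few concrete input/output pairs for the two transforms above, called without a document class so no field conversion applies (the outputs follow directly from the code):

    from mongoengine.queryset import transform

    transform.query(age__gte=18)            # {'age': {'$gte': 18}}
    transform.query(age__not__gt=65)        # {'age': {'$not': {'$gt': 65}}}
    transform.query(point__near=[40, 5])    # {'point': {'$near': [40, 5]}}

    transform.update(inc__count=1)          # {'$inc': {'count': 1}}
    transform.update(dec__count=1)          # {'$inc': {'count': -1}}  ('dec' flips the sign)
    transform.update(push_all__tags=['x'])  # {'$pushAll': {'tags': ['x']}}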
155  mongoengine/queryset/visitor.py  Normal file
@@ -0,0 +1,155 @@
import copy

from mongoengine.errors import InvalidQueryError
from mongoengine.python_support import product, reduce

from mongoengine.queryset import transform

__all__ = ('Q',)


class QNodeVisitor(object):
    """Base visitor class for visiting Q-object nodes in a query tree.
    """

    def visit_combination(self, combination):
        """Called by QCombination objects.
        """
        return combination

    def visit_query(self, query):
        """Called by (New)Q objects.
        """
        return query


class SimplificationVisitor(QNodeVisitor):
    """Simplifies query trees by combining unnecessary 'and' connection nodes
    into a single Q-object.
    """

    def visit_combination(self, combination):
        if combination.operation == combination.AND:
            # The simplification only applies to 'simple' queries
            if all(isinstance(node, Q) for node in combination.children):
                queries = [n.query for n in combination.children]
                return Q(**self._query_conjunction(queries))
        return combination

    def _query_conjunction(self, queries):
        """Merges query dicts - effectively &ing them together.
        """
        query_ops = set()
        combined_query = {}
        for query in queries:
            ops = set(query.keys())
            # Make sure that the same operation isn't applied more than once
            # to a single field
            intersection = ops.intersection(query_ops)
            if intersection:
                msg = 'Duplicate query conditions: '
                raise InvalidQueryError(msg + ', '.join(intersection))

            query_ops.update(ops)
            combined_query.update(copy.deepcopy(query))
        return combined_query


class QueryCompilerVisitor(QNodeVisitor):
    """Compiles the nodes in a query tree to a PyMongo-compatible query
    dictionary.
    """

    def __init__(self, document):
        self.document = document

    def visit_combination(self, combination):
        operator = "$and"
        if combination.operation == combination.OR:
            operator = "$or"
        return {operator: combination.children}

    def visit_query(self, query):
        return transform.query(self.document, **query.query)


class QNode(object):
    """Base class for nodes in query trees.
    """

    AND = 0
    OR = 1

    def to_query(self, document):
        query = self.accept(SimplificationVisitor())
        query = query.accept(QueryCompilerVisitor(document))
        return query

    def accept(self, visitor):
        raise NotImplementedError

    def _combine(self, other, operation):
        """Combine this node with another node into a QCombination object.
        """
        if getattr(other, 'empty', True):
            return self

        if self.empty:
            return other

        return QCombination(operation, [self, other])

    @property
    def empty(self):
        return False

    def __or__(self, other):
        return self._combine(other, self.OR)

    def __and__(self, other):
        return self._combine(other, self.AND)


class QCombination(QNode):
    """Represents the combination of several conditions by a given logical
    operator.
    """

    def __init__(self, operation, children):
        self.operation = operation
        self.children = []
        for node in children:
            # If the child is a combination of the same type, we can merge its
            # children directly into this combination's children
            if isinstance(node, QCombination) and node.operation == operation:
                # self.children += node.children
                self.children.append(node)
            else:
                self.children.append(node)

    def accept(self, visitor):
        for i in range(len(self.children)):
            if isinstance(self.children[i], QNode):
                self.children[i] = self.children[i].accept(visitor)

        return visitor.visit_combination(self)

    @property
    def empty(self):
        return not bool(self.children)


class Q(QNode):
    """A simple query object, used in a query tree to build up more complex
    query structures.
    """

    def __init__(self, **query):
        self.query = query

    def accept(self, visitor):
        return visitor.visit_query(self)

    @property
    def empty(self):
        return not bool(self.query)
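Q trees in practice; a short sketch (Person is a stand-in document class, and the compiled dict follows from transform.query above):

    from mongoengine.queryset import Q

    # & and | build QCombination nodes through QNode._combine
    q = (Q(age__gte=18) & Q(country='uk')) | Q(admin=True)

    # SimplificationVisitor folds the AND of two simple Qs into one Q,
    # then QueryCompilerVisitor emits a PyMongo dict:
    # q.to_query(Person) == {'$or': [{'age': {'$gte': 18}, 'country': 'uk'},
    #                                {'admin': True}]}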
@@ -1,59 +0,0 @@
from mongoengine.connection import get_db


class query_counter(object):
    """ Query_counter contextmanager to get the number of queries. """

    def __init__(self):
        """ Construct the query_counter. """
        self.counter = 0
        self.db = get_db()

    def __enter__(self):
        """ On every with block we need to drop the profile collection. """
        self.db.set_profiling_level(0)
        self.db.system.profile.drop()
        self.db.set_profiling_level(2)
        return self

    def __exit__(self, t, value, traceback):
        """ Reset the profiling level. """
        self.db.set_profiling_level(0)

    def __eq__(self, value):
        """ == Compare querycounter. """
        return value == self._get_count()

    def __ne__(self, value):
        """ != Compare querycounter. """
        return not self.__eq__(value)

    def __lt__(self, value):
        """ < Compare querycounter. """
        return self._get_count() < value

    def __le__(self, value):
        """ <= Compare querycounter. """
        return self._get_count() <= value

    def __gt__(self, value):
        """ > Compare querycounter. """
        return self._get_count() > value

    def __ge__(self, value):
        """ >= Compare querycounter. """
        return self._get_count() >= value

    def __int__(self):
        """ int representation. """
        return self._get_count()

    def __repr__(self):
        """ repr query_counter as the number of queries. """
        return u"%s" % self._get_count()

    def _get_count(self):
        """ Get the number of queries. """
        count = self.db.system.profile.find().count() - self.counter
        self.counter += 1
        return count
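The deleted helper above was a profiling-based context manager; it was used roughly like this (the import location is assumed, since the old module path is not shown in this view):

    from mongoengine.tests import query_counter  # assumed old location

    with query_counter() as q:
        # run some queries while profiling level 2 is active...
        assert q == 0   # __eq__ counts entries in db.system.profile

    # Note that _get_count() bumps self.counter on every call, so each
    # comparison consumes one profile entry of slack.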
@@ -5,7 +5,7 @@
 %define srcname mongoengine
 
 Name: python-%{srcname}
-Version: 0.6.8
+Version: 0.7.9
 Release: 1%{?dist}
 Summary: A Python Document-Object Mapper for working with MongoDB
 
11  setup.cfg  Normal file
@@ -0,0 +1,11 @@
[nosetests]
verbosity = 3
detailed-errors = 1
#with-coverage = 1
#cover-erase = 1
#cover-html = 1
#cover-html-dir = ../htmlcov
#cover-package = mongoengine
py3where = build
where = tests
#tests = document/__init__.py
50  setup.py
@@ -1,27 +1,35 @@
-from setuptools import setup, find_packages
 import os
+import sys
+from setuptools import setup, find_packages
 
-DESCRIPTION = "A Python Document-Object Mapper for working with MongoDB"
+# Hack to silence atexit traceback in newer python versions
+try:
+    import multiprocessing
+except ImportError:
+    pass
+
+DESCRIPTION = 'MongoEngine is a Python Object-Document ' + \
+'Mapper for working with MongoDB.'
 LONG_DESCRIPTION = None
 try:
     LONG_DESCRIPTION = open('README.rst').read()
 except:
     pass
 
+
 def get_version(version_tuple):
-    version = '%s.%s' % (version_tuple[0], version_tuple[1])
-    if version_tuple[2]:
-        version = '%s.%s' % (version, version_tuple[2])
-    return version
+    if not isinstance(version_tuple[-1], int):
+        return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1]
+    return '.'.join(map(str, version_tuple))
 
 # Dirty hack to get version number from monogengine/__init__.py - we can't
 # import it as it depends on PyMongo and PyMongo isn't installed until this
 # file is read
 init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py')
-version_line = filter(lambda l: l.startswith('VERSION'), open(init))[0]
+version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0]
 
 VERSION = get_version(eval(version_line.split('=')[-1]))
-print VERSION
+print(VERSION)
 
 CLASSIFIERS = [
     'Development Status :: 4 - Beta',
@@ -29,18 +37,38 @@ CLASSIFIERS = [
     'License :: OSI Approved :: MIT License',
     'Operating System :: OS Independent',
     'Programming Language :: Python',
+    "Programming Language :: Python :: 2",
+    "Programming Language :: Python :: 2.5",
+    "Programming Language :: Python :: 2.6",
+    "Programming Language :: Python :: 2.7",
+    "Programming Language :: Python :: 3",
+    "Programming Language :: Python :: 3.1",
+    "Programming Language :: Python :: 3.2",
+    "Programming Language :: Python :: Implementation :: CPython",
     'Topic :: Database',
     'Topic :: Software Development :: Libraries :: Python Modules',
 ]
 
+extra_opts = {}
+if sys.version_info[0] == 3:
+    extra_opts['use_2to3'] = True
+    extra_opts['tests_require'] = ['nose', 'coverage', 'blinker']
+    extra_opts['packages'] = find_packages(exclude=('tests',))
+    if "test" in sys.argv or "nosetests" in sys.argv:
+        extra_opts['packages'].append("tests")
+        extra_opts['package_data'] = {"tests": ["fields/mongoengine.png"]}
+else:
+    extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django==1.4.2', 'PIL']
+    extra_opts['packages'] = find_packages(exclude=('tests',))
+
 setup(name='mongoengine',
       version=VERSION,
-      packages=find_packages(),
       author='Harry Marr',
       author_email='harry.marr@{nospam}gmail.com',
       maintainer="Ross Lawley",
       maintainer_email="ross.lawley@{nospam}gmail.com",
       url='http://mongoengine.org/',
+      download_url='https://github.com/MongoEngine/mongoengine/tarball/master',
       license='MIT',
       include_package_data=True,
       description=DESCRIPTION,
@@ -48,6 +76,6 @@ setup(name='mongoengine',
       platforms=['any'],
       classifiers=CLASSIFIERS,
       install_requires=['pymongo'],
-      test_suite='tests',
-      tests_require=['blinker', 'django>=1.3', 'PIL']
+      test_suite='nose.collector',
+      **extra_opts
       )
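The rewritten get_version handles a trailing pre-release tag as well as plain numeric tuples; for example (the 'RC1' tuple is hypothetical):

    get_version((0, 7, 9))      # -> '0.7.9'
    get_version((0, 8, 'RC1'))  # -> '0.8RC1' (a non-int tail is appended, not dotted)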
@@ -0,0 +1,3 @@
from all_warnings import AllWarnings
from document import *
from queryset import *
94  tests/all_warnings/__init__.py  Normal file
@@ -0,0 +1,94 @@
"""
This test has been put into a module.  This is because it tests warnings that
only get triggered on first hit.  This way we can ensure it's imported into
the top level and called first by the test suite.
"""
import sys
sys.path[0:0] = [""]
import unittest
import warnings

from mongoengine import *


__all__ = ('AllWarnings', )


class AllWarnings(unittest.TestCase):

    def setUp(self):
        conn = connect(db='mongoenginetest')
        self.warning_list = []
        self.showwarning_default = warnings.showwarning
        warnings.showwarning = self.append_to_warning_list

    def append_to_warning_list(self, message, category, *args):
        self.warning_list.append({"message": message,
                                  "category": category})

    def tearDown(self):
        # restore default handling of warnings
        warnings.showwarning = self.showwarning_default

    def test_dbref_reference_field_future_warning(self):

        class Person(Document):
            name = StringField()
            parent = ReferenceField('self')

        Person.drop_collection()

        p1 = Person()
        p1.parent = None
        p1.save()

        p2 = Person(name="Wilson Jr")
        p2.parent = p1
        p2.save(cascade=False)

        self.assertTrue(len(self.warning_list) > 0)
        warning = self.warning_list[0]
        self.assertEqual(FutureWarning, warning["category"])
        self.assertTrue("ReferenceFields will default to using ObjectId"
                        in str(warning["message"]))

    def test_document_save_cascade_future_warning(self):

        class Person(Document):
            name = StringField()
            parent = ReferenceField('self')

        Person.drop_collection()

        p1 = Person(name="Wilson Snr")
        p1.parent = None
        p1.save()

        p2 = Person(name="Wilson Jr")
        p2.parent = p1
        p2.parent.name = "Poppa Wilson"
        p2.save()

        self.assertTrue(len(self.warning_list) > 0)
        if len(self.warning_list) > 1:
            print self.warning_list
        warning = self.warning_list[0]
        self.assertEqual(FutureWarning, warning["category"])
        self.assertTrue("Cascading saves will default to off in 0.8"
                        in str(warning["message"]))

    def test_document_collection_syntax_warning(self):

        class NonAbstractBase(Document):
            meta = {'allow_inheritance': True}

        class InheritedDocumentFailTest(NonAbstractBase):
            meta = {'collection': 'fail'}

        warning = self.warning_list[0]
        self.assertEqual(SyntaxWarning, warning["category"])
        self.assertEqual('non_abstract_base',
                         InheritedDocumentFailTest._get_collection_name())
3008  tests/document.py
File diff suppressed because it is too large. Load Diff
15  tests/document/__init__.py  Normal file
@@ -0,0 +1,15 @@
import sys
sys.path[0:0] = [""]
import unittest

from class_methods import *
from delta import *
from dynamic import *
from indexes import *
from inheritance import *
from instance import *
from json_serialisation import *
from validation import *

if __name__ == '__main__':
    unittest.main()
187  tests/document/class_methods.py  Normal file
@@ -0,0 +1,187 @@
# -*- coding: utf-8 -*-
from __future__ import with_statement
import sys
sys.path[0:0] = [""]
import unittest

from mongoengine import *

from mongoengine.queryset import NULLIFY
from mongoengine.connection import get_db

__all__ = ("ClassMethodsTest", )


class ClassMethodsTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

        class Person(Document):
            name = StringField()
            age = IntField()

            non_field = True

            meta = {"allow_inheritance": True}

        self.Person = Person

    def tearDown(self):
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
            self.db.drop_collection(collection)

    def test_definition(self):
        """Ensure that document may be defined using fields.
        """
        self.assertEqual(['age', 'id', 'name'],
                         sorted(self.Person._fields.keys()))
        self.assertEqual(["IntField", "ObjectIdField", "StringField"],
                         sorted([x.__class__.__name__ for x in
                                 self.Person._fields.values()]))

    def test_get_db(self):
        """Ensure that get_db returns the expected db.
        """
        db = self.Person._get_db()
        self.assertEqual(self.db, db)

    def test_get_collection_name(self):
        """Ensure that get_collection_name returns the expected collection
        name.
        """
        collection_name = 'person'
        self.assertEqual(collection_name, self.Person._get_collection_name())

    def test_get_collection(self):
        """Ensure that get_collection returns the expected collection.
        """
        collection_name = 'person'
        collection = self.Person._get_collection()
        self.assertEqual(self.db[collection_name], collection)

    def test_drop_collection(self):
        """Ensure that the collection may be dropped from the database.
        """
        collection_name = 'person'
        self.Person(name='Test').save()
        self.assertTrue(collection_name in self.db.collection_names())

        self.Person.drop_collection()
        self.assertFalse(collection_name in self.db.collection_names())

    def test_register_delete_rule(self):
        """Ensure that register delete rule adds a delete rule to the
        document meta.
        """
        class Job(Document):
            employee = ReferenceField(self.Person)

        self.assertEqual(self.Person._meta.get('delete_rules'), None)

        self.Person.register_delete_rule(Job, 'employee', NULLIFY)
        self.assertEqual(self.Person._meta['delete_rules'],
                         {(Job, 'employee'): NULLIFY})

    def test_collection_naming(self):
        """Ensure that a collection with a specified name may be used.
        """

        class DefaultNamingTest(Document):
            pass
        self.assertEqual('default_naming_test',
                         DefaultNamingTest._get_collection_name())

        class CustomNamingTest(Document):
            meta = {'collection': 'pimp_my_collection'}

        self.assertEqual('pimp_my_collection',
                         CustomNamingTest._get_collection_name())

        class DynamicNamingTest(Document):
            meta = {'collection': lambda c: "DYNAMO"}
        self.assertEqual('DYNAMO', DynamicNamingTest._get_collection_name())

        # Use Abstract class to handle backwards compatibility
        class BaseDocument(Document):
            meta = {
                'abstract': True,
                'collection': lambda c: c.__name__.lower()
            }

        class OldNamingConvention(BaseDocument):
            pass
        self.assertEqual('oldnamingconvention',
                         OldNamingConvention._get_collection_name())

        class InheritedAbstractNamingTest(BaseDocument):
            meta = {'collection': 'wibble'}
        self.assertEqual('wibble',
                         InheritedAbstractNamingTest._get_collection_name())

        # Mixin tests
        class BaseMixin(object):
            meta = {
                'collection': lambda c: c.__name__.lower()
            }

        class OldMixinNamingConvention(Document, BaseMixin):
            pass
        self.assertEqual('oldmixinnamingconvention',
                         OldMixinNamingConvention._get_collection_name())

        class BaseMixin(object):
            meta = {
                'collection': lambda c: c.__name__.lower()
            }

        class BaseDocument(Document, BaseMixin):
            meta = {'allow_inheritance': True}

        class MyDocument(BaseDocument):
            pass

        self.assertEqual('basedocument', MyDocument._get_collection_name())

    def test_custom_collection_name_operations(self):
        """Ensure that a collection with a specified name is used as
        expected.
        """
        collection_name = 'personCollTest'

        class Person(Document):
            name = StringField()
            meta = {'collection': collection_name}

        Person(name="Test User").save()
        self.assertTrue(collection_name in self.db.collection_names())

        user_obj = self.db[collection_name].find_one()
        self.assertEqual(user_obj['name'], "Test User")

        user_obj = Person.objects[0]
        self.assertEqual(user_obj.name, "Test User")

        Person.drop_collection()
        self.assertFalse(collection_name in self.db.collection_names())

    def test_collection_name_and_primary(self):
        """Ensure that a collection with a specified name may be used.
        """

        class Person(Document):
            name = StringField(primary_key=True)
            meta = {'collection': 'app'}

        Person(name="Test User").save()

        user_obj = Person.objects.first()
        self.assertEqual(user_obj.name, "Test User")

        Person.drop_collection()


if __name__ == '__main__':
    unittest.main()
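The naming rules exercised above boil down to the meta['collection'] option; a minimal sketch with a hypothetical Article document:

    from mongoengine import Document, StringField

    class Article(Document):
        title = StringField()
        meta = {'collection': 'news_articles'}   # fixed collection name
        # or a callable receiving the class:
        # meta = {'collection': lambda cls: cls.__name__.lower()}

    # Article._get_collection_name() -> 'news_articles'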
689  tests/document/delta.py  Normal file
@@ -0,0 +1,689 @@
# -*- coding: utf-8 -*-
import sys
sys.path[0:0] = [""]
import unittest

from mongoengine import *
from mongoengine.connection import get_db

__all__ = ("DeltaTest",)


class DeltaTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

        class Person(Document):
            name = StringField()
            age = IntField()

            non_field = True

            meta = {"allow_inheritance": True}

        self.Person = Person

    def tearDown(self):
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
            self.db.drop_collection(collection)

    def test_delta(self):
        self.delta(Document)
        self.delta(DynamicDocument)

    def delta(self, DocClass):

        class Doc(DocClass):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEqual(doc._get_changed_fields(), ['string_field'])
        self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {}))

        doc._changed_fields = []
        doc.int_field = 1
        self.assertEqual(doc._get_changed_fields(), ['int_field'])
        self.assertEqual(doc._delta(), ({'int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
        self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {}))

        doc._changed_fields = []
        list_value = ['1', 2, {'hello': 'world'}]
        doc.list_field = list_value
        self.assertEqual(doc._get_changed_fields(), ['list_field'])
        self.assertEqual(doc._delta(), ({'list_field': list_value}, {}))

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
        self.assertEqual(doc._delta(), ({}, {'dict_field': 1}))

        doc._changed_fields = []
        doc.list_field = []
        self.assertEqual(doc._get_changed_fields(), ['list_field'])
        self.assertEqual(doc._delta(), ({}, {'list_field': 1}))

    def test_delta_recursive(self):
        self.delta_recursive(Document, EmbeddedDocument)
        self.delta_recursive(DynamicDocument, EmbeddedDocument)
        self.delta_recursive(Document, DynamicEmbeddedDocument)
        self.delta_recursive(DynamicDocument, DynamicEmbeddedDocument)

    def delta_recursive(self, DocClass, EmbeddedClass):

        class Embedded(EmbeddedClass):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()

        class Doc(DocClass):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()
            embedded_field = EmbeddedDocumentField(Embedded)

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEqual(doc._get_changed_fields(), ['embedded_field'])

        embedded_delta = {
            'string_field': 'hello',
            'int_field': 1,
            'dict_field': {'hello': 'world'},
            'list_field': ['1', 2, {'hello': 'world'}]
        }
        self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
        self.assertEqual(doc._delta(),
                         ({'embedded_field': embedded_delta}, {}))

        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.dict_field = {}
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.dict_field'])
        self.assertEqual(doc.embedded_field._delta(), ({}, {'dict_field': 1}))
        self.assertEqual(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.dict_field, {})

        doc.embedded_field.list_field = []
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.list_field'])
        self.assertEqual(doc.embedded_field._delta(), ({}, {'list_field': 1}))
        self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field, [])

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        doc.embedded_field.list_field = ['1', 2, embedded_2]
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.list_field'])

        self.assertEqual(doc.embedded_field._delta(), ({
            'list_field': ['1', 2, {
                '_cls': 'Embedded',
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))

        self.assertEqual(doc._delta(), ({
            'embedded_field.list_field': ['1', 2, {
                '_cls': 'Embedded',
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))
        doc.save()
        doc = doc.reload(10)

        self.assertEqual(doc.embedded_field.list_field[0], '1')
        self.assertEqual(doc.embedded_field.list_field[1], 2)
        for k in doc.embedded_field.list_field[2]._fields:
            self.assertEqual(doc.embedded_field.list_field[2][k],
                             embedded_2[k])

        doc.embedded_field.list_field[2].string_field = 'world'
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.list_field.2.string_field'])
        self.assertEqual(doc.embedded_field._delta(),
                         ({'list_field.2.string_field': 'world'}, {}))
        self.assertEqual(doc._delta(),
                         ({'embedded_field.list_field.2.string_field':
                           'world'}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
                         'world')

        # Test multiple assignments
        doc.embedded_field.list_field[2].string_field = 'hello world'
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.list_field'])
        self.assertEqual(doc.embedded_field._delta(), ({
            'list_field': ['1', 2, {
                '_cls': 'Embedded',
                'string_field': 'hello world',
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
                'dict_field': {'hello': 'world'}}]}, {}))
        self.assertEqual(doc._delta(), ({
            'embedded_field.list_field': ['1', 2, {
                '_cls': 'Embedded',
                'string_field': 'hello world',
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
                'dict_field': {'hello': 'world'}}
            ]}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
                         'hello world')

        # Test list native methods
        doc.embedded_field.list_field[2].list_field.pop(0)
        self.assertEqual(doc._delta(),
                         ({'embedded_field.list_field.2.list_field':
                           [2, {'hello': 'world'}]}, {}))
        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        self.assertEqual(doc._delta(),
                         ({'embedded_field.list_field.2.list_field':
                           [2, {'hello': 'world'}, 1]}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].list_field,
                         [2, {'hello': 'world'}, 1])

        doc.embedded_field.list_field[2].list_field.sort(key=str)
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].list_field,
                         [1, 2, {'hello': 'world'}])

        del(doc.embedded_field.list_field[2].list_field[2]['hello'])
        self.assertEqual(doc._delta(),
                         ({'embedded_field.list_field.2.list_field':
                           [1, 2, {}]}, {}))
        doc.save()
        doc = doc.reload(10)

        del(doc.embedded_field.list_field[2].list_field)
        self.assertEqual(doc._delta(),
                         ({}, {'embedded_field.list_field.2.list_field': 1}))

        doc.save()
        doc = doc.reload(10)

        doc.dict_field['Embedded'] = embedded_1
        doc.save()
        doc = doc.reload(10)

        doc.dict_field['Embedded'].string_field = 'Hello World'
        self.assertEqual(doc._get_changed_fields(),
                         ['dict_field.Embedded.string_field'])
        self.assertEqual(doc._delta(),
                         ({'dict_field.Embedded.string_field':
                           'Hello World'}, {}))

    def test_circular_reference_deltas(self):
        self.circular_reference_deltas(Document, Document)
        self.circular_reference_deltas(Document, DynamicDocument)
        self.circular_reference_deltas(DynamicDocument, Document)
        self.circular_reference_deltas(DynamicDocument, DynamicDocument)

    def circular_reference_deltas(self, DocClass1, DocClass2):

        class Person(DocClass1):
            name = StringField()
            owns = ListField(ReferenceField('Organization'))

        class Organization(DocClass2):
            name = StringField()
            owner = ReferenceField('Person')

        person = Person(name="owner")
        person.save()
        organization = Organization(name="company")
        organization.save()

        person.owns.append(organization)
        organization.owner = person

        person.save()
        organization.save()

        p = Person.objects[0].select_related()
        o = Organization.objects.first()
        self.assertEqual(p.owns[0], o)
        self.assertEqual(o.owner, p)

    def test_circular_reference_deltas_2(self):
        self.circular_reference_deltas_2(Document, Document)
        self.circular_reference_deltas_2(Document, DynamicDocument)
        self.circular_reference_deltas_2(DynamicDocument, Document)
        self.circular_reference_deltas_2(DynamicDocument, DynamicDocument)

    def circular_reference_deltas_2(self, DocClass1, DocClass2):

        class Person(DocClass1):
            name = StringField()
            owns = ListField(ReferenceField('Organization'))
            employer = ReferenceField('Organization')

        class Organization(DocClass2):
            name = StringField()
            owner = ReferenceField('Person')
            employees = ListField(ReferenceField('Person'))

        Person.drop_collection()
        Organization.drop_collection()

        person = Person(name="owner")
        person.save()

        employee = Person(name="employee")
        employee.save()

        organization = Organization(name="company")
        organization.save()

        person.owns.append(organization)
        organization.owner = person

        organization.employees.append(employee)
        employee.employer = organization

        person.save()
        organization.save()
        employee.save()

        p = Person.objects.get(name="owner")
        e = Person.objects.get(name="employee")
        o = Organization.objects.first()

        self.assertEqual(p.owns[0], o)
        self.assertEqual(o.owner, p)
        self.assertEqual(e.employer, o)

    def test_delta_db_field(self):
        self.delta_db_field(Document)
        self.delta_db_field(DynamicDocument)

    def delta_db_field(self, DocClass):

        class Doc(DocClass):
            string_field = StringField(db_field='db_string_field')
            int_field = IntField(db_field='db_int_field')
            dict_field = DictField(db_field='db_dict_field')
            list_field = ListField(db_field='db_list_field')

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEqual(doc._get_changed_fields(), ['db_string_field'])
        self.assertEqual(doc._delta(), ({'db_string_field': 'hello'}, {}))

        doc._changed_fields = []
        doc.int_field = 1
        self.assertEqual(doc._get_changed_fields(), ['db_int_field'])
        self.assertEqual(doc._delta(), ({'db_int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEqual(doc._get_changed_fields(), ['db_dict_field'])
        self.assertEqual(doc._delta(), ({'db_dict_field': dict_value}, {}))

        doc._changed_fields = []
        list_value = ['1', 2, {'hello': 'world'}]
        doc.list_field = list_value
        self.assertEqual(doc._get_changed_fields(), ['db_list_field'])
        self.assertEqual(doc._delta(), ({'db_list_field': list_value}, {}))

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        self.assertEqual(doc._get_changed_fields(), ['db_dict_field'])
        self.assertEqual(doc._delta(), ({}, {'db_dict_field': 1}))

        doc._changed_fields = []
        doc.list_field = []
        self.assertEqual(doc._get_changed_fields(), ['db_list_field'])
        self.assertEqual(doc._delta(), ({}, {'db_list_field': 1}))

        # Test it saves that data
        doc = Doc()
        doc.save()

        doc.string_field = 'hello'
        doc.int_field = 1
        doc.dict_field = {'hello': 'world'}
        doc.list_field = ['1', 2, {'hello': 'world'}]
        doc.save()
        doc = doc.reload(10)

        self.assertEqual(doc.string_field, 'hello')
        self.assertEqual(doc.int_field, 1)
        self.assertEqual(doc.dict_field, {'hello': 'world'})
        self.assertEqual(doc.list_field, ['1', 2, {'hello': 'world'}])

    def test_delta_recursive_db_field(self):
        self.delta_recursive_db_field(Document, EmbeddedDocument)
        self.delta_recursive_db_field(Document, DynamicEmbeddedDocument)
        self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument)
        self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument)

    def delta_recursive_db_field(self, DocClass, EmbeddedClass):

        class Embedded(EmbeddedClass):
            string_field = StringField(db_field='db_string_field')
            int_field = IntField(db_field='db_int_field')
            dict_field = DictField(db_field='db_dict_field')
            list_field = ListField(db_field='db_list_field')

        class Doc(DocClass):
            string_field = StringField(db_field='db_string_field')
            int_field = IntField(db_field='db_int_field')
            dict_field = DictField(db_field='db_dict_field')
            list_field = ListField(db_field='db_list_field')
            embedded_field = EmbeddedDocumentField(Embedded,
                                                   db_field='db_embedded_field')

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEqual(doc._get_changed_fields(), ['db_embedded_field'])

        embedded_delta = {
            'db_string_field': 'hello',
            'db_int_field': 1,
            'db_dict_field': {'hello': 'world'},
            'db_list_field': ['1', 2, {'hello': 'world'}]
        }
        self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
        self.assertEqual(doc._delta(),
                         ({'db_embedded_field': embedded_delta}, {}))

        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.dict_field = {}
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_dict_field'])
        self.assertEqual(doc.embedded_field._delta(),
                         ({}, {'db_dict_field': 1}))
        self.assertEqual(doc._delta(),
                         ({}, {'db_embedded_field.db_dict_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.dict_field, {})

        doc.embedded_field.list_field = []
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_list_field'])
        self.assertEqual(doc.embedded_field._delta(),
                         ({}, {'db_list_field': 1}))
        self.assertEqual(doc._delta(),
                         ({}, {'db_embedded_field.db_list_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field, [])

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        doc.embedded_field.list_field = ['1', 2, embedded_2]
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_list_field'])
        self.assertEqual(doc.embedded_field._delta(), ({
            'db_list_field': ['1', 2, {
                '_cls': 'Embedded',
                'db_string_field': 'hello',
                'db_dict_field': {'hello': 'world'},
                'db_int_field': 1,
                'db_list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))

        self.assertEqual(doc._delta(), ({
            'db_embedded_field.db_list_field': ['1', 2, {
                '_cls': 'Embedded',
                'db_string_field': 'hello',
                'db_dict_field': {'hello': 'world'},
                'db_int_field': 1,
                'db_list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))
        doc.save()
        doc = doc.reload(10)

        self.assertEqual(doc.embedded_field.list_field[0], '1')
        self.assertEqual(doc.embedded_field.list_field[1], 2)
        for k in doc.embedded_field.list_field[2]._fields:
            self.assertEqual(doc.embedded_field.list_field[2][k],
                             embedded_2[k])

        doc.embedded_field.list_field[2].string_field = 'world'
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_list_field.2.db_string_field'])
        self.assertEqual(doc.embedded_field._delta(),
                         ({'db_list_field.2.db_string_field': 'world'}, {}))
        self.assertEqual(doc._delta(),
                         ({'db_embedded_field.db_list_field.2.db_string_field':
                           'world'}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
                         'world')

        # Test multiple assignments
        doc.embedded_field.list_field[2].string_field = 'hello world'
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_list_field'])
        self.assertEqual(doc.embedded_field._delta(), ({
            'db_list_field': ['1', 2, {
                '_cls': 'Embedded',
                'db_string_field': 'hello world',
                'db_int_field': 1,
                'db_list_field': ['1', 2, {'hello': 'world'}],
                'db_dict_field': {'hello': 'world'}}]}, {}))
        self.assertEqual(doc._delta(), ({
            'db_embedded_field.db_list_field': ['1', 2, {
                '_cls': 'Embedded',
                'db_string_field': 'hello world',
                'db_int_field': 1,
                'db_list_field': ['1', 2, {'hello': 'world'}],
                'db_dict_field': {'hello': 'world'}}
            ]}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
                         'hello world')

        # Test list native methods
        doc.embedded_field.list_field[2].list_field.pop(0)
        self.assertEqual(doc._delta(),
                         ({'db_embedded_field.db_list_field.2.db_list_field':
                           [2, {'hello': 'world'}]}, {}))
        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        self.assertEqual(doc._delta(),
                         ({'db_embedded_field.db_list_field.2.db_list_field':
                           [2, {'hello': 'world'}, 1]}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].list_field,
                         [2, {'hello': 'world'}, 1])

        doc.embedded_field.list_field[2].list_field.sort(key=str)
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].list_field,
                         [1, 2, {'hello': 'world'}])

        del(doc.embedded_field.list_field[2].list_field[2]['hello'])
        self.assertEqual(doc._delta(),
                         ({'db_embedded_field.db_list_field.2.db_list_field':
                           [1, 2, {}]}, {}))
        doc.save()
        doc = doc.reload(10)

        del(doc.embedded_field.list_field[2].list_field)
        self.assertEqual(doc._delta(), ({},
                         {'db_embedded_field.db_list_field.2.db_list_field': 1}))

    def test_delta_for_dynamic_documents(self):
        class Person(DynamicDocument):
            name = StringField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()

        p = Person(name="James", age=34)
        self.assertEqual(p._delta(), ({'age': 34, 'name': 'James',
                                       '_cls': 'Person'}, {}))

        p.doc = 123
        del(p.doc)
        self.assertEqual(p._delta(), ({'age': 34, 'name': 'James',
                                       '_cls': 'Person'}, {'doc': 1}))

        p = Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        p.age = 24
        self.assertEqual(p.age, 24)
        self.assertEqual(p._get_changed_fields(), ['age'])
        self.assertEqual(p._delta(), ({'age': 24}, {}))

        p = self.Person.objects(age=22).get()
        p.age = 24
        self.assertEqual(p.age, 24)
        self.assertEqual(p._get_changed_fields(), ['age'])
        self.assertEqual(p._delta(), ({'age': 24}, {}))

        p.save()
        self.assertEqual(1, self.Person.objects(age=24).count())

    def test_dynamic_delta(self):

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEqual(doc._get_changed_fields(), ['string_field'])
        self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {}))

        doc._changed_fields = []
        doc.int_field = 1
        self.assertEqual(doc._get_changed_fields(), ['int_field'])
        self.assertEqual(doc._delta(), ({'int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
|
||||||
|
self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {}))
|
||||||
|
|
||||||
|
doc._changed_fields = []
|
||||||
|
list_value = ['1', 2, {'hello': 'world'}]
|
||||||
|
doc.list_field = list_value
|
||||||
|
self.assertEqual(doc._get_changed_fields(), ['list_field'])
|
||||||
|
self.assertEqual(doc._delta(), ({'list_field': list_value}, {}))
|
||||||
|
|
||||||
|
# Test unsetting
|
||||||
|
doc._changed_fields = []
|
||||||
|
doc.dict_field = {}
|
||||||
|
self.assertEqual(doc._get_changed_fields(), ['dict_field'])
|
||||||
|
self.assertEqual(doc._delta(), ({}, {'dict_field': 1}))
|
||||||
|
|
||||||
|
doc._changed_fields = []
|
||||||
|
doc.list_field = []
|
||||||
|
self.assertEqual(doc._get_changed_fields(), ['list_field'])
|
||||||
|
self.assertEqual(doc._delta(), ({}, {'list_field': 1}))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
unittest.main()
|
||||||
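A note on the contract the delta assertions above pin down: _delta() is an internal mongoengine API that returns a (sets, unsets) pair of dicts, which save() translates into the $set/$unset parts of the update query. A minimal sketch of the idea, assuming a local MongoDB; the Note class is illustrative, not part of this diff:

    # Sketch only: Note is a hypothetical class; _delta() is the internal
    # API exercised by the tests above.
    from mongoengine import Document, StringField, connect

    connect(db='mongoenginetest')

    class Note(Document):
        text = StringField()

    note = Note(text='first').save()
    note.text = 'second'
    # Pending changes surface as ({field: new_value}, {}) until saved.
    assert note._delta() == ({'text': 'second'}, {})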
296  tests/document/dynamic.py  Normal file
@@ -0,0 +1,296 @@
import unittest
import sys
sys.path[0:0] = [""]

from mongoengine import *
from mongoengine.connection import get_db

__all__ = ("DynamicTest", )


class DynamicTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

        class Person(DynamicDocument):
            name = StringField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()

        self.Person = Person

    def test_simple_dynamic_document(self):
        """Ensures simple dynamic documents are saved correctly"""

        p = self.Person()
        p.name = "James"
        p.age = 34

        self.assertEqual(p.to_mongo(), {"_cls": "Person", "name": "James",
                                        "age": 34})

        p.save()

        self.assertEqual(self.Person.objects.first().age, 34)

        # Confirm no changes to self.Person
        self.assertFalse(hasattr(self.Person, 'age'))

    def test_change_scope_of_variable(self):
        """Test changing the scope of a dynamic field has no adverse effects"""
        p = self.Person()
        p.name = "Dean"
        p.misc = 22
        p.save()

        p = self.Person.objects.get()
        p.misc = {'hello': 'world'}
        p.save()

        p = self.Person.objects.get()
        self.assertEqual(p.misc, {'hello': 'world'})

    def test_delete_dynamic_field(self):
        """Test deleting a dynamic field works"""
        self.Person.drop_collection()
        p = self.Person()
        p.name = "Dean"
        p.misc = 22
        p.save()

        p = self.Person.objects.get()
        p.misc = {'hello': 'world'}
        p.save()

        p = self.Person.objects.get()
        self.assertEqual(p.misc, {'hello': 'world'})
        collection = self.db[self.Person._get_collection_name()]
        obj = collection.find_one()
        self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name'])

        del(p.misc)
        p.save()

        p = self.Person.objects.get()
        self.assertFalse(hasattr(p, 'misc'))

        obj = collection.find_one()
        self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'name'])

    def test_dynamic_document_queries(self):
        """Ensure we can query dynamic fields"""
        p = self.Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        self.assertEqual(1, self.Person.objects(age=22).count())
        p = self.Person.objects(age=22)
        p = p.get()
        self.assertEqual(22, p.age)

    def test_complex_dynamic_document_queries(self):
        class Person(DynamicDocument):
            name = StringField()

        Person.drop_collection()

        p = Person(name="test")
        p.age = "ten"
        p.save()

        p1 = Person(name="test1")
        p1.age = "less than ten and a half"
        p1.save()

        p2 = Person(name="test2")
        p2.age = 10
        p2.save()

        self.assertEqual(Person.objects(age__icontains='ten').count(), 2)
        self.assertEqual(Person.objects(age__gte=10).count(), 1)

    def test_complex_data_lookups(self):
        """Ensure you can query dynamic document dynamic fields"""
        p = self.Person()
        p.misc = {'hello': 'world'}
        p.save()

        self.assertEqual(1, self.Person.objects(misc__hello='world').count())

    def test_complex_embedded_document_validation(self):
        """Ensure embedded dynamic documents may be validated"""
        class Embedded(DynamicEmbeddedDocument):
            content = URLField()

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_doc_1 = Embedded(content='http://mongoengine.org')
        embedded_doc_1.validate()

        embedded_doc_2 = Embedded(content='this is not a url')
        self.assertRaises(ValidationError, embedded_doc_2.validate)

        doc.embedded_field_1 = embedded_doc_1
        doc.embedded_field_2 = embedded_doc_2
        self.assertRaises(ValidationError, doc.validate)

    def test_inheritance(self):
        """Ensure that dynamic document plays nice with inheritance"""
        class Employee(self.Person):
            salary = IntField()

        Employee.drop_collection()

        self.assertTrue('name' in Employee._fields)
        self.assertTrue('salary' in Employee._fields)
        self.assertEqual(Employee._get_collection_name(),
                         self.Person._get_collection_name())

        joe_bloggs = Employee()
        joe_bloggs.name = "Joe Bloggs"
        joe_bloggs.salary = 10
        joe_bloggs.age = 20
        joe_bloggs.save()

        self.assertEqual(1, self.Person.objects(age=20).count())
        self.assertEqual(1, Employee.objects(age=20).count())

        joe_bloggs = self.Person.objects.first()
        self.assertTrue(isinstance(joe_bloggs, Employee))

    def test_embedded_dynamic_document(self):
        """Test dynamic embedded documents"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEqual(doc.to_mongo(), {
            "embedded_field": {
                "_cls": "Embedded",
                "string_field": "hello",
                "int_field": 1,
                "dict_field": {"hello": "world"},
                "list_field": ['1', 2, {'hello': 'world'}]
            }
        })
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc.embedded_field.__class__, Embedded)
        self.assertEqual(doc.embedded_field.string_field, "hello")
        self.assertEqual(doc.embedded_field.int_field, 1)
        self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
        self.assertEqual(doc.embedded_field.list_field,
                         ['1', 2, {'hello': 'world'}])

    def test_complex_embedded_documents(self):
        """Test complex dynamic embedded documents setups"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        embedded_1.list_field = ['1', 2, embedded_2]
        doc.embedded_field = embedded_1

        self.assertEqual(doc.to_mongo(), {
            "embedded_field": {
                "_cls": "Embedded",
                "string_field": "hello",
                "int_field": 1,
                "dict_field": {"hello": "world"},
                "list_field": ['1', 2,
                               {"_cls": "Embedded",
                                "string_field": "hello",
                                "int_field": 1,
                                "dict_field": {"hello": "world"},
                                "list_field": ['1', 2, {'hello': 'world'}]}
                               ]
            }
        })
        doc.save()
        doc = Doc.objects.first()
        self.assertEqual(doc.embedded_field.__class__, Embedded)
        self.assertEqual(doc.embedded_field.string_field, "hello")
        self.assertEqual(doc.embedded_field.int_field, 1)
        self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
        self.assertEqual(doc.embedded_field.list_field[0], '1')
        self.assertEqual(doc.embedded_field.list_field[1], 2)

        embedded_field = doc.embedded_field.list_field[2]

        self.assertEqual(embedded_field.__class__, Embedded)
        self.assertEqual(embedded_field.string_field, "hello")
        self.assertEqual(embedded_field.int_field, 1)
        self.assertEqual(embedded_field.dict_field, {'hello': 'world'})
        self.assertEqual(embedded_field.list_field, ['1', 2,
                                                     {'hello': 'world'}])

    def test_dynamic_and_embedded(self):
        """Ensure embedded documents play nicely"""

        class Address(EmbeddedDocument):
            city = StringField()

        class Person(DynamicDocument):
            name = StringField()

        Person.drop_collection()

        Person(name="Ross", address=Address(city="London")).save()

        person = Person.objects.first()
        person.address.city = "Lundenne"
        person.save()

        self.assertEqual(Person.objects.first().address.city, "Lundenne")

        person = Person.objects.first()
        person.address = Address(city="Londinium")
        person.save()

        self.assertEqual(Person.objects.first().address.city, "Londinium")

        person = Person.objects.first()
        person.age = 35
        person.save()
        self.assertEqual(Person.objects.first().age, 35)


if __name__ == '__main__':
    unittest.main()
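The dynamic.py suite above centres on one behaviour: attributes assigned to a DynamicDocument instance that were never declared as fields are saved and queryable anyway, without touching the class. A short usage sketch under the same assumptions (local MongoDB; Page is an illustrative name, not from this diff):

    from mongoengine import DynamicDocument, StringField, connect

    connect(db='mongoenginetest')

    class Page(DynamicDocument):
        title = StringField()      # declared field

    page = Page(title='Home')
    page.views = 42                # undeclared: stored as a dynamic field
    page.save()

    assert Page.objects(views=42).count() == 1  # dynamic fields are queryable
    assert not hasattr(Page, 'views')           # the class is left untouched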
730  tests/document/indexes.py  Normal file
@@ -0,0 +1,730 @@
# -*- coding: utf-8 -*-
from __future__ import with_statement
import unittest
import sys
sys.path[0:0] = [""]

import os
import pymongo

from nose.plugins.skip import SkipTest
from datetime import datetime

from mongoengine import *
from mongoengine.connection import get_db, get_connection

__all__ = ("IndexesTest", )


class IndexesTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

        class Person(Document):
            name = StringField()
            age = IntField()

            non_field = True

            meta = {"allow_inheritance": True}

        self.Person = Person

    def tearDown(self):
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
            self.db.drop_collection(collection)

    def test_indexes_document(self):
        """Ensure that indexes are used when meta[indexes] is specified for
        Documents
        """
        self._index_test(Document)

    def test_indexes_dynamic_document(self):
        """Ensure that indexes are used when meta[indexes] is specified for
        Dynamic Documents
        """
        self._index_test(DynamicDocument)

    def _index_test(self, InheritFrom):

        class BlogPost(InheritFrom):
            date = DateTimeField(db_field='addDate', default=datetime.now)
            category = StringField()
            tags = ListField(StringField())
            meta = {
                'indexes': [
                    '-date',
                    'tags',
                    ('category', '-date')
                ]
            }

        expected_specs = [{'fields': [('addDate', -1)]},
                          {'fields': [('tags', 1)]},
                          {'fields': [('category', 1), ('addDate', -1)]}]
        self.assertEqual(expected_specs, BlogPost._meta['index_specs'])

        BlogPost.ensure_indexes()
        info = BlogPost.objects._collection.index_information()
        # _id, '-date', 'tags', ('cat', 'date')
        self.assertEqual(len(info), 4)
        info = [value['key'] for key, value in info.iteritems()]
        for expected in expected_specs:
            self.assertTrue(expected['fields'] in info)

    def _index_test_inheritance(self, InheritFrom):

        class BlogPost(InheritFrom):
            date = DateTimeField(db_field='addDate', default=datetime.now)
            category = StringField()
            tags = ListField(StringField())
            meta = {
                'indexes': [
                    '-date',
                    'tags',
                    ('category', '-date')
                ],
                'allow_inheritance': True
            }

        expected_specs = [{'fields': [('_cls', 1), ('addDate', -1)]},
                          {'fields': [('_cls', 1), ('tags', 1)]},
                          {'fields': [('_cls', 1), ('category', 1),
                                      ('addDate', -1)]}]
        self.assertEqual(expected_specs, BlogPost._meta['index_specs'])

        BlogPost.ensure_indexes()
        info = BlogPost.objects._collection.index_information()
        # _id, '-date', 'tags', ('cat', 'date')
        # NB: there is no index on _cls by itself, since
        # the indices on -date and tags will both contain
        # _cls as first element in the key
        self.assertEqual(len(info), 4)
        info = [value['key'] for key, value in info.iteritems()]
        for expected in expected_specs:
            self.assertTrue(expected['fields'] in info)

        class ExtendedBlogPost(BlogPost):
            title = StringField()
            meta = {'indexes': ['title']}

        expected_specs.append({'fields': [('_cls', 1), ('title', 1)]})
        self.assertEqual(expected_specs, ExtendedBlogPost._meta['index_specs'])

        BlogPost.drop_collection()

        ExtendedBlogPost.ensure_indexes()
        info = ExtendedBlogPost.objects._collection.index_information()
        info = [value['key'] for key, value in info.iteritems()]
        for expected in expected_specs:
            self.assertTrue(expected['fields'] in info)

    def test_indexes_document_inheritance(self):
        """Ensure that indexes are used when meta[indexes] is specified for
        Documents
        """
        self._index_test_inheritance(Document)

    def test_indexes_dynamic_document_inheritance(self):
        """Ensure that indexes are used when meta[indexes] is specified for
        Dynamic Documents
        """
        self._index_test_inheritance(DynamicDocument)

    def test_inherited_index(self):
        """Ensure index specs are inherited correctly"""

        class A(Document):
            title = StringField()
            meta = {
                'indexes': [
                    {
                        'fields': ('title',),
                    },
                ],
                'allow_inheritance': True,
            }

        class B(A):
            description = StringField()

        self.assertEqual(A._meta['index_specs'], B._meta['index_specs'])
        self.assertEqual([{'fields': [('_cls', 1), ('title', 1)]}],
                         A._meta['index_specs'])

    def test_build_index_spec_is_not_destructive(self):

        class MyDoc(Document):
            keywords = StringField()

            meta = {
                'indexes': ['keywords'],
                'allow_inheritance': False
            }

        self.assertEqual(MyDoc._meta['index_specs'],
                         [{'fields': [('keywords', 1)]}])

        # Force index creation
        MyDoc.ensure_indexes()

        self.assertEqual(MyDoc._meta['index_specs'],
                         [{'fields': [('keywords', 1)]}])

    def test_embedded_document_index_meta(self):
        """Ensure that embedded document indexes are created explicitly
        """
        class Rank(EmbeddedDocument):
            title = StringField(required=True)

        class Person(Document):
            name = StringField(required=True)
            rank = EmbeddedDocumentField(Rank, required=False)

            meta = {
                'indexes': [
                    'rank.title',
                ],
                'allow_inheritance': False
            }

        self.assertEqual([{'fields': [('rank.title', 1)]}],
                         Person._meta['index_specs'])

        Person.drop_collection()

        # Indexes are lazy so use list() to perform query
        list(Person.objects)
        info = Person.objects._collection.index_information()
        info = [value['key'] for key, value in info.iteritems()]
        self.assertTrue([('rank.title', 1)] in info)

    def test_explicit_geo2d_index(self):
        """Ensure that geo2d indexes work when created via meta[indexes]
        """
        class Place(Document):
            location = DictField()
            meta = {
                'allow_inheritance': True,
                'indexes': [
                    '*location.point',
                ]
            }

        self.assertEqual([{'fields': [('location.point', '2d')]}],
                         Place._meta['index_specs'])

        Place.ensure_indexes()
        info = Place._get_collection().index_information()
        info = [value['key'] for key, value in info.iteritems()]
        self.assertTrue([('location.point', '2d')] in info)

    def test_explicit_geo2d_index_embedded(self):
        """Ensure that geo2d indexes work when created via meta[indexes]
        """
        class EmbeddedLocation(EmbeddedDocument):
            location = DictField()

        class Place(Document):
            current = DictField(
                field=EmbeddedDocumentField('EmbeddedLocation'))
            meta = {
                'allow_inheritance': True,
                'indexes': [
                    '*current.location.point',
                ]
            }

        self.assertEqual([{'fields': [('current.location.point', '2d')]}],
                         Place._meta['index_specs'])

        Place.ensure_indexes()
        info = Place._get_collection().index_information()
        info = [value['key'] for key, value in info.iteritems()]
        self.assertTrue([('current.location.point', '2d')] in info)

    def test_dictionary_indexes(self):
        """Ensure that indexes are used when meta[indexes] contains
        dictionaries instead of lists.
        """
        class BlogPost(Document):
            date = DateTimeField(db_field='addDate', default=datetime.now)
            category = StringField()
            tags = ListField(StringField())
            meta = {
                'indexes': [
                    {'fields': ['-date'], 'unique': True, 'sparse': True},
                ],
            }

        self.assertEqual([{'fields': [('addDate', -1)], 'unique': True,
                           'sparse': True}],
                         BlogPost._meta['index_specs'])

        BlogPost.drop_collection()

        info = BlogPost.objects._collection.index_information()
        # _id, '-date'
        self.assertEqual(len(info), 2)

        # Indexes are lazy so use list() to perform query
        list(BlogPost.objects)
        info = BlogPost.objects._collection.index_information()
        info = [(value['key'],
                 value.get('unique', False),
                 value.get('sparse', False))
                for key, value in info.iteritems()]
        self.assertTrue(([('addDate', -1)], True, True) in info)

        BlogPost.drop_collection()

    def test_abstract_index_inheritance(self):

        class UserBase(Document):
            user_guid = StringField(required=True)
            meta = {
                'abstract': True,
                'indexes': ['user_guid'],
                'allow_inheritance': True
            }

        class Person(UserBase):
            name = StringField()

            meta = {
                'indexes': ['name'],
            }
        Person.drop_collection()

        Person(name="test", user_guid='123').save()

        self.assertEqual(1, Person.objects.count())
        info = Person.objects._collection.index_information()
        self.assertEqual(sorted(info.keys()),
                         ['_cls_1_name_1', '_cls_1_user_guid_1', '_id_'])

    def test_disable_index_creation(self):
        """Tests setting auto_create_index to False on the connection will
        disable any index generation.
        """
        class User(Document):
            meta = {
                'indexes': ['user_guid'],
                'auto_create_index': False
            }
            user_guid = StringField(required=True)

        User.drop_collection()

        u = User(user_guid='123')
        u.save()

        self.assertEqual(1, User.objects.count())
        info = User.objects._collection.index_information()
        self.assertEqual(info.keys(), ['_id_'])
        User.drop_collection()

    def test_embedded_document_index(self):
        """Tests setting an index on an embedded document
        """
        class Date(EmbeddedDocument):
            year = IntField(db_field='yr')

        class BlogPost(Document):
            title = StringField()
            date = EmbeddedDocumentField(Date)

            meta = {
                'indexes': [
                    '-date.year'
                ],
            }

        BlogPost.drop_collection()

        info = BlogPost.objects._collection.index_information()
        self.assertEqual(sorted(info.keys()), ['_id_', 'date.yr_-1'])
        BlogPost.drop_collection()

    def test_list_embedded_document_index(self):
        """Ensure list embedded documents can be indexed
        """
        class Tag(EmbeddedDocument):
            name = StringField(db_field='tag')

        class BlogPost(Document):
            title = StringField()
            tags = ListField(EmbeddedDocumentField(Tag))

            meta = {
                'indexes': [
                    'tags.name'
                ]
            }

        BlogPost.drop_collection()

        info = BlogPost.objects._collection.index_information()
        # we don't use _cls with list fields by default
        self.assertEqual(sorted(info.keys()), ['_id_', 'tags.tag_1'])

        post1 = BlogPost(title="Embedded Indexes tests in place",
                         tags=[Tag(name="about"), Tag(name="time")]
                         )
        post1.save()
        BlogPost.drop_collection()

    def test_recursive_embedded_objects_dont_break_indexes(self):

        class RecursiveObject(EmbeddedDocument):
            obj = EmbeddedDocumentField('self')

        class RecursiveDocument(Document):
            recursive_obj = EmbeddedDocumentField(RecursiveObject)
            meta = {'allow_inheritance': True}

        RecursiveDocument.ensure_indexes()
        info = RecursiveDocument._get_collection().index_information()
        self.assertEqual(sorted(info.keys()), ['_cls_1', '_id_'])

    def test_geo_indexes_recursion(self):

        class Location(Document):
            name = StringField()
            location = GeoPointField()

        class Parent(Document):
            name = StringField()
            location = ReferenceField(Location, dbref=False)

        Location.drop_collection()
        Parent.drop_collection()

        list(Parent.objects)

        collection = Parent._get_collection()
        info = collection.index_information()

        self.assertFalse('location_2d' in info)

        self.assertEqual(len(Parent._geo_indices()), 0)
        self.assertEqual(len(Location._geo_indices()), 1)

    def test_covered_index(self):
        """Ensure that covered indexes can be used
        """

        class Test(Document):
            a = IntField()

            meta = {
                'indexes': ['a'],
                'allow_inheritance': False
            }

        Test.drop_collection()

        obj = Test(a=1)
        obj.save()

        # Need to be explicit about covered indexes as mongoDB doesn't know
        # whether the documents returned might have more keys than those in
        # the index.
        query_plan = Test.objects(id=obj.id).exclude('a').explain()
        self.assertFalse(query_plan['indexOnly'])

        query_plan = Test.objects(id=obj.id).only('id').explain()
        self.assertTrue(query_plan['indexOnly'])

        query_plan = Test.objects(a=1).only('a').exclude('id').explain()
        self.assertTrue(query_plan['indexOnly'])

    def test_index_on_id(self):

        class BlogPost(Document):
            meta = {
                'indexes': [
                    ['categories', 'id']
                ]
            }

            title = StringField(required=True)
            description = StringField(required=True)
            categories = ListField()

        BlogPost.drop_collection()

        indexes = BlogPost.objects._collection.index_information()
        self.assertEqual(indexes['categories_1__id_1']['key'],
                         [('categories', 1), ('_id', 1)])

    def test_hint(self):

        class BlogPost(Document):
            tags = ListField(StringField())
            meta = {
                'indexes': [
                    'tags',
                ],
            }

        BlogPost.drop_collection()

        for i in xrange(0, 10):
            tags = [("tag %i" % n) for n in xrange(0, i % 2)]
            BlogPost(tags=tags).save()

        self.assertEqual(BlogPost.objects.count(), 10)
        self.assertEqual(BlogPost.objects.hint().count(), 10)
        self.assertEqual(BlogPost.objects.hint([('tags', 1)]).count(), 10)

        self.assertEqual(BlogPost.objects.hint([('ZZ', 1)]).count(), 10)

        def invalid_index():
            BlogPost.objects.hint('tags')
        self.assertRaises(TypeError, invalid_index)

        def invalid_index_2():
            return BlogPost.objects.hint(('tags', 1))
        self.assertRaises(TypeError, invalid_index_2)

    def test_unique(self):
        """Ensure that uniqueness constraints are applied to fields.
        """
        class BlogPost(Document):
            title = StringField()
            slug = StringField(unique=True)

        BlogPost.drop_collection()

        post1 = BlogPost(title='test1', slug='test')
        post1.save()

        # Two posts with the same slug is not allowed
        post2 = BlogPost(title='test2', slug='test')
        self.assertRaises(NotUniqueError, post2.save)

        # Ensure backwards compatibility for errors
        self.assertRaises(OperationError, post2.save)

    def test_unique_with(self):
        """Ensure that unique_with constraints are applied to fields.
        """
        class Date(EmbeddedDocument):
            year = IntField(db_field='yr')

        class BlogPost(Document):
            title = StringField()
            date = EmbeddedDocumentField(Date)
            slug = StringField(unique_with='date.year')

        BlogPost.drop_collection()

        post1 = BlogPost(title='test1', date=Date(year=2009), slug='test')
        post1.save()

        # year is different so won't raise exception
        post2 = BlogPost(title='test2', date=Date(year=2010), slug='test')
        post2.save()

        # Now there will be two docs with the same slug and the same year: fail
        post3 = BlogPost(title='test3', date=Date(year=2010), slug='test')
        self.assertRaises(OperationError, post3.save)

        BlogPost.drop_collection()

    def test_unique_embedded_document(self):
        """Ensure that uniqueness constraints are applied to fields on embedded documents.
        """
        class SubDocument(EmbeddedDocument):
            year = IntField(db_field='yr')
            slug = StringField(unique=True)

        class BlogPost(Document):
            title = StringField()
            sub = EmbeddedDocumentField(SubDocument)

        BlogPost.drop_collection()

        post1 = BlogPost(title='test1',
                         sub=SubDocument(year=2009, slug="test"))
        post1.save()

        # sub.slug is different so won't raise exception
        post2 = BlogPost(title='test2',
                         sub=SubDocument(year=2010, slug='another-slug'))
        post2.save()

        # Now there will be two docs with the same sub.slug
        post3 = BlogPost(title='test3',
                         sub=SubDocument(year=2010, slug='test'))
        self.assertRaises(NotUniqueError, post3.save)

        BlogPost.drop_collection()

    def test_unique_with_embedded_document_and_embedded_unique(self):
        """Ensure that uniqueness constraints are applied to fields on
        embedded documents. And work with unique_with as well.
        """
        class SubDocument(EmbeddedDocument):
            year = IntField(db_field='yr')
            slug = StringField(unique=True)

        class BlogPost(Document):
            title = StringField(unique_with='sub.year')
            sub = EmbeddedDocumentField(SubDocument)

        BlogPost.drop_collection()

        post1 = BlogPost(title='test1',
                         sub=SubDocument(year=2009, slug="test"))
        post1.save()

        # sub.slug is different so won't raise exception
        post2 = BlogPost(title='test2',
                         sub=SubDocument(year=2010, slug='another-slug'))
        post2.save()

        # Now there will be two docs with the same sub.slug
        post3 = BlogPost(title='test3',
                         sub=SubDocument(year=2010, slug='test'))
        self.assertRaises(NotUniqueError, post3.save)

        # Now there will be two docs with the same title and year
        post3 = BlogPost(title='test1',
                         sub=SubDocument(year=2009, slug='test-1'))
        self.assertRaises(NotUniqueError, post3.save)

        BlogPost.drop_collection()

    def test_ttl_indexes(self):

        class Log(Document):
            created = DateTimeField(default=datetime.now)
            meta = {
                'indexes': [
                    {'fields': ['created'], 'expireAfterSeconds': 3600}
                ]
            }

        Log.drop_collection()

        if pymongo.version_tuple < (2, 3):
            raise SkipTest('pymongo needs to be 2.3 or higher for this test')

        connection = get_connection()
        version_array = connection.server_info()['versionArray']
        if version_array < [2, 2]:
            raise SkipTest('MongoDB needs to be 2.2 or higher for this test')

        # Indexes are lazy so use list() to perform query
        list(Log.objects)
        info = Log.objects._collection.index_information()
        self.assertEqual(3600,
                         info['created_1']['expireAfterSeconds'])

    def test_unique_and_indexes(self):
        """Ensure that 'unique' constraints aren't overridden by
        meta.indexes.
        """
        class Customer(Document):
            cust_id = IntField(unique=True, required=True)
            meta = {
                'indexes': ['cust_id'],
                'allow_inheritance': False,
            }

        Customer.drop_collection()
        cust = Customer(cust_id=1)
        cust.save()

        cust_dupe = Customer(cust_id=1)
        try:
            cust_dupe.save()
            raise AssertionError("We saved a dupe!")
        except NotUniqueError:
            pass
        Customer.drop_collection()

    def test_unique_and_primary(self):
        """If you set a field as primary, then unexpected behaviour can occur.
        You won't create a duplicate but you will update an existing document.
        """

        class User(Document):
            name = StringField(primary_key=True, unique=True)
            password = StringField()

        User.drop_collection()

        user = User(name='huangz', password='secret')
        user.save()

        user = User(name='huangz', password='secret2')
        user.save()

        self.assertEqual(User.objects.count(), 1)
        self.assertEqual(User.objects.get().password, 'secret2')

        User.drop_collection()

    def test_index_with_pk(self):
        """Ensure you can use `pk` as part of a query"""

        class Comment(EmbeddedDocument):
            comment_id = IntField(required=True)

        try:
            class BlogPost(Document):
                comments = EmbeddedDocumentField(Comment)
                meta = {'indexes': [
                    {'fields': ['pk', 'comments.comment_id'],
                     'unique': True}]}
        except UnboundLocalError:
            self.fail('Unbound local error at index + pk definition')

        info = BlogPost.objects._collection.index_information()
        info = [value['key'] for key, value in info.iteritems()]
        index_item = [('_id', 1), ('comments.comment_id', 1)]
        self.assertTrue(index_item in info)

    def test_compound_key_embedded(self):

        class CompoundKey(EmbeddedDocument):
            name = StringField(required=True)
            term = StringField(required=True)

        class Report(Document):
            key = EmbeddedDocumentField(CompoundKey, primary_key=True)
            text = StringField()

        Report.drop_collection()

        my_key = CompoundKey(name="n", term="ok")
        report = Report(text="OK", key=my_key).save()

        self.assertEqual({'text': 'OK', '_id': {'term': 'ok', 'name': 'n'}},
                         report.to_mongo())
        self.assertEqual(report, Report.objects.get(pk=my_key))

    def test_compound_key_dictfield(self):

        class Report(Document):
            key = DictField(primary_key=True)
            text = StringField()

        Report.drop_collection()

        my_key = {"name": "n", "term": "ok"}
        report = Report(text="OK", key=my_key).save()

        self.assertEqual({'text': 'OK', '_id': {'term': 'ok', 'name': 'n'}},
                         report.to_mongo())
        self.assertEqual(report, Report.objects.get(pk=my_key))


if __name__ == '__main__':
    unittest.main()
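As a compact companion to the index tests above: indexes are declared in a document's meta dict, built lazily (or forced with ensure_indexes()), and verifiable through pymongo's index_information(). This is a sketch, not part of the diff, and assumes a local MongoDB; Article is an illustrative class name:

    from mongoengine import Document, StringField, ListField, connect

    connect(db='mongoenginetest')

    class Article(Document):
        category = StringField()
        tags = ListField(StringField())
        meta = {
            'indexes': [
                'tags',                   # single-field ascending index
                ('category', '-tags'),    # compound; '-' marks descending
            ],
            'allow_inheritance': False,
        }

    Article.ensure_indexes()
    # Expect '_id_', 'category_1_tags_-1' and 'tags_1' among the keys.
    print(sorted(Article._get_collection().index_information().keys()))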
395  tests/document/inheritance.py  Normal file
@@ -0,0 +1,395 @@
# -*- coding: utf-8 -*-
import sys
sys.path[0:0] = [""]
import unittest
import warnings

from datetime import datetime

from tests.fixtures import Base

from mongoengine import Document, EmbeddedDocument, connect
from mongoengine.connection import get_db
from mongoengine.fields import (BooleanField, GenericReferenceField,
                                IntField, StringField)

__all__ = ('InheritanceTest', )


class InheritanceTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

    def tearDown(self):
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
            self.db.drop_collection(collection)

    def test_superclasses(self):
        """Ensure that the correct list of superclasses is assembled.
        """
        class Animal(Document):
            meta = {'allow_inheritance': True}
        class Fish(Animal): pass
        class Guppy(Fish): pass
        class Mammal(Animal): pass
        class Dog(Mammal): pass
        class Human(Mammal): pass

        self.assertEqual(Animal._superclasses, ())
        self.assertEqual(Fish._superclasses, ('Animal',))
        self.assertEqual(Guppy._superclasses, ('Animal', 'Animal.Fish'))
        self.assertEqual(Mammal._superclasses, ('Animal',))
        self.assertEqual(Dog._superclasses, ('Animal', 'Animal.Mammal'))
        self.assertEqual(Human._superclasses, ('Animal', 'Animal.Mammal'))

    def test_external_superclasses(self):
        """Ensure that the correct list of super classes is assembled when
        importing part of the model.
        """
        class Animal(Base): pass
        class Fish(Animal): pass
        class Guppy(Fish): pass
        class Mammal(Animal): pass
        class Dog(Mammal): pass
        class Human(Mammal): pass

        self.assertEqual(Animal._superclasses, ('Base', ))
        self.assertEqual(Fish._superclasses, ('Base', 'Base.Animal',))
        self.assertEqual(Guppy._superclasses, ('Base', 'Base.Animal',
                                               'Base.Animal.Fish'))
        self.assertEqual(Mammal._superclasses, ('Base', 'Base.Animal',))
        self.assertEqual(Dog._superclasses, ('Base', 'Base.Animal',
                                             'Base.Animal.Mammal'))
        self.assertEqual(Human._superclasses, ('Base', 'Base.Animal',
                                               'Base.Animal.Mammal'))

    def test_subclasses(self):
        """Ensure that the correct list of _subclasses (subclasses) is
        assembled.
        """
        class Animal(Document):
            meta = {'allow_inheritance': True}
        class Fish(Animal): pass
        class Guppy(Fish): pass
        class Mammal(Animal): pass
        class Dog(Mammal): pass
        class Human(Mammal): pass

        self.assertEqual(Animal._subclasses, ('Animal',
                                              'Animal.Fish',
                                              'Animal.Fish.Guppy',
                                              'Animal.Mammal',
                                              'Animal.Mammal.Dog',
                                              'Animal.Mammal.Human'))
        self.assertEqual(Fish._subclasses, ('Animal.Fish',
                                            'Animal.Fish.Guppy',))
        self.assertEqual(Guppy._subclasses, ('Animal.Fish.Guppy',))
        self.assertEqual(Mammal._subclasses, ('Animal.Mammal',
                                              'Animal.Mammal.Dog',
                                              'Animal.Mammal.Human'))
        self.assertEqual(Human._subclasses, ('Animal.Mammal.Human',))

    def test_external_subclasses(self):
        """Ensure that the correct list of _subclasses (subclasses) is
        assembled when importing part of the model.
        """
        class Animal(Base): pass
        class Fish(Animal): pass
        class Guppy(Fish): pass
        class Mammal(Animal): pass
        class Dog(Mammal): pass
        class Human(Mammal): pass

        self.assertEqual(Animal._subclasses, ('Base.Animal',
                                              'Base.Animal.Fish',
                                              'Base.Animal.Fish.Guppy',
                                              'Base.Animal.Mammal',
                                              'Base.Animal.Mammal.Dog',
                                              'Base.Animal.Mammal.Human'))
        self.assertEqual(Fish._subclasses, ('Base.Animal.Fish',
                                            'Base.Animal.Fish.Guppy',))
        self.assertEqual(Guppy._subclasses, ('Base.Animal.Fish.Guppy',))
        self.assertEqual(Mammal._subclasses, ('Base.Animal.Mammal',
                                              'Base.Animal.Mammal.Dog',
                                              'Base.Animal.Mammal.Human'))
        self.assertEqual(Human._subclasses, ('Base.Animal.Mammal.Human',))

    def test_dynamic_declarations(self):
        """Test that declaring an extra class updates meta data"""

        class Animal(Document):
            meta = {'allow_inheritance': True}

        self.assertEqual(Animal._superclasses, ())
        self.assertEqual(Animal._subclasses, ('Animal',))

        # Test dynamically adding a class changes the meta data
        class Fish(Animal):
            pass

        self.assertEqual(Animal._superclasses, ())
        self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish'))

        self.assertEqual(Fish._superclasses, ('Animal', ))
        self.assertEqual(Fish._subclasses, ('Animal.Fish',))

        # Test dynamically adding an inherited class changes the meta data
        class Pike(Fish):
            pass

        self.assertEqual(Animal._superclasses, ())
        self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish',
                                              'Animal.Fish.Pike'))

        self.assertEqual(Fish._superclasses, ('Animal', ))
        self.assertEqual(Fish._subclasses, ('Animal.Fish', 'Animal.Fish.Pike'))

        self.assertEqual(Pike._superclasses, ('Animal', 'Animal.Fish'))
        self.assertEqual(Pike._subclasses, ('Animal.Fish.Pike',))

    def test_inheritance_meta_data(self):
        """Ensure that document may inherit fields from a superclass document.
        """
        class Person(Document):
            name = StringField()
            age = IntField()

            meta = {'allow_inheritance': True}

        class Employee(Person):
            salary = IntField()

        self.assertEqual(['age', 'id', 'name', 'salary'],
                         sorted(Employee._fields.keys()))
        self.assertEqual(Employee._get_collection_name(),
                         Person._get_collection_name())

    def test_polymorphic_queries(self):
        """Ensure that the correct subclasses are returned from a query
        """

        class Animal(Document):
            meta = {'allow_inheritance': True}
        class Fish(Animal): pass
        class Mammal(Animal): pass
        class Dog(Mammal): pass
        class Human(Mammal): pass

        Animal.drop_collection()

        Animal().save()
        Fish().save()
        Mammal().save()
        Dog().save()
        Human().save()

        classes = [obj.__class__ for obj in Animal.objects]
        self.assertEqual(classes, [Animal, Fish, Mammal, Dog, Human])

        classes = [obj.__class__ for obj in Mammal.objects]
        self.assertEqual(classes, [Mammal, Dog, Human])

        classes = [obj.__class__ for obj in Human.objects]
        self.assertEqual(classes, [Human])

    def test_allow_inheritance(self):
        """Ensure that inheritance may be disabled on simple classes and that
        _cls and _subclasses will not be used.
        """

        class Animal(Document):
            name = StringField()

        def create_dog_class():
            class Dog(Animal):
                pass

        self.assertRaises(ValueError, create_dog_class)

        # Check that _cls etc aren't present on simple documents
        dog = Animal(name='dog')
        dog.save()

        collection = self.db[Animal._get_collection_name()]
        obj = collection.find_one()
        self.assertFalse('_cls' in obj)

    def test_cant_turn_off_inheritance_on_subclass(self):
        """Ensure if inheritance is on in a subclass you can't turn it off
        """

        class Animal(Document):
            name = StringField()
            meta = {'allow_inheritance': True}

        def create_mammal_class():
            class Mammal(Animal):
                meta = {'allow_inheritance': False}
        self.assertRaises(ValueError, create_mammal_class)

    def test_allow_inheritance_abstract_document(self):
        """Ensure that abstract documents can set inheritance rules and that
        _cls will not be used.
        """
        class FinalDocument(Document):
            meta = {'abstract': True,
                    'allow_inheritance': False}

        class Animal(FinalDocument):
            name = StringField()

        def create_mammal_class():
            class Mammal(Animal):
                pass
        self.assertRaises(ValueError, create_mammal_class)

        # Check that _cls isn't present in simple documents
        doc = Animal(name='dog')
        self.assertFalse('_cls' in doc.to_mongo())

    def test_allow_inheritance_embedded_document(self):
        """Ensure embedded documents respect inheritance
        """

        class Comment(EmbeddedDocument):
            content = StringField()

        def create_special_comment():
            class SpecialComment(Comment):
                pass

        self.assertRaises(ValueError, create_special_comment)

        doc = Comment(content='test')
        self.assertFalse('_cls' in doc.to_mongo())

        class Comment(EmbeddedDocument):
            content = StringField()
            meta = {'allow_inheritance': True}

        doc = Comment(content='test')
        self.assertTrue('_cls' in doc.to_mongo())

    def test_document_inheritance(self):
        """Ensure multiple inheritance of abstract documents
        """
        class DateCreatedDocument(Document):
            meta = {
                'allow_inheritance': True,
                'abstract': True,
            }

        class DateUpdatedDocument(Document):
            meta = {
                'allow_inheritance': True,
                'abstract': True,
            }

        try:
            class MyDocument(DateCreatedDocument, DateUpdatedDocument):
                pass
        except:
            self.assertTrue(False, "Couldn't create MyDocument class")

    def test_abstract_documents(self):
        """Ensure that a document superclass can be marked as abstract
        thereby not using it as the name for the collection."""

        defaults = {'index_background': True,
                    'index_drop_dups': True,
                    'index_opts': {'hello': 'world'},
                    'allow_inheritance': True,
                    'queryset_class': 'QuerySet',
                    'db_alias': 'myDB',
                    'shard_key': ('hello', 'world')}

        meta_settings = {'abstract': True}
        meta_settings.update(defaults)

        class Animal(Document):
            name = StringField()
            meta = meta_settings

        class Fish(Animal): pass
        class Guppy(Fish): pass

        class Mammal(Animal):
            meta = {'abstract': True}
        class Human(Mammal): pass

        for k, v in defaults.iteritems():
            for cls in [Animal, Fish, Guppy]:
                self.assertEqual(cls._meta[k], v)

        self.assertFalse('collection' in Animal._meta)
        self.assertFalse('collection' in Mammal._meta)

        self.assertEqual(Animal._get_collection_name(), None)
        self.assertEqual(Mammal._get_collection_name(), None)

        self.assertEqual(Fish._get_collection_name(), 'fish')
        self.assertEqual(Guppy._get_collection_name(), 'fish')
        self.assertEqual(Human._get_collection_name(), 'human')

        def create_bad_abstract():
            class EvilHuman(Human):
                evil = BooleanField(default=True)
                meta = {'abstract': True}
        self.assertRaises(ValueError, create_bad_abstract)

    def test_inherited_collections(self):
        """Ensure that subclassed documents don't override parents'
        collections
        """

        class Drink(Document):
            name = StringField()
            meta = {'allow_inheritance': True}

        class Drinker(Document):
            drink = GenericReferenceField()

        try:
            warnings.simplefilter("error")

            class AlcoholicDrink(Drink):
                meta = {'collection': 'booze'}

        except SyntaxWarning:
            warnings.simplefilter("ignore")

            class AlcoholicDrink(Drink):
                meta = {'collection': 'booze'}

        else:
            raise AssertionError("SyntaxWarning should be triggered")

        warnings.resetwarnings()

        Drink.drop_collection()
        AlcoholicDrink.drop_collection()
        Drinker.drop_collection()

        red_bull = Drink(name='Red Bull')
        red_bull.save()

        programmer = Drinker(drink=red_bull)
        programmer.save()

        beer = AlcoholicDrink(name='Beer')
        beer.save()
        real_person = Drinker(drink=beer)
        real_person.save()

        self.assertEqual(Drinker.objects[0].drink.name, red_bull.name)
        self.assertEqual(Drinker.objects[1].drink.name, beer.name)


if __name__ == '__main__':
    unittest.main()
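The inheritance tests hinge on the _cls marker: with allow_inheritance enabled, each stored document records its class path, which is how base-class queries return properly typed subclass instances. A hedged sketch of that mechanic (local MongoDB assumed; class names are illustrative, not from this diff):

    from mongoengine import Document, StringField, connect

    connect(db='mongoenginetest')

    class Animal(Document):
        name = StringField()
        meta = {'allow_inheritance': True}

    class Dog(Animal):
        pass

    Animal.drop_collection()
    Dog(name='Rex').save()

    # The stored document carries _cls='Animal.Dog', so a query on the
    # base class rebuilds a Dog instance.
    fetched = Animal.objects.first()
    assert isinstance(fetched, Dog)
    assert fetched.to_mongo()['_cls'] == 'Animal.Dog'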
2196  tests/document/instance.py  Normal file
File diff suppressed because it is too large
81  tests/document/json_serialisation.py  Normal file
@@ -0,0 +1,81 @@
import sys
|
||||||
|
sys.path[0:0] = [""]
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
from nose.plugins.skip import SkipTest
|
||||||
|
from datetime import datetime
|
||||||
|
from bson import ObjectId
|
||||||
|
|
||||||
|
import pymongo
|
||||||
|
|
||||||
|
from mongoengine import *
|
||||||
|
|
||||||
|
__all__ = ("TestJson",)
|
||||||
|
|
||||||
|
|
||||||
|
class TestJson(unittest.TestCase):
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
connect(db='mongoenginetest')
|
||||||
|
|
||||||
|
def test_json_simple(self):
|
||||||
|
|
||||||
|
class Embedded(EmbeddedDocument):
|
||||||
|
string = StringField()
|
||||||
|
|
||||||
|
class Doc(Document):
|
||||||
|
string = StringField()
|
||||||
|
embedded_field = EmbeddedDocumentField(Embedded)
|
||||||
|
|
||||||
|
doc = Doc(string="Hi", embedded_field=Embedded(string="Hi"))
|
||||||
|
|
||||||
|
self.assertEqual(doc, Doc.from_json(doc.to_json()))
|
||||||
|
|
||||||
|
def test_json_complex(self):
|
||||||
|
|
||||||
|
if pymongo.version_tuple[0] <= 2 and pymongo.version_tuple[1] <= 3:
|
||||||
|
raise SkipTest("Need pymongo 2.4 as has a fix for DBRefs")
|
||||||
|
|
||||||
|
class EmbeddedDoc(EmbeddedDocument):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class Simple(Document):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class Doc(Document):
|
||||||
|
string_field = StringField(default='1')
|
||||||
|
int_field = IntField(default=1)
|
||||||
|
float_field = FloatField(default=1.1)
|
||||||
|
boolean_field = BooleanField(default=True)
|
||||||
|
datetime_field = DateTimeField(default=datetime.now)
|
||||||
|
embedded_document_field = EmbeddedDocumentField(EmbeddedDoc,
|
||||||
|
default=lambda: EmbeddedDoc())
|
||||||
|
list_field = ListField(default=lambda: [1, 2, 3])
|
||||||
|
dict_field = DictField(default=lambda: {"hello": "world"})
|
||||||
|
objectid_field = ObjectIdField(default=ObjectId)
|
||||||
|
reference_field = ReferenceField(Simple, default=lambda:
|
||||||
|
Simple().save())
|
||||||
|
map_field = MapField(IntField(), default=lambda: {"simple": 1})
|
||||||
|
decimal_field = DecimalField(default=1.0)
|
||||||
|
complex_datetime_field = ComplexDateTimeField(default=datetime.now)
|
||||||
|
url_field = URLField(default="http://mongoengine.org")
|
||||||
|
dynamic_field = DynamicField(default=1)
|
||||||
|
generic_reference_field = GenericReferenceField(
|
||||||
|
default=lambda: Simple().save())
|
||||||
|
sorted_list_field = SortedListField(IntField(),
|
||||||
|
default=lambda: [1, 2, 3])
|
||||||
|
email_field = EmailField(default="ross@example.com")
|
||||||
|
geo_point_field = GeoPointField(default=lambda: [1, 2])
|
||||||
|
sequence_field = SequenceField()
|
||||||
|
uuid_field = UUIDField(default=uuid.uuid4)
|
||||||
|
generic_embedded_document_field = GenericEmbeddedDocumentField(
|
||||||
|
default=lambda: EmbeddedDoc())
|
||||||
|
|
||||||
|
doc = Doc()
|
||||||
|
self.assertEqual(doc, Doc.from_json(doc.to_json()))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
unittest.main()
|
||||||
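Illustrative usage of the round-trip these tests exercise (a sketch assuming the to_json/from_json API introduced in this diff; the Note class is hypothetical):

from mongoengine import Document, StringField, connect

connect(db='mongoenginetest')

class Note(Document):  # hypothetical example document
    text = StringField()

note = Note(text="Hi")
payload = note.to_json()            # serialise the document to a JSON string
restored = Note.from_json(payload)  # rebuild an equivalent unsaved document
assert note == restored
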
146  tests/document/validation.py  Normal file
@@ -0,0 +1,146 @@
# -*- coding: utf-8 -*-
import sys
sys.path[0:0] = [""]

import unittest
from datetime import datetime

from mongoengine import *

__all__ = ("ValidatorErrorTest",)


class ValidatorErrorTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')

    def test_to_dict(self):
        """Ensure a ValidationError handles error to_dict correctly.
        """
        error = ValidationError('root')
        self.assertEqual(error.to_dict(), {})

        # 1st level error schema
        error.errors = {'1st': ValidationError('bad 1st'), }
        self.assertTrue('1st' in error.to_dict())
        self.assertEqual(error.to_dict()['1st'], 'bad 1st')

        # 2nd level error schema
        error.errors = {'1st': ValidationError('bad 1st', errors={
            '2nd': ValidationError('bad 2nd'),
        })}
        self.assertTrue('1st' in error.to_dict())
        self.assertTrue(isinstance(error.to_dict()['1st'], dict))
        self.assertTrue('2nd' in error.to_dict()['1st'])
        self.assertEqual(error.to_dict()['1st']['2nd'], 'bad 2nd')

        # moar levels
        error.errors = {'1st': ValidationError('bad 1st', errors={
            '2nd': ValidationError('bad 2nd', errors={
                '3rd': ValidationError('bad 3rd', errors={
                    '4th': ValidationError('Inception'),
                }),
            }),
        })}
        self.assertTrue('1st' in error.to_dict())
        self.assertTrue('2nd' in error.to_dict()['1st'])
        self.assertTrue('3rd' in error.to_dict()['1st']['2nd'])
        self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd'])
        self.assertEqual(error.to_dict()['1st']['2nd']['3rd']['4th'],
                         'Inception')

        self.assertEqual(error.message, "root(2nd.3rd.4th.Inception: ['1st'])")

    def test_model_validation(self):

        class User(Document):
            username = StringField(primary_key=True)
            name = StringField(required=True)

        try:
            User().validate()
        except ValidationError, e:
            self.assertTrue("User:None" in e.message)
            self.assertEqual(e.to_dict(), {
                'username': 'Field is required',
                'name': 'Field is required'})

        user = User(username="RossC0", name="Ross").save()
        user.name = None
        try:
            user.save()
        except ValidationError, e:
            self.assertTrue("User:RossC0" in e.message)
            self.assertEqual(e.to_dict(), {
                'name': 'Field is required'})

    def test_fields_rewrite(self):
        class BasePerson(Document):
            name = StringField()
            age = IntField()
            meta = {'abstract': True}

        class Person(BasePerson):
            name = StringField(required=True)

        p = Person(age=15)
        self.assertRaises(ValidationError, p.validate)

    def test_embedded_document_validation(self):
        """Ensure that embedded documents may be validated.
        """
        class Comment(EmbeddedDocument):
            date = DateTimeField()
            content = StringField(required=True)

        comment = Comment()
        self.assertRaises(ValidationError, comment.validate)

        comment.content = 'test'
        comment.validate()

        comment.date = 4
        self.assertRaises(ValidationError, comment.validate)

        comment.date = datetime.now()
        comment.validate()
        self.assertEqual(comment._instance, None)

    def test_embedded_db_field_validate(self):

        class SubDoc(EmbeddedDocument):
            val = IntField(required=True)

        class Doc(Document):
            id = StringField(primary_key=True)
            e = EmbeddedDocumentField(SubDoc, db_field='eb')

        try:
            Doc(id="bad").validate()
        except ValidationError, e:
            self.assertTrue("SubDoc:None" in e.message)
            self.assertEqual(e.to_dict(), {
                "e": {'val': 'OK could not be converted to int'}})

        Doc.drop_collection()

        Doc(id="test", e=SubDoc(val=15)).save()

        doc = Doc.objects.first()
        keys = doc._data.keys()
        self.assertEqual(2, len(keys))
        self.assertTrue('e' in keys)
        self.assertTrue('id' in keys)

        doc.e.val = "OK"
        try:
            doc.save()
        except ValidationError, e:
            self.assertTrue("Doc:test" in e.message)
            self.assertEqual(e.to_dict(), {
                "e": {'val': 'OK could not be converted to int'}})


if __name__ == '__main__':
    unittest.main()
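A sketch of the error structure these tests assert on (assuming the ValidationError API shown in this diff): nested errors flatten into a plain dict keyed by field name, with leaf errors reduced to their message strings.

from mongoengine import ValidationError

# Sketch only: one level of nesting collapses to {field: message}.
err = ValidationError('root', errors={
    'name': ValidationError('Field is required'),
})
assert err.to_dict() == {'name': 'Field is required'}
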
@@ -1,502 +0,0 @@
import unittest

from mongoengine import *
from mongoengine.connection import get_db


class DynamicDocTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

        class Person(DynamicDocument):
            name = StringField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()

        self.Person = Person

    def test_simple_dynamic_document(self):
        """Ensures simple dynamic documents are saved correctly"""

        p = self.Person()
        p.name = "James"
        p.age = 34

        self.assertEquals(p.to_mongo(),
                          {"_types": ["Person"], "_cls": "Person",
                           "name": "James", "age": 34}
                          )

        p.save()

        self.assertEquals(self.Person.objects.first().age, 34)

        # Confirm no changes to self.Person
        self.assertFalse(hasattr(self.Person, 'age'))

    def test_dynamic_document_delta(self):
        """Ensures simple dynamic documents can delta correctly"""
        p = self.Person(name="James", age=34)
        self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {}))

        p.doc = 123
        del(p.doc)
        self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {'doc': 1}))

    def test_change_scope_of_variable(self):
        """Test changing the scope of a dynamic field has no adverse effects"""
        p = self.Person()
        p.name = "Dean"
        p.misc = 22
        p.save()

        p = self.Person.objects.get()
        p.misc = {'hello': 'world'}
        p.save()

        p = self.Person.objects.get()
        self.assertEquals(p.misc, {'hello': 'world'})

    def test_delete_dynamic_field(self):
        """Test deleting a dynamic field works"""
        self.Person.drop_collection()
        p = self.Person()
        p.name = "Dean"
        p.misc = 22
        p.save()

        p = self.Person.objects.get()
        p.misc = {'hello': 'world'}
        p.save()

        p = self.Person.objects.get()
        self.assertEquals(p.misc, {'hello': 'world'})
        collection = self.db[self.Person._get_collection_name()]
        obj = collection.find_one()
        self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'misc', 'name'])

        del(p.misc)
        p.save()

        p = self.Person.objects.get()
        self.assertFalse(hasattr(p, 'misc'))

        obj = collection.find_one()
        self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'name'])

    def test_dynamic_document_queries(self):
        """Ensure we can query dynamic fields"""
        p = self.Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        self.assertEquals(1, self.Person.objects(age=22).count())
        p = self.Person.objects(age=22)
        p = p.get()
        self.assertEquals(22, p.age)

    def test_complex_dynamic_document_queries(self):
        class Person(DynamicDocument):
            name = StringField()

        Person.drop_collection()

        p = Person(name="test")
        p.age = "ten"
        p.save()

        p1 = Person(name="test1")
        p1.age = "less then ten and a half"
        p1.save()

        p2 = Person(name="test2")
        p2.age = 10
        p2.save()

        self.assertEquals(Person.objects(age__icontains='ten').count(), 2)
        self.assertEquals(Person.objects(age__gte=10).count(), 1)

    def test_complex_data_lookups(self):
        """Ensure you can query dynamic document dynamic fields"""
        p = self.Person()
        p.misc = {'hello': 'world'}
        p.save()

        self.assertEquals(1, self.Person.objects(misc__hello='world').count())

    def test_inheritance(self):
        """Ensure that dynamic document plays nice with inheritance"""
        class Employee(self.Person):
            salary = IntField()

        Employee.drop_collection()

        self.assertTrue('name' in Employee._fields)
        self.assertTrue('salary' in Employee._fields)
        self.assertEqual(Employee._get_collection_name(),
                         self.Person._get_collection_name())

        joe_bloggs = Employee()
        joe_bloggs.name = "Joe Bloggs"
        joe_bloggs.salary = 10
        joe_bloggs.age = 20
        joe_bloggs.save()

        self.assertEquals(1, self.Person.objects(age=20).count())
        self.assertEquals(1, Employee.objects(age=20).count())

        joe_bloggs = self.Person.objects.first()
        self.assertTrue(isinstance(joe_bloggs, Employee))

    def test_embedded_dynamic_document(self):
        """Test dynamic embedded documents"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
            "embedded_field": {
                "_types": ['Embedded'], "_cls": "Embedded",
                "string_field": "hello",
                "int_field": 1,
                "dict_field": {"hello": "world"},
                "list_field": ['1', 2, {'hello': 'world'}]
            }
        })
        doc.save()

        doc = Doc.objects.first()
        self.assertEquals(doc.embedded_field.__class__, Embedded)
        self.assertEquals(doc.embedded_field.string_field, "hello")
        self.assertEquals(doc.embedded_field.int_field, 1)
        self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'})
        self.assertEquals(doc.embedded_field.list_field, ['1', 2, {'hello': 'world'}])

    def test_complex_embedded_documents(self):
        """Test complex dynamic embedded documents setups"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        embedded_1.list_field = ['1', 2, embedded_2]
        doc.embedded_field = embedded_1

        self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
            "embedded_field": {
                "_types": ['Embedded'], "_cls": "Embedded",
                "string_field": "hello",
                "int_field": 1,
                "dict_field": {"hello": "world"},
                "list_field": ['1', 2,
                    {"_types": ['Embedded'], "_cls": "Embedded",
                     "string_field": "hello",
                     "int_field": 1,
                     "dict_field": {"hello": "world"},
                     "list_field": ['1', 2, {'hello': 'world'}]}
                ]
            }
        })
        doc.save()
        doc = Doc.objects.first()
        self.assertEquals(doc.embedded_field.__class__, Embedded)
        self.assertEquals(doc.embedded_field.string_field, "hello")
        self.assertEquals(doc.embedded_field.int_field, 1)
        self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'})
        self.assertEquals(doc.embedded_field.list_field[0], '1')
        self.assertEquals(doc.embedded_field.list_field[1], 2)

        embedded_field = doc.embedded_field.list_field[2]

        self.assertEquals(embedded_field.__class__, Embedded)
        self.assertEquals(embedded_field.string_field, "hello")
        self.assertEquals(embedded_field.int_field, 1)
        self.assertEquals(embedded_field.dict_field, {'hello': 'world'})
        self.assertEquals(embedded_field.list_field, ['1', 2, {'hello': 'world'}])

    def test_delta_for_dynamic_documents(self):
        p = self.Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        p.age = 24
        self.assertEquals(p.age, 24)
        self.assertEquals(p._get_changed_fields(), ['age'])
        self.assertEquals(p._delta(), ({'age': 24}, {}))

        p = self.Person.objects(age=22).get()
        p.age = 24
        self.assertEquals(p.age, 24)
        self.assertEquals(p._get_changed_fields(), ['age'])
        self.assertEquals(p._delta(), ({'age': 24}, {}))

        p.save()
        self.assertEquals(1, self.Person.objects(age=24).count())

    def test_delta(self):

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEquals(doc._get_changed_fields(), [])
        self.assertEquals(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEquals(doc._get_changed_fields(), ['string_field'])
        self.assertEquals(doc._delta(), ({'string_field': 'hello'}, {}))

        doc._changed_fields = []
        doc.int_field = 1
        self.assertEquals(doc._get_changed_fields(), ['int_field'])
        self.assertEquals(doc._delta(), ({'int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEquals(doc._get_changed_fields(), ['dict_field'])
        self.assertEquals(doc._delta(), ({'dict_field': dict_value}, {}))

        doc._changed_fields = []
        list_value = ['1', 2, {'hello': 'world'}]
        doc.list_field = list_value
        self.assertEquals(doc._get_changed_fields(), ['list_field'])
        self.assertEquals(doc._delta(), ({'list_field': list_value}, {}))

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        self.assertEquals(doc._get_changed_fields(), ['dict_field'])
        self.assertEquals(doc._delta(), ({}, {'dict_field': 1}))

        doc._changed_fields = []
        doc.list_field = []
        self.assertEquals(doc._get_changed_fields(), ['list_field'])
        self.assertEquals(doc._delta(), ({}, {'list_field': 1}))

    def test_delta_recursive(self):
        """Testing deltaing works with dynamic documents"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEquals(doc._get_changed_fields(), [])
        self.assertEquals(doc._delta(), ({}, {}))

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEquals(doc._get_changed_fields(), ['embedded_field'])

        embedded_delta = {
            'string_field': 'hello',
            'int_field': 1,
            'dict_field': {'hello': 'world'},
            'list_field': ['1', 2, {'hello': 'world'}]
        }
        self.assertEquals(doc.embedded_field._delta(), (embedded_delta, {}))
        embedded_delta.update({
            '_types': ['Embedded'],
            '_cls': 'Embedded',
        })
        self.assertEquals(doc._delta(), ({'embedded_field': embedded_delta}, {}))

        doc.save()
        doc.reload()

        doc.embedded_field.dict_field = {}
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.dict_field'])
        self.assertEquals(doc.embedded_field._delta(), ({}, {'dict_field': 1}))

        self.assertEquals(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
        doc.save()
        doc.reload()

        doc.embedded_field.list_field = []
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
        self.assertEquals(doc.embedded_field._delta(), ({}, {'list_field': 1}))
        self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field': 1}))
        doc.save()
        doc.reload()

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        doc.embedded_field.list_field = ['1', 2, embedded_2]
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
        self.assertEquals(doc.embedded_field._delta(), ({
            'list_field': ['1', 2, {
                '_cls': 'Embedded',
                '_types': ['Embedded'],
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))

        self.assertEquals(doc._delta(), ({
            'embedded_field.list_field': ['1', 2, {
                '_cls': 'Embedded',
                '_types': ['Embedded'],
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))
        doc.save()
        doc.reload()

        self.assertEquals(doc.embedded_field.list_field[2]._changed_fields, [])
        self.assertEquals(doc.embedded_field.list_field[0], '1')
        self.assertEquals(doc.embedded_field.list_field[1], 2)
        for k in doc.embedded_field.list_field[2]._fields:
            self.assertEquals(doc.embedded_field.list_field[2][k], embedded_2[k])

        doc.embedded_field.list_field[2].string_field = 'world'
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field.2.string_field'])
        self.assertEquals(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {}))
        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {}))
        doc.save()
        doc.reload()
        self.assertEquals(doc.embedded_field.list_field[2].string_field, 'world')

        # Test multiple assignments
        doc.embedded_field.list_field[2].string_field = 'hello world'
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
        self.assertEquals(doc.embedded_field._delta(), ({
            'list_field': ['1', 2, {
                '_types': ['Embedded'],
                '_cls': 'Embedded',
                'string_field': 'hello world',
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
                'dict_field': {'hello': 'world'}}]}, {}))
        self.assertEquals(doc._delta(), ({
            'embedded_field.list_field': ['1', 2, {
                '_types': ['Embedded'],
                '_cls': 'Embedded',
                'string_field': 'hello world',
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
                'dict_field': {'hello': 'world'}}
            ]}, {}))
        doc.save()
        doc.reload()
        self.assertEquals(doc.embedded_field.list_field[2].string_field, 'hello world')

        # Test list native methods
        doc.embedded_field.list_field[2].list_field.pop(0)
        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {}))
        doc.save()
        doc.reload()

        doc.embedded_field.list_field[2].list_field.append(1)
        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {}))
        doc.save()
        doc.reload()
        self.assertEquals(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1])

        doc.embedded_field.list_field[2].list_field.sort()
        doc.save()
        doc.reload()
        self.assertEquals(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}])

        del(doc.embedded_field.list_field[2].list_field[2]['hello'])
        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {}))
        doc.save()
        doc.reload()

        del(doc.embedded_field.list_field[2].list_field)
        self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1}))

        doc.save()
        doc.reload()

        doc.dict_field = {'embedded': embedded_1}
        doc.save()
        doc.reload()

        doc.dict_field['embedded'].string_field = 'Hello World'
        self.assertEquals(doc._get_changed_fields(), ['dict_field.embedded.string_field'])
        self.assertEquals(doc._delta(), ({'dict_field.embedded.string_field': 'Hello World'}, {}))

    def test_indexes(self):
        """Ensure that indexes are used when meta[indexes] is specified.
        """
        class BlogPost(DynamicDocument):
            meta = {
                'indexes': [
                    '-date',
                    ('category', '-date')
                ],
            }

        BlogPost.drop_collection()

        info = BlogPost.objects._collection.index_information()
        # _id, '-date', ('cat', 'date')
        # NB: there is no index on _types by itself, since
        # the indices on -date and tags will both contain
        # _types as first element in the key
        self.assertEqual(len(info), 3)

        # Indexes are lazy so use list() to perform query
        list(BlogPost.objects)
        info = BlogPost.objects._collection.index_information()
        info = [value['key'] for key, value in info.iteritems()]
        self.assertTrue([('_types', 1), ('category', 1), ('date', -1)]
                        in info)
        self.assertTrue([('_types', 1), ('date', -1)] in info)
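A minimal sketch of the dynamic-document behaviour this deleted suite covered (assuming the 0.7-era DynamicDocument API; the Page class is illustrative): attributes that were never declared are still persisted and queryable.

from mongoengine import DynamicDocument, StringField, connect

connect(db='mongoenginetest')

class Page(DynamicDocument):  # hypothetical example class
    title = StringField()

page = Page(title='home')
page.views = 42       # dynamic field, never declared on the class
page.save()

page = Page.objects.get()
assert page.views == 42                      # round-trips through MongoDB
assert Page.objects(views=42).count() == 1   # dynamic fields are queryable
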
2  tests/fields/__init__.py  Normal file
@@ -0,0 +1,2 @@
from fields import *
from file_tests import *
File diff suppressed because it is too large
383  tests/fields/file_tests.py  Normal file
@@ -0,0 +1,383 @@
# -*- coding: utf-8 -*-
from __future__ import with_statement
import sys
sys.path[0:0] = [""]

import copy
import os
import unittest
import tempfile

import gridfs

from nose.plugins.skip import SkipTest
from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.python_support import PY3, b, StringIO

TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png')


class FileTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

    def tearDown(self):
        self.db.drop_collection('fs.files')
        self.db.drop_collection('fs.chunks')

    def test_file_field_optional(self):
        # Make sure FileField is optional and not required
        class DemoFile(Document):
            the_file = FileField()
        DemoFile.objects.create()

    def test_file_fields(self):
        """Ensure that file fields can be written to and their data retrieved
        """

        class PutFile(Document):
            the_file = FileField()

        PutFile.drop_collection()

        text = b('Hello, World!')
        content_type = 'text/plain'

        putfile = PutFile()
        putfile.the_file.put(text, content_type=content_type)
        putfile.save()

        result = PutFile.objects.first()
        self.assertTrue(putfile == result)
        self.assertEqual(result.the_file.read(), text)
        self.assertEqual(result.the_file.content_type, content_type)
        result.the_file.delete()  # Remove file from GridFS
        PutFile.objects.delete()

        # Ensure file-like objects are stored
        PutFile.drop_collection()

        putfile = PutFile()
        putstring = StringIO()
        putstring.write(text)
        putstring.seek(0)
        putfile.the_file.put(putstring, content_type=content_type)
        putfile.save()

        result = PutFile.objects.first()
        self.assertTrue(putfile == result)
        self.assertEqual(result.the_file.read(), text)
        self.assertEqual(result.the_file.content_type, content_type)
        result.the_file.delete()

    def test_file_fields_stream(self):
        """Ensure that file fields can be written to and their data retrieved
        """
        class StreamFile(Document):
            the_file = FileField()

        StreamFile.drop_collection()

        text = b('Hello, World!')
        more_text = b('Foo Bar')
        content_type = 'text/plain'

        streamfile = StreamFile()
        streamfile.the_file.new_file(content_type=content_type)
        streamfile.the_file.write(text)
        streamfile.the_file.write(more_text)
        streamfile.the_file.close()
        streamfile.save()

        result = StreamFile.objects.first()
        self.assertTrue(streamfile == result)
        self.assertEqual(result.the_file.read(), text + more_text)
        self.assertEqual(result.the_file.content_type, content_type)
        result.the_file.seek(0)
        self.assertEqual(result.the_file.tell(), 0)
        self.assertEqual(result.the_file.read(len(text)), text)
        self.assertEqual(result.the_file.tell(), len(text))
        self.assertEqual(result.the_file.read(len(more_text)), more_text)
        self.assertEqual(result.the_file.tell(), len(text + more_text))
        result.the_file.delete()

        # Ensure deleted file returns None
        self.assertTrue(result.the_file.read() == None)

    def test_file_fields_set(self):

        class SetFile(Document):
            the_file = FileField()

        text = b('Hello, World!')
        more_text = b('Foo Bar')

        SetFile.drop_collection()

        setfile = SetFile()
        setfile.the_file = text
        setfile.save()

        result = SetFile.objects.first()
        self.assertTrue(setfile == result)
        self.assertEqual(result.the_file.read(), text)

        # Try replacing file with new one
        result.the_file.replace(more_text)
        result.save()

        result = SetFile.objects.first()
        self.assertTrue(setfile == result)
        self.assertEqual(result.the_file.read(), more_text)
        result.the_file.delete()

    def test_file_field_no_default(self):

        class GridDocument(Document):
            the_file = FileField()

        GridDocument.drop_collection()

        with tempfile.TemporaryFile() as f:
            f.write(b("Hello World!"))
            f.flush()

            # Test without default
            doc_a = GridDocument()
            doc_a.save()

            doc_b = GridDocument.objects.with_id(doc_a.id)
            doc_b.the_file.replace(f, filename='doc_b')
            doc_b.save()
            self.assertNotEqual(doc_b.the_file.grid_id, None)

            # Test it matches
            doc_c = GridDocument.objects.with_id(doc_b.id)
            self.assertEqual(doc_b.the_file.grid_id, doc_c.the_file.grid_id)

            # Test with default
            doc_d = GridDocument(the_file=b(''))
            doc_d.save()

            doc_e = GridDocument.objects.with_id(doc_d.id)
            self.assertEqual(doc_d.the_file.grid_id, doc_e.the_file.grid_id)

            doc_e.the_file.replace(f, filename='doc_e')
            doc_e.save()

            doc_f = GridDocument.objects.with_id(doc_e.id)
            self.assertEqual(doc_e.the_file.grid_id, doc_f.the_file.grid_id)

        db = GridDocument._get_db()
        grid_fs = gridfs.GridFS(db)
        self.assertEqual(['doc_b', 'doc_e'], grid_fs.list())

    def test_file_uniqueness(self):
        """Ensure that each instance of a FileField is unique
        """
        class TestFile(Document):
            name = StringField()
            the_file = FileField()

        # First instance
        test_file = TestFile()
        test_file.name = "Hello, World!"
        test_file.the_file.put(b('Hello, World!'))
        test_file.save()

        # Second instance
        test_file_dupe = TestFile()
        data = test_file_dupe.the_file.read()  # Should be None

        self.assertTrue(test_file.name != test_file_dupe.name)
        self.assertTrue(test_file.the_file.read() != data)

        TestFile.drop_collection()

    def test_file_saving(self):
        """Ensure you can add meta data to file"""

        class Animal(Document):
            genus = StringField()
            family = StringField()
            photo = FileField()

        Animal.drop_collection()
        marmot = Animal(genus='Marmota', family='Sciuridae')

        marmot_photo = open(TEST_IMAGE_PATH, 'rb')  # Retrieve a photo from disk
        marmot.photo.put(marmot_photo, content_type='image/jpeg', foo='bar')
        marmot.photo.close()
        marmot.save()

        marmot = Animal.objects.get()
        self.assertEqual(marmot.photo.content_type, 'image/jpeg')
        self.assertEqual(marmot.photo.foo, 'bar')

    def test_file_boolean(self):
        """Ensure that a boolean test of a FileField indicates its presence
        """
        class TestFile(Document):
            the_file = FileField()
        TestFile.drop_collection()

        test_file = TestFile()
        self.assertFalse(bool(test_file.the_file))
        test_file.the_file.put(b('Hello, World!'), content_type='text/plain')
        test_file.save()
        self.assertTrue(bool(test_file.the_file))

        test_file = TestFile.objects.first()
        self.assertEqual(test_file.the_file.content_type, "text/plain")

    def test_file_cmp(self):
        """Test comparing against other types"""
        class TestFile(Document):
            the_file = FileField()

        test_file = TestFile()
        self.assertFalse(test_file.the_file in [{"test": 1}])

    def test_image_field(self):
        if PY3:
            raise SkipTest('PIL does not have Python 3 support')

        class TestImage(Document):
            image = ImageField()

        TestImage.drop_collection()

        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'rb'))
        t.save()

        t = TestImage.objects.first()

        self.assertEqual(t.image.format, 'PNG')

        w, h = t.image.size
        self.assertEqual(w, 371)
        self.assertEqual(h, 76)

        t.image.delete()

    def test_image_field_resize(self):
        if PY3:
            raise SkipTest('PIL does not have Python 3 support')

        class TestImage(Document):
            image = ImageField(size=(185, 37))

        TestImage.drop_collection()

        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'rb'))
        t.save()

        t = TestImage.objects.first()

        self.assertEqual(t.image.format, 'PNG')
        w, h = t.image.size

        self.assertEqual(w, 185)
        self.assertEqual(h, 37)

        t.image.delete()

    def test_image_field_resize_force(self):
        if PY3:
            raise SkipTest('PIL does not have Python 3 support')

        class TestImage(Document):
            image = ImageField(size=(185, 37, True))

        TestImage.drop_collection()

        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'rb'))
        t.save()

        t = TestImage.objects.first()

        self.assertEqual(t.image.format, 'PNG')
        w, h = t.image.size

        self.assertEqual(w, 185)
        self.assertEqual(h, 37)

        t.image.delete()

    def test_image_field_thumbnail(self):
        if PY3:
            raise SkipTest('PIL does not have Python 3 support')

        class TestImage(Document):
            image = ImageField(thumbnail_size=(92, 18))

        TestImage.drop_collection()

        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'rb'))
        t.save()

        t = TestImage.objects.first()

        self.assertEqual(t.image.thumbnail.format, 'PNG')
        self.assertEqual(t.image.thumbnail.width, 92)
        self.assertEqual(t.image.thumbnail.height, 18)

        t.image.delete()

    def test_file_multidb(self):
        register_connection('test_files', 'test_files')

        class TestFile(Document):
            name = StringField()
            the_file = FileField(db_alias="test_files",
                                 collection_name="macumba")

        TestFile.drop_collection()

        # delete old filesystem
        get_db("test_files").macumba.files.drop()
        get_db("test_files").macumba.chunks.drop()

        # First instance
        test_file = TestFile()
        test_file.name = "Hello, World!"
        test_file.the_file.put(b('Hello, World!'),
                               name="hello.txt")
        test_file.save()

        data = get_db("test_files").macumba.files.find_one()
        self.assertEqual(data.get('name'), 'hello.txt')

        test_file = TestFile.objects.first()
        self.assertEqual(test_file.the_file.read(),
                         b('Hello, World!'))

    def test_copyable(self):
        class PutFile(Document):
            the_file = FileField()

        PutFile.drop_collection()

        text = b('Hello, World!')
        content_type = 'text/plain'

        putfile = PutFile()
        putfile.the_file.put(text, content_type=content_type)
        putfile.save()

        class TestFile(Document):
            name = StringField()

        self.assertEqual(putfile, copy.copy(putfile))
        self.assertEqual(putfile, copy.deepcopy(putfile))


if __name__ == '__main__':
    unittest.main()
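A sketch of the GridFS workflow these FileTest cases exercise (assuming the 0.7-era FileField API and the b() helper shown in this diff; the Photo class is hypothetical):

from mongoengine import Document, FileField, connect
from mongoengine.python_support import b

connect(db='mongoenginetest')

class Photo(Document):  # hypothetical example class
    data = FileField()

photo = Photo()
photo.data.put(b('raw bytes'), content_type='image/png')  # write to GridFS
photo.save()

stored = Photo.objects.first()
assert stored.data.read() == b('raw bytes')
stored.data.delete()  # also removes the underlying GridFS file
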
Binary image (tests/fields/mongoengine.png): 8.1 KiB before, 8.1 KiB after
4  tests/migration/__init__.py  Normal file
@@ -0,0 +1,4 @@
import unittest

from turn_off_inheritance import *

if __name__ == '__main__':
    unittest.main()
51  tests/migration/test_convert_to_new_inheritance_model.py  Normal file
@@ -0,0 +1,51 @@
# -*- coding: utf-8 -*-
import unittest

from mongoengine import Document, connect
from mongoengine.connection import get_db
from mongoengine.fields import StringField

__all__ = ('ConvertToNewInheritanceModel', )


class ConvertToNewInheritanceModel(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

    def tearDown(self):
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
            self.db.drop_collection(collection)

    def test_how_to_convert_to_the_new_inheritance_model(self):
        """Demonstrates migrating from 0.7 to 0.8
        """

        # 1. Declaration of the class
        class Animal(Document):
            name = StringField()
            meta = {
                'allow_inheritance': True,
                'indexes': ['name']
            }

        # 2. Remove _types
        collection = Animal._get_collection()
        collection.update({}, {"$unset": {"_types": 1}}, multi=True)

        # 3. Confirm extra data is removed
        count = collection.find({'_types': {"$exists": True}}).count()
        assert count == 0

        # 4. Remove indexes
        info = collection.index_information()
        indexes_to_drop = [key for key, value in info.iteritems()
                           if '_types' in dict(value['key'])]
        for index in indexes_to_drop:
            collection.drop_index(index)

        # 5. Recreate indexes
        Animal.ensure_indexes()
62  tests/migration/turn_off_inheritance.py  Normal file
@@ -0,0 +1,62 @@
# -*- coding: utf-8 -*-
import unittest

from mongoengine import Document, connect
from mongoengine.connection import get_db
from mongoengine.fields import StringField

__all__ = ('TurnOffInheritanceTest', )


class TurnOffInheritanceTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

    def tearDown(self):
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
            self.db.drop_collection(collection)

    def test_how_to_turn_off_inheritance(self):
        """Demonstrates migrating from allow_inheritance = True to False.
        """

        # 1. Old declaration of the class
        class Animal(Document):
            name = StringField()
            meta = {
                'allow_inheritance': True,
                'indexes': ['name']
            }

        # 2. Turn off inheritance
        class Animal(Document):
            name = StringField()
            meta = {
                'allow_inheritance': False,
                'indexes': ['name']
            }

        # 3. Remove _types and _cls
        collection = Animal._get_collection()
        collection.update({}, {"$unset": {"_types": 1, "_cls": 1}}, multi=True)

        # 4. Confirm extra data is removed
        count = collection.find({"$or": [{'_types': {"$exists": True}},
                                         {'_cls': {"$exists": True}}]}).count()
        assert count == 0

        # 5. Remove indexes
        info = collection.index_information()
        indexes_to_drop = [key for key, value in info.iteritems()
                           if '_types' in dict(value['key'])
                           or '_cls' in dict(value['key'])]
        for index in indexes_to_drop:
            collection.drop_index(index)

        # 6. Recreate indexes
        Animal.ensure_indexes()
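The same steps can be run once against a live collection, outside a test harness. A sketch under the same 0.7-to-0.8 assumptions as the two migration tests above (database and class names are illustrative):

from mongoengine import Document, connect
from mongoengine.fields import StringField

connect(db='production_db')  # illustrative database name

class Animal(Document):
    name = StringField()
    meta = {'allow_inheritance': True, 'indexes': ['name']}

collection = Animal._get_collection()
collection.update({}, {"$unset": {"_types": 1}}, multi=True)  # strip legacy field

# Drop every index that still includes _types, then rebuild from meta.
info = collection.index_information()
for name, value in info.items():
    if '_types' in dict(value['key']):
        collection.drop_index(name)
Animal.ensure_indexes()
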
5  tests/queryset/__init__.py  Normal file
@@ -0,0 +1,5 @@
from transform import *
from field_list import *
from queryset import *
from visitor import *
399  tests/queryset/field_list.py  Normal file
@@ -0,0 +1,399 @@
import sys
sys.path[0:0] = [""]

import unittest

from mongoengine import *
from mongoengine.queryset import QueryFieldList

__all__ = ("QueryFieldListTest", "OnlyExcludeAllTest")


class QueryFieldListTest(unittest.TestCase):

    def test_empty(self):
        q = QueryFieldList()
        self.assertFalse(q)

        q = QueryFieldList(always_include=['_cls'])
        self.assertFalse(q)

    def test_include_include(self):
        q = QueryFieldList()
        q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY)
        self.assertEqual(q.as_dict(), {'a': True, 'b': True})
        q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
        self.assertEqual(q.as_dict(), {'b': True})

    def test_include_exclude(self):
        q = QueryFieldList()
        q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY)
        self.assertEqual(q.as_dict(), {'a': True, 'b': True})
        q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE)
        self.assertEqual(q.as_dict(), {'a': True})

    def test_exclude_exclude(self):
        q = QueryFieldList()
        q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE)
        self.assertEqual(q.as_dict(), {'a': False, 'b': False})
        q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE)
        self.assertEqual(q.as_dict(), {'a': False, 'b': False, 'c': False})

    def test_exclude_include(self):
        q = QueryFieldList()
        q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE)
        self.assertEqual(q.as_dict(), {'a': False, 'b': False})
        q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
        self.assertEqual(q.as_dict(), {'c': True})

    def test_always_include(self):
        q = QueryFieldList(always_include=['x', 'y'])
        q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE)
        q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
        self.assertEqual(q.as_dict(), {'x': True, 'y': True, 'c': True})

    def test_reset(self):
        q = QueryFieldList(always_include=['x', 'y'])
        q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE)
        q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
        self.assertEqual(q.as_dict(), {'x': True, 'y': True, 'c': True})
        q.reset()
        self.assertFalse(q)
        q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
        self.assertEqual(q.as_dict(), {'x': True, 'y': True, 'b': True, 'c': True})

    def test_using_a_slice(self):
        q = QueryFieldList()
        q += QueryFieldList(fields=['a'], value={"$slice": 5})
        self.assertEqual(q.as_dict(), {'a': {"$slice": 5}})
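How these compositions behave in practice, in one line of reasoning (a sketch using only the QueryFieldList API exercised above): a second ONLY narrows to the intersection, while a later EXCLUDE subtracts from whatever is currently included.

from mongoengine.queryset import QueryFieldList

q = QueryFieldList()
q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY)
q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE)
assert q.as_dict() == {'a': True}  # 'b' was excluded after being included
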
class OnlyExcludeAllTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')

        class Person(Document):
            name = StringField()
            age = IntField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()
        self.Person = Person

    def test_mixing_only_exclude(self):

        class MyDoc(Document):
            a = StringField()
            b = StringField()
            c = StringField()
            d = StringField()
            e = StringField()
            f = StringField()

        include = ['a', 'b', 'c', 'd', 'e']
        exclude = ['d', 'e']
        only = ['b', 'c']

        qs = MyDoc.objects.fields(**dict(((i, 1) for i in include)))
        self.assertEqual(qs._loaded_fields.as_dict(),
                         {'a': 1, 'b': 1, 'c': 1, 'd': 1, 'e': 1})
        qs = qs.only(*only)
        self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})
        qs = qs.exclude(*exclude)
        self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})

        qs = MyDoc.objects.fields(**dict(((i, 1) for i in include)))
        qs = qs.exclude(*exclude)
        self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1})
        qs = qs.only(*only)
        self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})

        qs = MyDoc.objects.exclude(*exclude)
        qs = qs.fields(**dict(((i, 1) for i in include)))
        self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1})
        qs = qs.only(*only)
        self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})

    def test_slicing(self):

        class MyDoc(Document):
            a = ListField()
            b = ListField()
            c = ListField()
            d = ListField()
            e = ListField()
            f = ListField()

        include = ['a', 'b', 'c', 'd', 'e']
        exclude = ['d', 'e']
        only = ['b', 'c']

        qs = MyDoc.objects.fields(**dict(((i, 1) for i in include)))
        qs = qs.exclude(*exclude)
        qs = qs.only(*only)
        qs = qs.fields(slice__b=5)
        self.assertEqual(qs._loaded_fields.as_dict(),
                         {'b': {'$slice': 5}, 'c': 1})

        qs = qs.fields(slice__c=[5, 1])
        self.assertEqual(qs._loaded_fields.as_dict(),
                         {'b': {'$slice': 5}, 'c': {'$slice': [5, 1]}})

        qs = qs.exclude('c')
        self.assertEqual(qs._loaded_fields.as_dict(),
                         {'b': {'$slice': 5}})

    def test_only(self):
        """Ensure that QuerySet.only only returns the requested fields.
        """
        person = self.Person(name='test', age=25)
        person.save()

        obj = self.Person.objects.only('name').get()
        self.assertEqual(obj.name, person.name)
        self.assertEqual(obj.age, None)

        obj = self.Person.objects.only('age').get()
        self.assertEqual(obj.name, None)
        self.assertEqual(obj.age, person.age)

        obj = self.Person.objects.only('name', 'age').get()
        self.assertEqual(obj.name, person.name)
        self.assertEqual(obj.age, person.age)

        # Check polymorphism still works
        class Employee(self.Person):
            salary = IntField(db_field='wage')

        employee = Employee(name='test employee', age=40, salary=30000)
        employee.save()

        obj = self.Person.objects(id=employee.id).only('age').get()
        self.assertTrue(isinstance(obj, Employee))

        # Check field names are looked up properly
        obj = Employee.objects(id=employee.id).only('salary').get()
        self.assertEqual(obj.salary, employee.salary)
        self.assertEqual(obj.name, None)

    def test_only_with_subfields(self):
        class User(EmbeddedDocument):
            name = StringField()
            email = StringField()

        class Comment(EmbeddedDocument):
            title = StringField()
            text = StringField()

        class BlogPost(Document):
            content = StringField()
            author = EmbeddedDocumentField(User)
            comments = ListField(EmbeddedDocumentField(Comment))

        BlogPost.drop_collection()

        post = BlogPost(content='Had a good coffee today...')
        post.author = User(name='Test User')
        post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')]
        post.save()

        obj = BlogPost.objects.only('author.name',).get()
        self.assertEqual(obj.content, None)
        self.assertEqual(obj.author.email, None)
        self.assertEqual(obj.author.name, 'Test User')
        self.assertEqual(obj.comments, [])

        obj = BlogPost.objects.only('content', 'comments.title',).get()
        self.assertEqual(obj.content, 'Had a good coffee today...')
        self.assertEqual(obj.author, None)
        self.assertEqual(obj.comments[0].title, 'I aggree')
        self.assertEqual(obj.comments[1].title, 'Coffee')
        self.assertEqual(obj.comments[0].text, None)
        self.assertEqual(obj.comments[1].text, None)

        obj = BlogPost.objects.only('comments',).get()
        self.assertEqual(obj.content, None)
        self.assertEqual(obj.author, None)
        self.assertEqual(obj.comments[0].title, 'I aggree')
        self.assertEqual(obj.comments[1].title, 'Coffee')
        self.assertEqual(obj.comments[0].text, 'Great post!')
        self.assertEqual(obj.comments[1].text, 'I hate coffee')

        BlogPost.drop_collection()

    def test_exclude(self):
        class User(EmbeddedDocument):
            name = StringField()
            email = StringField()

        class Comment(EmbeddedDocument):
            title = StringField()
            text = StringField()

        class BlogPost(Document):
            content = StringField()
            author = EmbeddedDocumentField(User)
            comments = ListField(EmbeddedDocumentField(Comment))

        BlogPost.drop_collection()

        post = BlogPost(content='Had a good coffee today...')
        post.author = User(name='Test User')
        post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')]
        post.save()

        obj = BlogPost.objects.exclude('author', 'comments.text').get()
        self.assertEqual(obj.author, None)
        self.assertEqual(obj.content, 'Had a good coffee today...')
        self.assertEqual(obj.comments[0].title, 'I aggree')
        self.assertEqual(obj.comments[0].text, None)

        BlogPost.drop_collection()

    def test_exclude_only_combining(self):
        class Attachment(EmbeddedDocument):
            name = StringField()
            content = StringField()

        class Email(Document):
            sender = StringField()
            to = StringField()
            subject = StringField()
            body = StringField()
            content_type = StringField()
            attachments = ListField(EmbeddedDocumentField(Attachment))

        Email.drop_collection()
        email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', content_type='text/plain')
        email.attachments = [
            Attachment(name='file1.doc', content='ABC'),
            Attachment(name='file2.doc', content='XYZ'),
        ]
        email.save()

        obj = Email.objects.exclude('content_type').exclude('body').get()
        self.assertEqual(obj.sender, 'me')
        self.assertEqual(obj.to, 'you')
        self.assertEqual(obj.subject, 'From Russia with Love')
        self.assertEqual(obj.body, None)
        self.assertEqual(obj.content_type, None)

        obj = Email.objects.only('sender', 'to').exclude('body', 'sender').get()
        self.assertEqual(obj.sender, None)
        self.assertEqual(obj.to, 'you')
        self.assertEqual(obj.subject, None)
        self.assertEqual(obj.body, None)
        self.assertEqual(obj.content_type, None)

        obj = Email.objects.exclude('attachments.content').exclude('body').only('to', 'attachments.name').get()
        self.assertEqual(obj.attachments[0].name, 'file1.doc')
        self.assertEqual(obj.attachments[0].content, None)
        self.assertEqual(obj.sender, None)
        self.assertEqual(obj.to, 'you')
        self.assertEqual(obj.subject, None)
        self.assertEqual(obj.body, None)
        self.assertEqual(obj.content_type, None)

        Email.drop_collection()

    def test_all_fields(self):

        class Email(Document):
            sender = StringField()
            to = StringField()
            subject = StringField()
            body = StringField()
            content_type = StringField()

        Email.drop_collection()

        email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', content_type='text/plain')
        email.save()

        obj = Email.objects.exclude('content_type', 'body').only('to', 'body').all_fields().get()
        self.assertEqual(obj.sender, 'me')
        self.assertEqual(obj.to, 'you')
        self.assertEqual(obj.subject, 'From Russia with Love')
        self.assertEqual(obj.body, 'Hello!')
        self.assertEqual(obj.content_type, 'text/plain')

        Email.drop_collection()

    def test_slicing_fields(self):
        """Ensure that query slicing an array works.
        """
        class Numbers(Document):
            n = ListField(IntField())

        Numbers.drop_collection()

        numbers = Numbers(n=[0, 1, 2, 3, 4, 5, -5, -4, -3, -2, -1])
        numbers.save()

        # first three
        numbers = Numbers.objects.fields(slice__n=3).get()
        self.assertEqual(numbers.n, [0, 1, 2])

        # last three
        numbers = Numbers.objects.fields(slice__n=-3).get()
        self.assertEqual(numbers.n, [-3, -2, -1])

        # skip 2, limit 3
        numbers = Numbers.objects.fields(slice__n=[2, 3]).get()
        self.assertEqual(numbers.n, [2, 3, 4])

        # skip to fifth from last, limit 4
        numbers = Numbers.objects.fields(slice__n=[-5, 4]).get()
        self.assertEqual(numbers.n, [-5, -4, -3, -2])

        # skip to fifth from last, limit 10
        numbers = Numbers.objects.fields(slice__n=[-5, 10]).get()
        self.assertEqual(numbers.n, [-5, -4, -3, -2, -1])

        # skip to fifth from last, limit 10 dict method
        numbers = Numbers.objects.fields(n={"$slice": [-5, 10]}).get()
        self.assertEqual(numbers.n, [-5, -4, -3, -2, -1])

    def test_slicing_nested_fields(self):
        """Ensure that query slicing an embedded array works.
        """

        class EmbeddedNumber(EmbeddedDocument):
            n = ListField(IntField())

        class Numbers(Document):
            embedded = EmbeddedDocumentField(EmbeddedNumber)
|
||||||
|
|
||||||
|
Numbers.drop_collection()
|
||||||
|
|
||||||
|
numbers = Numbers()
|
||||||
|
numbers.embedded = EmbeddedNumber(n=[0,1,2,3,4,5,-5,-4,-3,-2,-1])
|
||||||
|
numbers.save()
|
||||||
|
|
||||||
|
# first three
|
||||||
|
numbers = Numbers.objects.fields(slice__embedded__n=3).get()
|
||||||
|
self.assertEqual(numbers.embedded.n, [0, 1, 2])
|
||||||
|
|
||||||
|
# last three
|
||||||
|
numbers = Numbers.objects.fields(slice__embedded__n=-3).get()
|
||||||
|
self.assertEqual(numbers.embedded.n, [-3, -2, -1])
|
||||||
|
|
||||||
|
# skip 2, limit 3
|
||||||
|
numbers = Numbers.objects.fields(slice__embedded__n=[2, 3]).get()
|
||||||
|
self.assertEqual(numbers.embedded.n, [2, 3, 4])
|
||||||
|
|
||||||
|
# skip to fifth from last, limit 4
|
||||||
|
numbers = Numbers.objects.fields(slice__embedded__n=[-5, 4]).get()
|
||||||
|
self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2])
|
||||||
|
|
||||||
|
# skip to fifth from last, limit 10
|
||||||
|
numbers = Numbers.objects.fields(slice__embedded__n=[-5, 10]).get()
|
||||||
|
self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1])
|
||||||
|
|
||||||
|
# skip to fifth from last, limit 10 dict method
|
||||||
|
numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get()
|
||||||
|
self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1])
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
unittest.main()
|
||||||
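The field-selection calls above translate directly into MongoDB projection documents. A minimal sketch of the equivalent raw PyMongo projections, assuming a local mongod, a recent PyMongo, and the default snake_case collection names MongoEngine derives ('blog_post', 'numbers'); illustrative only, not part of the diff:

import pymongo

db = pymongo.MongoClient()['mongoenginetest']

# only('author.name') -> inclusion projection on the dotted path (plus _id)
db.blog_post.find_one({}, {'author.name': 1})

# exclude('author', 'comments.text') -> exclusion projection
db.blog_post.find_one({}, {'author': 0, 'comments.text': 0})

# fields(slice__n=[-5, 10]) -> the $slice projection operator
db.numbers.find_one({}, {'n': {'$slice': [-5, 10]}})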
148  tests/queryset/transform.py  (new file)
@@ -0,0 +1,148 @@
from __future__ import with_statement
import sys
sys.path[0:0] = [""]

import unittest

from mongoengine import *
from mongoengine.queryset import Q
from mongoengine.queryset import transform

__all__ = ("TransformTest",)


class TransformTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')

    def test_transform_query(self):
        """Ensure that the _transform_query function operates correctly.
        """
        self.assertEqual(transform.query(name='test', age=30),
                         {'name': 'test', 'age': 30})
        self.assertEqual(transform.query(age__lt=30),
                         {'age': {'$lt': 30}})
        self.assertEqual(transform.query(age__gt=20, age__lt=50),
                         {'age': {'$gt': 20, '$lt': 50}})
        self.assertEqual(transform.query(age=20, age__gt=50),
                         {'$and': [{'age': {'$gt': 50}}, {'age': 20}]})
        self.assertEqual(transform.query(friend__age__gte=30),
                         {'friend.age': {'$gte': 30}})
        self.assertEqual(transform.query(name__exists=True),
                         {'name': {'$exists': True}})

    def test_query_field_name(self):
        """Ensure that the correct field name is used when querying.
        """
        class Comment(EmbeddedDocument):
            content = StringField(db_field='commentContent')

        class BlogPost(Document):
            title = StringField(db_field='postTitle')
            comments = ListField(EmbeddedDocumentField(Comment),
                                 db_field='postComments')

        BlogPost.drop_collection()

        data = {'title': 'Post 1', 'comments': [Comment(content='test')]}
        post = BlogPost(**data)
        post.save()

        self.assertTrue('postTitle' in
                        BlogPost.objects(title=data['title'])._query)
        self.assertFalse('title' in
                         BlogPost.objects(title=data['title'])._query)
        self.assertEqual(len(BlogPost.objects(title=data['title'])), 1)

        self.assertTrue('_id' in BlogPost.objects(pk=post.id)._query)
        self.assertEqual(len(BlogPost.objects(pk=post.id)), 1)

        self.assertTrue('postComments.commentContent' in
                        BlogPost.objects(comments__content='test')._query)
        self.assertEqual(len(BlogPost.objects(comments__content='test')), 1)

        BlogPost.drop_collection()

    def test_query_pk_field_name(self):
        """Ensure that the correct "primary key" field name is used when
        querying
        """
        class BlogPost(Document):
            title = StringField(primary_key=True, db_field='postTitle')

        BlogPost.drop_collection()

        data = {'title': 'Post 1'}
        post = BlogPost(**data)
        post.save()

        self.assertTrue('_id' in BlogPost.objects(pk=data['title'])._query)
        self.assertTrue('_id' in BlogPost.objects(title=data['title'])._query)
        self.assertEqual(len(BlogPost.objects(pk=data['title'])), 1)

        BlogPost.drop_collection()

    def test_chaining(self):
        class A(Document):
            pass

        class B(Document):
            a = ReferenceField(A)

        A.drop_collection()
        B.drop_collection()

        a1 = A().save()
        a2 = A().save()

        B(a=a1).save()

        # Works
        q1 = B.objects.filter(a__in=[a1, a2], a=a1)._query

        # Doesn't work
        q2 = B.objects.filter(a__in=[a1, a2])
        q2 = q2.filter(a=a1)._query

        self.assertEqual(q1, q2)

    def test_raw_query_and_Q_objects(self):
        """
        Test raw plays nicely
        """
        class Foo(Document):
            name = StringField()
            a = StringField()
            b = StringField()
            c = StringField()

            meta = {
                'allow_inheritance': False
            }

        query = Foo.objects(__raw__={'$nor': [{'name': 'bar'}]})._query
        self.assertEqual(query, {'$nor': [{'name': 'bar'}]})

        q1 = {'$or': [{'a': 1}, {'b': 1}]}
        query = Foo.objects(Q(__raw__=q1) & Q(c=1))._query
        self.assertEqual(query, {'$or': [{'a': 1}, {'b': 1}], 'c': 1})

    def test_raw_and_merging(self):
        class Doc(Document):
            meta = {'allow_inheritance': False}

        raw_query = Doc.objects(__raw__={'deleted': False,
                                'scraped': 'yes',
                                '$nor': [{'views.extracted': 'no'},
                                         {'attachments.views.extracted': 'no'}]
                                })._query

        expected = {'deleted': False, 'scraped': 'yes',
                    '$nor': [{'views.extracted': 'no'},
                             {'attachments.views.extracted': 'no'}]}
        self.assertEqual(expected, raw_query)


if __name__ == '__main__':
    unittest.main()
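transform.query is the helper that turns MongoEngine's double-underscore keyword syntax into the raw filter document handed to PyMongo. A short usage sketch based only on the assertions above (Python 2, matching the test files):

from mongoengine.queryset import transform

print transform.query(age__gt=20, age__lt=50)   # {'age': {'$gt': 20, '$lt': 50}}
print transform.query(friend__age__gte=30)      # {'friend.age': {'$gte': 30}}
print transform.query(name__exists=True)        # {'name': {'$exists': True}}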
335  tests/queryset/visitor.py  (new file)
@@ -0,0 +1,335 @@
from __future__ import with_statement
import sys
sys.path[0:0] = [""]

import unittest

from bson import ObjectId
from datetime import datetime

from mongoengine import *
from mongoengine.queryset import Q
from mongoengine.errors import InvalidQueryError

__all__ = ("QTest",)


class QTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')

        class Person(Document):
            name = StringField()
            age = IntField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()
        self.Person = Person

    def test_empty_q(self):
        """Ensure that empty Q objects won't hurt.
        """
        q1 = Q()
        q2 = Q(age__gte=18)
        q3 = Q()
        q4 = Q(name='test')
        q5 = Q()

        class Person(Document):
            name = StringField()
            age = IntField()

        query = {'$or': [{'age': {'$gte': 18}}, {'name': 'test'}]}
        self.assertEqual((q1 | q2 | q3 | q4 | q5).to_query(Person), query)

        query = {'age': {'$gte': 18}, 'name': 'test'}
        self.assertEqual((q1 & q2 & q3 & q4 & q5).to_query(Person), query)

    def test_q_with_dbref(self):
        """Ensure Q objects handle DBRefs correctly"""
        connect(db='mongoenginetest')

        class User(Document):
            pass

        class Post(Document):
            created_user = ReferenceField(User)

        user = User.objects.create()
        Post.objects.create(created_user=user)

        self.assertEqual(Post.objects.filter(created_user=user).count(), 1)
        self.assertEqual(Post.objects.filter(Q(created_user=user)).count(), 1)

    def test_and_combination(self):
        """Ensure that Q-objects correctly AND together.
        """
        class TestDoc(Document):
            x = IntField()
            y = StringField()

        # Check than an error is raised when conflicting queries are anded
        def invalid_combination():
            query = Q(x__lt=7) & Q(x__lt=3)
            query.to_query(TestDoc)
        self.assertRaises(InvalidQueryError, invalid_combination)

        # Check normal cases work without an error
        query = Q(x__lt=7) & Q(x__gt=3)

        q1 = Q(x__lt=7)
        q2 = Q(x__gt=3)
        query = (q1 & q2).to_query(TestDoc)
        self.assertEqual(query, {'x': {'$lt': 7, '$gt': 3}})

        # More complex nested example
        query = Q(x__lt=100) & Q(y__ne='NotMyString')
        query &= Q(y__in=['a', 'b', 'c']) & Q(x__gt=-100)
        mongo_query = {
            'x': {'$lt': 100, '$gt': -100},
            'y': {'$ne': 'NotMyString', '$in': ['a', 'b', 'c']},
        }
        self.assertEqual(query.to_query(TestDoc), mongo_query)

    def test_or_combination(self):
        """Ensure that Q-objects correctly OR together.
        """
        class TestDoc(Document):
            x = IntField()

        q1 = Q(x__lt=3)
        q2 = Q(x__gt=7)
        query = (q1 | q2).to_query(TestDoc)
        self.assertEqual(query, {
            '$or': [
                {'x': {'$lt': 3}},
                {'x': {'$gt': 7}},
            ]
        })

    def test_and_or_combination(self):
        """Ensure that Q-objects handle ANDing ORed components.
        """
        class TestDoc(Document):
            x = IntField()
            y = BooleanField()

        TestDoc.drop_collection()

        query = (Q(x__gt=0) | Q(x__exists=False))
        query &= Q(x__lt=100)
        self.assertEqual(query.to_query(TestDoc), {'$and': [
            {'$or': [{'x': {'$gt': 0}},
                     {'x': {'$exists': False}}]},
            {'x': {'$lt': 100}}]
        })

        q1 = (Q(x__gt=0) | Q(x__exists=False))
        q2 = (Q(x__lt=100) | Q(y=True))
        query = (q1 & q2).to_query(TestDoc)

        TestDoc(x=101).save()
        TestDoc(x=10).save()
        TestDoc(y=True).save()

        self.assertEqual(query,
                         {'$and': [
                             {'$or': [{'x': {'$gt': 0}}, {'x': {'$exists': False}}]},
                             {'$or': [{'x': {'$lt': 100}}, {'y': True}]}
                         ]})

        self.assertEqual(2, TestDoc.objects(q1 & q2).count())

    def test_or_and_or_combination(self):
        """Ensure that Q-objects handle ORing ANDed ORed components. :)
        """
        class TestDoc(Document):
            x = IntField()
            y = BooleanField()

        TestDoc.drop_collection()
        TestDoc(x=-1, y=True).save()
        TestDoc(x=101, y=True).save()
        TestDoc(x=99, y=False).save()
        TestDoc(x=101, y=False).save()

        q1 = (Q(x__gt=0) & (Q(y=True) | Q(y__exists=False)))
        q2 = (Q(x__lt=100) & (Q(y=False) | Q(y__exists=False)))
        query = (q1 | q2).to_query(TestDoc)

        self.assertEqual(query,
                         {'$or': [
                             {'$and': [{'x': {'$gt': 0}},
                                       {'$or': [{'y': True}, {'y': {'$exists': False}}]}]},
                             {'$and': [{'x': {'$lt': 100}},
                                       {'$or': [{'y': False}, {'y': {'$exists': False}}]}]}
                         ]}
                         )

        self.assertEqual(2, TestDoc.objects(q1 | q2).count())

    def test_multiple_occurence_in_field(self):
        class Test(Document):
            name = StringField(max_length=40)
            title = StringField(max_length=40)

        q1 = Q(name__contains='te') | Q(title__contains='te')
        q2 = Q(name__contains='12') | Q(title__contains='12')

        q3 = q1 & q2

        query = q3.to_query(Test)
        self.assertEqual(query["$and"][0], q1.to_query(Test))
        self.assertEqual(query["$and"][1], q2.to_query(Test))

    def test_q_clone(self):

        class TestDoc(Document):
            x = IntField()

        TestDoc.drop_collection()
        for i in xrange(1, 101):
            t = TestDoc(x=i)
            t.save()

        # Check normal cases work without an error
        test = TestDoc.objects(Q(x__lt=7) & Q(x__gt=3))

        self.assertEqual(test.count(), 3)

        test2 = test.clone()
        self.assertEqual(test2.count(), 3)
        self.assertFalse(test2 == test)

        test3 = test2.filter(x=6)
        self.assertEqual(test3.count(), 1)
        self.assertEqual(test.count(), 3)

    def test_q(self):
        """Ensure that Q objects may be used to query for documents.
        """
        class BlogPost(Document):
            title = StringField()
            publish_date = DateTimeField()
            published = BooleanField()

        BlogPost.drop_collection()

        post1 = BlogPost(title='Test 1', publish_date=datetime(2010, 1, 8), published=False)
        post1.save()

        post2 = BlogPost(title='Test 2', publish_date=datetime(2010, 1, 15), published=True)
        post2.save()

        post3 = BlogPost(title='Test 3', published=True)
        post3.save()

        post4 = BlogPost(title='Test 4', publish_date=datetime(2010, 1, 8))
        post4.save()

        post5 = BlogPost(title='Test 1', publish_date=datetime(2010, 1, 15))
        post5.save()

        post6 = BlogPost(title='Test 1', published=False)
        post6.save()

        # Check ObjectId lookup works
        obj = BlogPost.objects(id=post1.id).first()
        self.assertEqual(obj, post1)

        # Check Q object combination with one does not exist
        q = BlogPost.objects(Q(title='Test 5') | Q(published=True))
        posts = [post.id for post in q]

        published_posts = (post2, post3)
        self.assertTrue(all(obj.id in posts for obj in published_posts))

        q = BlogPost.objects(Q(title='Test 1') | Q(published=True))
        posts = [post.id for post in q]
        published_posts = (post1, post2, post3, post5, post6)
        self.assertTrue(all(obj.id in posts for obj in published_posts))

        # Check Q object combination
        date = datetime(2010, 1, 10)
        q = BlogPost.objects(Q(publish_date__lte=date) | Q(published=True))
        posts = [post.id for post in q]

        published_posts = (post1, post2, post3, post4)
        self.assertTrue(all(obj.id in posts for obj in published_posts))

        self.assertFalse(any(obj.id in posts for obj in [post5, post6]))

        BlogPost.drop_collection()

        # Check the 'in' operator
        self.Person(name='user1', age=20).save()
        self.Person(name='user2', age=20).save()
        self.Person(name='user3', age=30).save()
        self.Person(name='user4', age=40).save()

        self.assertEqual(len(self.Person.objects(Q(age__in=[20]))), 2)
        self.assertEqual(len(self.Person.objects(Q(age__in=[20, 30]))), 3)

        # Test invalid query objs
        def wrong_query_objs():
            self.Person.objects('user1')

        def wrong_query_objs_filter():
            self.Person.objects('user1')

        self.assertRaises(InvalidQueryError, wrong_query_objs)
        self.assertRaises(InvalidQueryError, wrong_query_objs_filter)

    def test_q_regex(self):
        """Ensure that Q objects can be queried using regexes.
        """
        person = self.Person(name='Guido van Rossum')
        person.save()

        import re
        obj = self.Person.objects(Q(name=re.compile('^Gui'))).first()
        self.assertEqual(obj, person)
        obj = self.Person.objects(Q(name=re.compile('^gui'))).first()
        self.assertEqual(obj, None)

        obj = self.Person.objects(Q(name=re.compile('^gui', re.I))).first()
        self.assertEqual(obj, person)

        obj = self.Person.objects(Q(name__not=re.compile('^bob'))).first()
        self.assertEqual(obj, person)

        obj = self.Person.objects(Q(name__not=re.compile('^Gui'))).first()
        self.assertEqual(obj, None)

    def test_q_lists(self):
        """Ensure that Q objects query ListFields correctly.
        """
        class BlogPost(Document):
            tags = ListField(StringField())

        BlogPost.drop_collection()

        BlogPost(tags=['python', 'mongo']).save()
        BlogPost(tags=['python']).save()

        self.assertEqual(len(BlogPost.objects(Q(tags='mongo'))), 1)
        self.assertEqual(len(BlogPost.objects(Q(tags='python'))), 2)

        BlogPost.drop_collection()

    def test_q_merge_queries_edge_case(self):

        class User(Document):
            email = EmailField(required=False)
            name = StringField()

        User.drop_collection()
        pk = ObjectId()
        User(email='example@example.com', pk=pk).save()

        self.assertEqual(1, User.objects.filter(
                                Q(email='example@example.com') |
                                Q(name='John Doe')
                                ).limit(2).filter(pk=pk).count())


if __name__ == '__main__':
    unittest.main()
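Q objects build an operator tree that is only flattened into a MongoDB query when to_query(Document) is called. A minimal sketch of the combination rules the tests above exercise (same API, assuming a local mongod):

from mongoengine import *
from mongoengine.queryset import Q

connect(db='mongoenginetest')

class TestDoc(Document):
    x = IntField()

# ANDed ranges on one field merge into a single clause
print (Q(x__lt=7) & Q(x__gt=3)).to_query(TestDoc)
# {'x': {'$lt': 7, '$gt': 3}}

# ORed alternatives become an explicit $or list
print (Q(x__lt=3) | Q(x__gt=7)).to_query(TestDoc)
# {'$or': [{'x': {'$lt': 3}}, {'x': {'$gt': 7}}]}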
@@ -1,10 +1,16 @@
+from __future__ import with_statement
+import sys
+sys.path[0:0] = [""]
 import unittest
-import pymongo
+import datetime
 
-import mongoengine.connection
+import pymongo
+from bson.tz_util import utc
 
 from mongoengine import *
+import mongoengine.connection
 from mongoengine.connection import get_db, get_connection, ConnectionError
+from mongoengine.context_managers import switch_db
 
 
 class ConnectionTest(unittest.TestCase):
@@ -70,11 +76,26 @@ class ConnectionTest(unittest.TestCase):
         """
         connect('mongoenginetest', alias='t1', tz_aware=True)
         conn = get_connection('t1')
 
         self.assertTrue(conn.tz_aware)
 
         connect('mongoenginetest2', alias='t2')
         conn = get_connection('t2')
         self.assertFalse(conn.tz_aware)
 
+    def test_datetime(self):
+        connect('mongoenginetest', tz_aware=True)
+        d = datetime.datetime(2010, 5, 5, tzinfo=utc)
+
+        class DateDoc(Document):
+            the_date = DateTimeField(required=True)
+
+        DateDoc.drop_collection()
+        DateDoc(the_date=d).save()
+
+        date_doc = DateDoc.objects.first()
+        self.assertEqual(d, date_doc.the_date)
+
+
 if __name__ == '__main__':
     unittest.main()
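With tz_aware=True the connection round-trips timezone-aware datetimes, which is what the added test_datetime asserts. A usage sketch, assuming a local mongod as in the tests:

import datetime
from bson.tz_util import utc
from mongoengine import *

connect('mongoenginetest', tz_aware=True)

class DateDoc(Document):
    the_date = DateTimeField(required=True)

DateDoc.drop_collection()
DateDoc(the_date=datetime.datetime(2010, 5, 5, tzinfo=utc)).save()
# the value read back carries a tzinfo instead of being naive
print DateDoc.objects.first().the_date.tzinfo is not None  # True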
156  tests/test_context_managers.py  (new file)
@@ -0,0 +1,156 @@
from __future__ import with_statement
import sys
sys.path[0:0] = [""]
import unittest

from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.context_managers import (switch_db, switch_collection,
                                          no_dereference, query_counter)


class ContextManagersTest(unittest.TestCase):

    def test_switch_db_context_manager(self):
        connect('mongoenginetest')
        register_connection('testdb-1', 'mongoenginetest2')

        class Group(Document):
            name = StringField()

        Group.drop_collection()

        Group(name="hello - default").save()
        self.assertEqual(1, Group.objects.count())

        with switch_db(Group, 'testdb-1') as Group:

            self.assertEqual(0, Group.objects.count())

            Group(name="hello").save()

            self.assertEqual(1, Group.objects.count())

            Group.drop_collection()
            self.assertEqual(0, Group.objects.count())

        self.assertEqual(1, Group.objects.count())

    def test_switch_collection_context_manager(self):
        connect('mongoenginetest')
        register_connection('testdb-1', 'mongoenginetest2')

        class Group(Document):
            name = StringField()

        Group.drop_collection()
        with switch_collection(Group, 'group1') as Group:
            Group.drop_collection()

        Group(name="hello - group").save()
        self.assertEqual(1, Group.objects.count())

        with switch_collection(Group, 'group1') as Group:

            self.assertEqual(0, Group.objects.count())

            Group(name="hello - group1").save()

            self.assertEqual(1, Group.objects.count())

            Group.drop_collection()
            self.assertEqual(0, Group.objects.count())

        self.assertEqual(1, Group.objects.count())

    def test_no_dereference_context_manager_object_id(self):
        """Ensure that DBRef items in ListFields aren't dereferenced.
        """
        connect('mongoenginetest')

        class User(Document):
            name = StringField()

        class Group(Document):
            ref = ReferenceField(User, dbref=False)
            generic = GenericReferenceField()
            members = ListField(ReferenceField(User, dbref=False))

        User.drop_collection()
        Group.drop_collection()

        for i in xrange(1, 51):
            User(name='user %s' % i).save()

        user = User.objects.first()
        Group(ref=user, members=User.objects, generic=user).save()

        with no_dereference(Group) as NoDeRefGroup:
            self.assertTrue(Group._fields['members']._auto_dereference)
            self.assertFalse(NoDeRefGroup._fields['members']._auto_dereference)

        with no_dereference(Group) as Group:
            group = Group.objects.first()
            self.assertTrue(all([not isinstance(m, User)
                                 for m in group.members]))
            self.assertFalse(isinstance(group.ref, User))
            self.assertFalse(isinstance(group.generic, User))

        self.assertTrue(all([isinstance(m, User)
                             for m in group.members]))
        self.assertTrue(isinstance(group.ref, User))
        self.assertTrue(isinstance(group.generic, User))

    def test_no_dereference_context_manager_dbref(self):
        """Ensure that DBRef items in ListFields aren't dereferenced.
        """
        connect('mongoenginetest')

        class User(Document):
            name = StringField()

        class Group(Document):
            ref = ReferenceField(User, dbref=True)
            generic = GenericReferenceField()
            members = ListField(ReferenceField(User, dbref=True))

        User.drop_collection()
        Group.drop_collection()

        for i in xrange(1, 51):
            User(name='user %s' % i).save()

        user = User.objects.first()
        Group(ref=user, members=User.objects, generic=user).save()

        with no_dereference(Group) as NoDeRefGroup:
            self.assertTrue(Group._fields['members']._auto_dereference)
            self.assertFalse(NoDeRefGroup._fields['members']._auto_dereference)

        with no_dereference(Group) as Group:
            group = Group.objects.first()
            self.assertTrue(all([not isinstance(m, User)
                                 for m in group.members]))
            self.assertFalse(isinstance(group.ref, User))
            self.assertFalse(isinstance(group.generic, User))

        self.assertTrue(all([isinstance(m, User)
                             for m in group.members]))
        self.assertTrue(isinstance(group.ref, User))
        self.assertTrue(isinstance(group.generic, User))

    def test_query_counter(self):
        connect('mongoenginetest')
        db = get_db()
        db.test.find({})

        with query_counter() as q:
            self.assertEqual(0, q)

            for i in xrange(1, 51):
                db.test.find({}).count()

            self.assertEqual(50, q)


if __name__ == '__main__':
    unittest.main()
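The context managers tested above compose with ordinary document classes; a minimal usage sketch, assuming a local mongod and the same connection aliases the tests register:

from mongoengine import *
from mongoengine.context_managers import switch_db, query_counter

connect('mongoenginetest')
register_connection('testdb-1', 'mongoenginetest2')

class Group(Document):
    name = StringField()

# inside the block, Group reads and writes against the aliased database
with switch_db(Group, 'testdb-1') as Group:
    Group(name='saved into mongoenginetest2').save()

# query_counter tracks how many queries the block issues
with query_counter() as q:
    Group.objects.count()
    print q  # compares equal to the number of queries issued so far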
@@ -1,8 +1,14 @@
+# -*- coding: utf-8 -*-
+from __future__ import with_statement
+import sys
+sys.path[0:0] = [""]
 import unittest
 
+from bson import DBRef, ObjectId
+
 from mongoengine import *
 from mongoengine.connection import get_db
-from mongoengine.tests import query_counter
+from mongoengine.context_managers import query_counter
 
 
 class FieldTest(unittest.TestCase):
@@ -39,6 +45,12 @@ class FieldTest(unittest.TestCase):
             group_obj = Group.objects.first()
             self.assertEqual(q, 1)
 
+            len(group_obj._data['members'])
+            self.assertEqual(q, 1)
+
+            len(group_obj.members)
+            self.assertEqual(q, 2)
+
             [m for m in group_obj.members]
             self.assertEqual(q, 2)
@@ -63,6 +75,157 @@ class FieldTest(unittest.TestCase):
         User.drop_collection()
         Group.drop_collection()
 
+    def test_list_item_dereference_dref_false(self):
+        """Ensure that DBRef items in ListFields are dereferenced.
+        """
+        class User(Document):
+            name = StringField()
+
+        class Group(Document):
+            members = ListField(ReferenceField(User, dbref=False))
+
+        User.drop_collection()
+        Group.drop_collection()
+
+        for i in xrange(1, 51):
+            user = User(name='user %s' % i)
+            user.save()
+
+        group = Group(members=User.objects)
+        group.save()
+        group.reload()  # Confirm reload works
+
+        with query_counter() as q:
+            self.assertEqual(q, 0)
+
+            group_obj = Group.objects.first()
+            self.assertEqual(q, 1)
+
+            [m for m in group_obj.members]
+            self.assertEqual(q, 2)
+
+        # Document select_related
+        with query_counter() as q:
+            self.assertEqual(q, 0)
+
+            group_obj = Group.objects.first().select_related()
+
+            self.assertEqual(q, 2)
+            [m for m in group_obj.members]
+            self.assertEqual(q, 2)
+
+        # Queryset select_related
+        with query_counter() as q:
+            self.assertEqual(q, 0)
+            group_objs = Group.objects.select_related()
+            self.assertEqual(q, 2)
+            for group_obj in group_objs:
+                [m for m in group_obj.members]
+                self.assertEqual(q, 2)
+
+        User.drop_collection()
+        Group.drop_collection()
+
+    def test_list_item_dereference_dref_false_stores_as_type(self):
+        """Ensure that DBRef items are stored as their type
+        """
+        class User(Document):
+            my_id = IntField(primary_key=True)
+            name = StringField()
+
+        class Group(Document):
+            members = ListField(ReferenceField(User, dbref=False))
+
+        User.drop_collection()
+        Group.drop_collection()
+
+        user = User(my_id=1, name='user 1').save()
+
+        Group(members=User.objects).save()
+        group = Group.objects.first()
+
+        self.assertEqual(Group._get_collection().find_one()['members'], [1])
+        self.assertEqual(group.members, [user])
+
+    def test_handle_old_style_references(self):
+        """Ensure that DBRef items in ListFields are dereferenced.
+        """
+        class User(Document):
+            name = StringField()
+
+        class Group(Document):
+            members = ListField(ReferenceField(User, dbref=True))
+
+        User.drop_collection()
+        Group.drop_collection()
+
+        for i in xrange(1, 26):
+            user = User(name='user %s' % i)
+            user.save()
+
+        group = Group(members=User.objects)
+        group.save()
+
+        group = Group._get_collection().find_one()
+
+        # Update the model to change the reference
+        class Group(Document):
+            members = ListField(ReferenceField(User, dbref=False))
+
+        group = Group.objects.first()
+        group.members.append(User(name="String!").save())
+        group.save()
+
+        group = Group.objects.first()
+        self.assertEqual(group.members[0].name, 'user 1')
+        self.assertEqual(group.members[-1].name, 'String!')
+
+    def test_migrate_references(self):
+        """Example of migrating ReferenceField storage
+        """
+
+        # Create some sample data
+        class User(Document):
+            name = StringField()
+
+        class Group(Document):
+            author = ReferenceField(User, dbref=True)
+            members = ListField(ReferenceField(User, dbref=True))
+
+        User.drop_collection()
+        Group.drop_collection()
+
+        user = User(name="Ross").save()
+        group = Group(author=user, members=[user]).save()
+
+        raw_data = Group._get_collection().find_one()
+        self.assertTrue(isinstance(raw_data['author'], DBRef))
+        self.assertTrue(isinstance(raw_data['members'][0], DBRef))
+        group = Group.objects.first()
+
+        self.assertEqual(group.author, user)
+        self.assertEqual(group.members, [user])
+
+        # Migrate the model definition
+        class Group(Document):
+            author = ReferenceField(User, dbref=False)
+            members = ListField(ReferenceField(User, dbref=False))
+
+        # Migrate the data
+        for g in Group.objects():
+            # Explicitly mark as changed so resets
+            g._mark_as_changed('author')
+            g._mark_as_changed('members')
+            g.save()
+
+        group = Group.objects.first()
+        self.assertEqual(group.author, user)
+        self.assertEqual(group.members, [user])
+
+        raw_data = Group._get_collection().find_one()
+        self.assertTrue(isinstance(raw_data['author'], ObjectId))
+        self.assertTrue(isinstance(raw_data['members'][0], ObjectId))
+
     def test_recursive_reference(self):
         """Ensure that ReferenceFields can reference their own documents.
         """
@@ -109,10 +272,10 @@ class FieldTest(unittest.TestCase):
             peter = Employee.objects.with_id(peter.id).select_related()
             self.assertEqual(q, 2)
 
-            self.assertEquals(peter.boss, bill)
+            self.assertEqual(peter.boss, bill)
             self.assertEqual(q, 2)
 
-            self.assertEquals(peter.friends, friends)
+            self.assertEqual(peter.friends, friends)
             self.assertEqual(q, 2)
 
         # Queryset select_related
@@ -123,10 +286,10 @@ class FieldTest(unittest.TestCase):
             self.assertEqual(q, 2)
 
             for employee in employees:
-                self.assertEquals(employee.boss, bill)
+                self.assertEqual(employee.boss, bill)
                 self.assertEqual(q, 2)
 
-                self.assertEquals(employee.friends, friends)
+                self.assertEqual(employee.friends, friends)
                 self.assertEqual(q, 2)
 
     def test_circular_reference(self):
@@ -160,7 +323,7 @@ class FieldTest(unittest.TestCase):
         daughter.relations.append(self_rel)
         daughter.save()
 
-        self.assertEquals("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects())
+        self.assertEqual("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects())
 
     def test_circular_reference_on_self(self):
         """Ensure you can handle circular references
@@ -186,7 +349,7 @@ class FieldTest(unittest.TestCase):
         daughter.relations.append(daughter)
         daughter.save()
 
-        self.assertEquals("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects())
+        self.assertEqual("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects())
 
     def test_circular_tree_reference(self):
         """Ensure you can handle circular references with more than one level
@@ -203,14 +366,10 @@ class FieldTest(unittest.TestCase):
             return "<Person: %s>" % self.name
 
         Person.drop_collection()
-        paul = Person(name="Paul")
-        paul.save()
-        maria = Person(name="Maria")
-        maria.save()
-        julia = Person(name='Julia')
-        julia.save()
-        anna = Person(name='Anna')
-        anna.save()
+        paul = Person(name="Paul").save()
+        maria = Person(name="Maria").save()
+        julia = Person(name='Julia').save()
+        anna = Person(name='Anna').save()
 
         paul.other.friends = [maria, julia, anna]
         paul.other.name = "Paul's friends"
@@ -228,7 +387,7 @@ class FieldTest(unittest.TestCase):
         anna.other.name = "Anna's friends"
         anna.save()
 
-        self.assertEquals(
+        self.assertEqual(
             "[<Person: Paul>, <Person: Maria>, <Person: Julia>, <Person: Anna>]",
             "%s" % Person.objects()
         )
@@ -781,8 +940,8 @@ class FieldTest(unittest.TestCase):
         root.save()
 
         root = root.reload()
-        self.assertEquals(root.children, [company])
-        self.assertEquals(company.parents, [root])
+        self.assertEqual(root.children, [company])
+        self.assertEqual(company.parents, [root])
 
     def test_dict_in_dbref_instance(self):
 
@@ -808,8 +967,8 @@ class FieldTest(unittest.TestCase):
         room_101.save()
 
         room = Room.objects.first().select_related()
-        self.assertEquals(room.staffs_with_position[0]['staff'], sarah)
-        self.assertEquals(room.staffs_with_position[1]['staff'], bob)
+        self.assertEqual(room.staffs_with_position[0]['staff'], sarah)
+        self.assertEqual(room.staffs_with_position[1]['staff'], bob)
 
     def test_document_reload_no_inheritance(self):
         class Foo(Document):
@@ -839,5 +998,185 @@ class FieldTest(unittest.TestCase):
         foo.save()
         foo.reload()
 
-        self.assertEquals(type(foo.bar), Bar)
-        self.assertEquals(type(foo.baz), Baz)
+        self.assertEqual(type(foo.bar), Bar)
+        self.assertEqual(type(foo.baz), Baz)
+
+    def test_list_lookup_not_checked_in_map(self):
+        """Ensure we dereference list data correctly
+        """
+        class Comment(Document):
+            id = IntField(primary_key=True)
+            text = StringField()
+
+        class Message(Document):
+            id = IntField(primary_key=True)
+            comments = ListField(ReferenceField(Comment))
+
+        Comment.drop_collection()
+        Message.drop_collection()
+
+        c1 = Comment(id=0, text='zero').save()
+        c2 = Comment(id=1, text='one').save()
+        Message(id=1, comments=[c1, c2]).save()
+
+        msg = Message.objects.get(id=1)
+        self.assertEqual(0, msg.comments[0].id)
+        self.assertEqual(1, msg.comments[1].id)
+
+    def test_list_item_dereference_dref_false_save_doesnt_cause_extra_queries(self):
+        """Ensure that DBRef items in ListFields are dereferenced.
+        """
+        class User(Document):
+            name = StringField()
+
+        class Group(Document):
+            name = StringField()
+            members = ListField(ReferenceField(User, dbref=False))
+
+        User.drop_collection()
+        Group.drop_collection()
+
+        for i in xrange(1, 51):
+            User(name='user %s' % i).save()
+
+        Group(name="Test", members=User.objects).save()
+
+        with query_counter() as q:
+            self.assertEqual(q, 0)
+
+            group_obj = Group.objects.first()
+            self.assertEqual(q, 1)
+
+            group_obj.name = "new test"
+            group_obj.save()
+
+            self.assertEqual(q, 2)
+
+    def test_list_item_dereference_dref_true_save_doesnt_cause_extra_queries(self):
+        """Ensure that DBRef items in ListFields are dereferenced.
+        """
+        class User(Document):
+            name = StringField()
+
+        class Group(Document):
+            name = StringField()
+            members = ListField(ReferenceField(User, dbref=True))
+
+        User.drop_collection()
+        Group.drop_collection()
+
+        for i in xrange(1, 51):
+            User(name='user %s' % i).save()
+
+        Group(name="Test", members=User.objects).save()
+
+        with query_counter() as q:
+            self.assertEqual(q, 0)
+
+            group_obj = Group.objects.first()
+            self.assertEqual(q, 1)
+
+            group_obj.name = "new test"
+            group_obj.save()
+
+            self.assertEqual(q, 2)
+
+    def test_generic_reference_save_doesnt_cause_extra_queries(self):
+
+        class UserA(Document):
+            name = StringField()
+
+        class UserB(Document):
+            name = StringField()
+
+        class UserC(Document):
+            name = StringField()
+
+        class Group(Document):
+            name = StringField()
+            members = ListField(GenericReferenceField())
+
+        UserA.drop_collection()
+        UserB.drop_collection()
+        UserC.drop_collection()
+        Group.drop_collection()
+
+        members = []
+        for i in xrange(1, 51):
+            a = UserA(name='User A %s' % i).save()
+            b = UserB(name='User B %s' % i).save()
+            c = UserC(name='User C %s' % i).save()
+
+            members += [a, b, c]
+
+        Group(name="test", members=members).save()
+
+        with query_counter() as q:
+            self.assertEqual(q, 0)
+
+            group_obj = Group.objects.first()
+            self.assertEqual(q, 1)
+
+            group_obj.name = "new test"
+            group_obj.save()
+
+            self.assertEqual(q, 2)
+
+    def test_tuples_as_tuples(self):
+        """
+        Ensure that tuples remain tuples when they are
+        inside a ComplexBaseField
+        """
+        from mongoengine.base import BaseField
+
+        class EnumField(BaseField):
+
+            def __init__(self, **kwargs):
+                super(EnumField, self).__init__(**kwargs)
+
+            def to_mongo(self, value):
+                return value
+
+            def to_python(self, value):
+                return tuple(value)
+
+        class TestDoc(Document):
+            items = ListField(EnumField())
+
+        TestDoc.drop_collection()
+        tuples = [(100, 'Testing')]
+        doc = TestDoc()
+        doc.items = tuples
+        doc.save()
+        x = TestDoc.objects().get()
+        self.assertTrue(x is not None)
+        self.assertTrue(len(x.items) == 1)
+        self.assertTrue(tuple(x.items[0]) in tuples)
+        self.assertTrue(x.items[0] in tuples)
+
+    def test_non_ascii_pk(self):
+        """
+        Ensure that dbref conversion to string does not fail when
+        non-ascii characters are used in primary key
+        """
+        class Brand(Document):
+            title = StringField(max_length=255, primary_key=True)
+
+        class BrandGroup(Document):
+            title = StringField(max_length=255, primary_key=True)
+            brands = ListField(ReferenceField("Brand", dbref=True))
+
+        Brand.drop_collection()
+        BrandGroup.drop_collection()
+
+        brand1 = Brand(title="Moschino").save()
+        brand2 = Brand(title=u"Денис Симачёв").save()
+
+        BrandGroup(title="top_brands", brands=[brand1, brand2]).save()
+        brand_groups = BrandGroup.objects().all()
+
+        self.assertEqual(2, len([brand for bg in brand_groups for brand in bg.brands]))
+
+
+if __name__ == '__main__':
+    unittest.main()
@@ -1,24 +1,67 @@
 # -*- coding: utf-8 -*-
+from __future__ import with_statement
+import sys
+sys.path[0:0] = [""]
 import unittest
+
+from nose.plugins.skip import SkipTest
+from mongoengine.python_support import PY3
 from mongoengine import *
-from mongoengine.django.shortcuts import get_document_or_404
 
-from django.http import Http404
-from django.template import Context, Template
-from django.conf import settings
-from django.core.paginator import Paginator
+try:
+    from mongoengine.django.shortcuts import get_document_or_404
 
-settings.configure()
+    from django.http import Http404
+    from django.template import Context, Template
+    from django.conf import settings
+    from django.core.paginator import Paginator
 
-from django.contrib.sessions.tests import SessionTestsMixin
-from mongoengine.django.sessions import SessionStore, MongoSession
+    settings.configure(USE_TZ=True)
+
+    from django.contrib.sessions.tests import SessionTestsMixin
+    from mongoengine.django.sessions import SessionStore, MongoSession
+except Exception, err:
+    if PY3:
+        SessionTestsMixin = type  # dummy value so no error
+        SessionStore = None  # dummy value so no error
+    else:
+        raise err
+
+
+from datetime import tzinfo, timedelta
+ZERO = timedelta(0)
+
+class FixedOffset(tzinfo):
+    """Fixed offset in minutes east from UTC."""
+
+    def __init__(self, offset, name):
+        self.__offset = timedelta(minutes=offset)
+        self.__name = name
+
+    def utcoffset(self, dt):
+        return self.__offset
+
+    def tzname(self, dt):
+        return self.__name
+
+    def dst(self, dt):
+        return ZERO
+
+
+def activate_timezone(tz):
+    """Activate Django timezone support if it is available.
+    """
+    try:
+        from django.utils import timezone
+        timezone.deactivate()
+        timezone.activate(tz)
+    except ImportError:
+        pass
+
+
 class QuerySetTest(unittest.TestCase):
 
     def setUp(self):
+        if PY3:
+            raise SkipTest('django does not have Python 3 support')
         connect(db='mongoenginetest')
 
         class Person(Document):
@@ -93,12 +136,32 @@ class QuerySetTest(unittest.TestCase):
         start = end - 1
         self.assertEqual(t.render(Context(d)), u'%d:%d:' % (start, end))
 
+    def test_nested_queryset_template_iterator(self):
+        # Try iterating the same queryset twice, nested, in a Django template.
+        names = ['A', 'B', 'C', 'D']
+
+        class User(Document):
+            name = StringField()
+
+            def __unicode__(self):
+                return self.name
+
+        User.drop_collection()
+
+        for name in names:
+            User(name=name).save()
+
+        users = User.objects.all().order_by('name')
+        template = Template("{% for user in users %}{{ user.name }}{% ifequal forloop.counter 2 %} {% for inner_user in users %}{{ inner_user.name }}{% endfor %} {% endifequal %}{% endfor %}")
+        rendered = template.render(Context({'users': users}))
+        self.assertEqual(rendered, 'AB ABCD CD')
+
 class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase):
     backend = SessionStore
 
     def setUp(self):
+        if PY3:
+            raise SkipTest('django does not have Python 3 support')
         connect(db='mongoenginetest')
         MongoSession.drop_collection()
         super(MongoDBSessionTest, self).setUp()
@@ -108,3 +171,18 @@ class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase):
         session['test'] = True
         session.save()
         self.assertTrue('test' in session)
+
+    def test_session_expiration_tz(self):
+        activate_timezone(FixedOffset(60, 'UTC+1'))
+        # create and save new session
+        session = SessionStore()
+        session.set_expiry(600)  # expire in 600 seconds
+        session['test_expire'] = True
+        session.save()
+        # reload session with key
+        key = session.session_key
+        session = SessionStore(key)
+        self.assertTrue('test_expire' in session, 'Session has expired before it is expected')
+
+
+if __name__ == '__main__':
+    unittest.main()
@@ -1,4 +1,7 @@
+import sys
+sys.path[0:0] = [""]
 import unittest
 
 import pymongo
 from pymongo import ReadPreference, ReplicaSetConnection
@@ -26,7 +29,7 @@ class ConnectionTest(unittest.TestCase):
         if not isinstance(conn, ReplicaSetConnection):
             return
 
-        self.assertEquals(conn.read_preference, ReadPreference.SECONDARY_ONLY)
+        self.assertEqual(conn.read_preference, ReadPreference.SECONDARY_ONLY)
 
 if __name__ == '__main__':
     unittest.main()
@@ -1,4 +1,6 @@
 # -*- coding: utf-8 -*-
+import sys
+sys.path[0:0] = [""]
 import unittest
 
 from mongoengine import *
@@ -21,6 +23,7 @@ class SignalTests(unittest.TestCase):
 
     def setUp(self):
        connect(db='mongoenginetest')
+
        class Author(Document):
            name = StringField()
 
@@ -70,7 +73,6 @@ class SignalTests(unittest.TestCase):
                 signal_output.append('Not loaded')
         self.Author = Author
 
-
         class Another(Document):
             name = StringField()
 
@@ -108,8 +110,22 @@ class SignalTests(unittest.TestCase):
                 signal_output.append('post_delete Another signal, %s' % document)
 
         self.Another = Another
-        # Save up the number of connected signals so that we can check at the end
-        # that all the signals we register get properly unregistered
+
+        class ExplicitId(Document):
+            id = IntField(primary_key=True)
+
+            @classmethod
+            def post_save(cls, sender, document, **kwargs):
+                if 'created' in kwargs:
+                    if kwargs['created']:
+                        signal_output.append('Is created')
+                    else:
+                        signal_output.append('Is updated')
+
+        self.ExplicitId = ExplicitId
+        self.ExplicitId.objects.delete()
+        # Save up the number of connected signals so that we can check at the
+        # end that all the signals we register get properly unregistered
         self.pre_signals = (
             len(signals.pre_init.receivers),
             len(signals.post_init.receivers),
@@ -137,6 +153,8 @@ class SignalTests(unittest.TestCase):
         signals.pre_delete.connect(Another.pre_delete, sender=Another)
         signals.post_delete.connect(Another.post_delete, sender=Another)
 
+        signals.post_save.connect(ExplicitId.post_save, sender=ExplicitId)
+
     def tearDown(self):
         signals.pre_init.disconnect(self.Author.pre_init)
         signals.post_init.disconnect(self.Author.post_init)
@@ -154,6 +172,8 @@ class SignalTests(unittest.TestCase):
         signals.post_save.disconnect(self.Another.post_save)
         signals.pre_save.disconnect(self.Another.pre_save)

+        signals.post_save.disconnect(self.ExplicitId.post_save)
+
         # Check that all our signals got disconnected properly.
         post_signals = (
             len(signals.pre_init.receivers),
@@ -166,13 +186,15 @@ class SignalTests(unittest.TestCase):
             len(signals.post_bulk_insert.receivers),
         )

+        self.ExplicitId.objects.delete()
+
         self.assertEqual(self.pre_signals, post_signals)

     def test_model_signals(self):
         """ Model saves should throw some signals. """

         def create_author():
-            a1 = self.Author(name='Bill Shakespeare')
+            self.Author(name='Bill Shakespeare')

         def bulk_create_author_with_load():
             a1 = self.Author(name='Bill Shakespeare')
@@ -196,7 +218,7 @@ class SignalTests(unittest.TestCase):
         ])

         a1.reload()
-        a1.name='William Shakespeare'
+        a1.name = 'William Shakespeare'
         self.assertEqual(self.get_signal_output(a1.save), [
             "pre_save signal, William Shakespeare",
             "post_save signal, William Shakespeare",
@@ -212,9 +234,9 @@ class SignalTests(unittest.TestCase):

         # The output of this signal is not entirely deterministic. The reloaded
         # object will have an object ID. Hence, we only check part of the output
-        self.assertEquals(signal_output[3],
+        self.assertEqual(signal_output[3],
             "pre_bulk_insert signal, [<Author: Bill Shakespeare>]")
-        self.assertEquals(signal_output[-2:],
+        self.assertEqual(signal_output[-2:],
             ["post_bulk_insert signal, [<Author: Bill Shakespeare>]",
              "Is loaded",])

@@ -228,3 +250,15 @@ class SignalTests(unittest.TestCase):
         ])

         self.Author.objects.delete()
+
+    def test_signals_with_explicit_doc_ids(self):
+        """ Model saves must have a created flag the first time."""
+        ei = self.ExplicitId(id=123)
+        # post_save must receive the created flag, even if there's already
+        # an object id present
+        self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
+        # second time, it must be an update
+        self.assertEqual(self.get_signal_output(ei.save), ['Is updated'])
+
+if __name__ == '__main__':
+    unittest.main()
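Finally, the suite's pre_signals/post_signals bookkeeping is a reusable pattern: snapshot `len(<signal>.receivers)` before connecting handlers, then compare after disconnecting, which catches any handler that was registered but never unregistered. A minimal sketch under the same assumptions (blinker installed; `Note` and `on_save` are illustrative names, not part of the change):

```python
from mongoengine import Document, StringField, signals

class Note(Document):
    name = StringField()

def on_save(sender, document, **kwargs):
    pass  # a handler we deliberately connect and then disconnect

# Snapshot, connect, disconnect, verify -- no receiver should leak.
before = len(signals.post_save.receivers)
signals.post_save.connect(on_save, sender=Note)
assert len(signals.post_save.receivers) == before + 1
signals.post_save.disconnect(on_save)
assert len(signals.post_save.receivers) == before
```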