Compare commits
471 Commits
88a951ba4f, 403ceb19dc, 835d3c3d18, 3135b456be, 0be6d3661a, 6f5f5b4711, c6c5f85abb, 7b860f7739,
e28804c03a, 1b9432824b, 3b71a6b5c5, 7ce8768c19, 25e0f12976, f168682a68, d25058a46d, 4d0c092d9f,
15714ef855, eb743beaa3, 0007535a46, 8391af026c, 800f656dcf, 088c5f49d9, d8d98b6143, 02fb3b9315,
4f87db784e, 7e6287b925, 999cdfd997, 8d6cb087c6, 2b7417c728, 3c455cf1c1, 5135185e31, b461f26e5d,
faef5b8570, 0a20e04c10, d19bb2308d, d8dd07d9ef, 36c56243cd, 23d06b79a6, e4c4e923ee, 936d2f1f47,
07018b5060, ac90d6ae5c, 2141f2c4c5, 81870777a9, 845092dcad, dd473d1e1e, d2869bf4ed, 891a3f4b29,
6767b50d75, d9e4b562a9, fb3243f1bc, 5fe1497c92, 5446592d44, 40ed9a53c9, f7ac8cea90, 4ef5d1f0cd,
6992615c98, 43dabb2825, 05e40e5681, 2c4536e137, 3dc81058a0, bd84667a2b, e5b6a12977, ca415d5d62,
99b4fe7278, 327e164869, 25bc571f30, 38c7e8a1d2, ca282e28e0, 5ef59c06df, 8f55d385d6, cd2fc25c19,
709983eea6, 40e99b1b80, 488684d960, f35034b989, 9d6f9b1f26, 6148a608fb, 3fa9e70383, 16fea6f009,
df9ed835ca, e394c8f0f2, 21974f7288, 5ef0170d77, c21dcf14de, a8d20d4e1e, 8b307485b0, 4544afe422,
9d7eba5f70, be0aee95f2, 3469ed7ab9, 1f223aa7e6, 0a431ead5e, f750796444, c82bcd882a, 7d0ec33b54,
43d48b3feb, 2e406d2687, 3f30808104, ab10217c86, 00430491ca, 109202329f, 3b1509f307, 7ad7b08bed,
4650e5e8fb, af59d4929e, e34100bab4, d9b3a9fb60, 39eec59c90, d651d0d472, 87a2358a65, cef4e313e1,
7cc1a4eba0, c6cc0133b3, 7748e68440, 6c2230a076, 66b233eaea, fed58f3920, 815b2be7f7, f420c9fb7c,
01bdf10b94, ddedc1ee92, 9e9703183f, adce9e6220, c499133bbe, 8f505c2dcc, b320064418, a643933d16,
2659ec5887, 9f8327926d, 7a568dc118, c946b06be5, c65fd0e477, 8f8217e928, 6c9e1799c7, decd70eb23,
a20d40618f, b4af8ec751, feb5eed8a5, f4fa39c70e, 7b7165f5d8, 13897db6d3, c4afdb7198, 0284975f3f,
269e3d1303, 8c81f7ece9, f6e0593774, 3d80e549cb, acc7448dc5, 35d3d3de72, 0372e07eb0, 00221e3410,
9c264611cf, 31d7f70e27, 04e8b83d45, e87bf71f20, 2dd70c8d62, a3886702a3, 713af133a0, 057ffffbf2,
a81d6d124b, 23f07fde5e, b42b760393, bf6f4c48c0, 6133f04841, 3c18f79ea4, 2af8342fea, fc3db7942d,
164e2b2678, b7b28390df, a6e996d921, 07e666345d, 007f10d29d, f9284d20ca, 9050869781, 54975de0f3,
a7aead5138, 6868f66f24, 3c0b00e42d, 3327388f1f, 04497aec36, aa9d596930, f96e68cd11, 013227323d,
19cbb442ee, c0e7f341cb, 0a1ba7c434, b708dabf98, 899e56e5b8, f6d3bd8ccb, deb5677a57, 5c464c3f5a,
cceef33fef, ed8174fe36, 3c8906494f, 6e745e9882, fb4e9c3772, 2c282f9550, d92d41cb05, 82e7050561,
44f92d4169, 2f1fae38dd, 9fe99979fe, 6399de0b51, 959740a585, 159b082828, 8e7c5af16c, c1645ab7a7,
2ae2bfdde9, 3fe93968a6, 79a2d715b0, 50b271c868, a57f28ac83, 3f3747a2fe, d133913c3d, e049cef00a,
eb8176971c, 5bbfca45fa, 9b500cd867, b52cae6575, 35a0142f9b, d4f6ef4f1b, 11024deaae, 5a038de1d5,
903982e896, 6355c404cc, 92b9cb5d43, 7580383d26, ba0934e41e, a6a1021521, 33b4d83c73, 6cf630c74a,
736fe5b84e, 4241bde6ea, b4ce14d744, 10832a2ccc, 91aca44f67, 96cfbb201a, b2bc155701, a70ef5594d,
6d991586fd, f8890ca841, 0752c6b24f, 3ffaf2c0e1, a3e0fbd606, 9c8ceb6b4e, bebce2c053, 34c6790762,
a5fb009b62, 9671ca5ebf, 5334ea393e, 2aaacc02e3, 222e929b2d, 6f16d35a92, d7a2ccf5ac, 9ce605221a,
1e930fe950, 4dc158589c, 4525eb457b, 56a2e07dc2, 9b7fe9ac31, c3da07ccf7, b691a56d51, 13e0a1b5bb,
646baddce4, 02f61c323d, 1e3d2df9e7, e43fae86f1, c6151e34e0, 45cb991254, 839bc99f94, 0aeb1ca408,
cd76a906f4, e438491938, 307b35a5bf, 217c9720ea, 778c7dc5f2, 4c80154437, 6bd9529a66, 33ea2b4844,
5c807f3dc8, 9063b559c4, 40f6df7160, 95165aa92f, d96fcdb35c, 5efabdcea3, 2d57dc0565, 576629f825,
5badb9d151, 45dc379d9a, 49c0c9f44c, ef5fa4d062, 35b66d5d94, d0b749a43c, bcc4d4e8c6, 41bff0b293,
dfc7f35ef1, 0bbbbdde80, 5fa5284b58, b7ef82cb67, 1233780265, dd095279c8, 4d5200c50f, 1bcd675ead,
2a3d3de0b2, b124836f3a, 93ba95971b, 7b193b3745, 2b647d2405, 7714cca599, 42511aa9cf, ace2a2f3d1,
2062fe7a08, d4c02c3988, 4c1496b4a4, eec876295d, 3093175f54, dd05c4d34a, 57e3a40321, 9e70152076,
e1da83a8f6, 8108198613, 915849b2ce, 2e96302336, 051cd744ad, 53fbc165ba, 1862bcf867, 8909d1d144,
a2f0f20284, 1951b52aa5, cd7a9345ec, dba4c33c81, 153c239c9b, 4034ab4182, 9c917c3bd3, cca0222e1d,
682db9b81f, 3e000f9be1, 548a552638, 1d5b5b7d15, 91aa4586e2, 6d3bc43ef6, 0f63e26641, ab2ef69c6a,
621350515e, 03ed5c398a, 65d6f8c018, 79d0673ae6, cbd488e19f, 380d869195, 73893f2a33, ad81470d35,
fc140d04ef, a0257ed7e7, 4769487c3b, 29def587ff, f35d0b2b37, 283e92d55d, c82b26d334, 2753e02cda,
fde733c205, f730591f2c, 94eac1e79d, 9f2b6d0ec6, 7d7d0ea001, 794101691c, a443144a5c, 73f0867061,
f97db93212, d36708933c, 14f82ea0a9, c41dd6495d, 1005c99e9c, f4478fc762, c5ed308ea5, 3ab5ba6149,
9b2fde962c, 571a7dc42d, 3421fffa9b, c25619fd63, 76adb13a64, 33b1eed361, c44891a1a8, f31f52ff1c,
6ad9a56bd9, a5c2fc4f9d, 0a65006bb4, 3db896c4e2, e80322021a, 48316ba60d, c0f1493473, ccbd128fa2,
46817caa68, 775c8624d4, 36eedc987c, 3b8f31c888, a34fa74eaa, d6b2d8dcb5, aab0599280, dfa8eaf24e,
63d55cb797, c642eee0d2, 5f33d298d7, fc39fd7519, 7f442f7485, 0ee3203a5a, 43a5df8780, 0949df014b,
01f4dd8f97, 8b7599f5d9, 9bdc320cf8, d9c8285806, 4b8344082f, e5cf76b460, 422ca87a12, a512ccca28,
ba215be97c, ca16050681, 06e4ed1bb4, d4a8ae5743, a4f2f811d3, ebaba95eb3, 31f7769199, 7726be94be,
f2cbcea6d7, 5d6a28954b, 319f1deceb, 3f14958741, 42ba4a5c56, c804c395ed, 58c8cf1a3a, 76ea8c86b7,
050378fa72, 29d858d58c, dc45920afb, 15fcb57e2f, 91ee85152c, aa7bf7af1e, 02c1ba39ad, 8e8d9426df,
57f301815d, dfc9dc713c, 1a0cad7f5f, 3df436f0d8, d737fca295, da5a3532d7, 27111e7b29, b847bc0aba,
6eb0bc50e2, 7530f03bf6, 24a9633edc, 7e1a5ce445, 2ffdbc7fc0, 52c7b68cc3, ddbcc8e84b, 2bfb195ad6,
cd2d9517a0, 19dc312128, 175659628d, 8fea2b09be, f77f45b70c, 103a287f11, d600ade40c, a6a7cba121,
7fff635a3f, 7a749b88c7, 1ce6a7f4be, a092910fdd, bb77838b3e, 1001f1bd36, de0e5583a5
.gitignore (vendored, 2 lines changed)

@@ -15,3 +15,5 @@ env/
 .pydevproject
 tests/test_bugfix.py
 htmlcov/
+venv
+venv3
.landscape.yml (new file, 22 lines)

@@ -0,0 +1,22 @@
+pylint:
+    disable:
+        # We use this a lot (e.g. via document._meta)
+        - protected-access
+
+    options:
+        additional-builtins:
+            # add xrange and long as valid built-ins. In Python 3, xrange is
+            # translated into range and long is translated into int via 2to3 (see
+            # "use_2to3" in setup.py). This should be removed when we drop Python
+            # 2 support (which probably won't happen any time soon).
+            - xrange
+            - long
+
+pyflakes:
+    disable:
+        # undefined variables are already covered by pylint (and exclude
+        # xrange & long)
+        - F821
+
+ignore-paths:
+    - benchmark.py
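The ``additional-builtins`` entries above exist because, under ``use_2to3``, ``xrange`` and ``long`` in the Python 2 source are rewritten to ``range`` and ``int`` for Python 3, so the linters must not flag them. A minimal compatibility shim illustrating the same idea (not part of this diff, purely a sketch):

.. code-block:: python

    # Hypothetical shim: the names the linter is told to accept are exactly
    # the ones 2to3 rewrites when building the Python 3 version.
    try:
        xrange, long  # Python 2: both exist as built-ins
    except NameError:
        xrange, long = range, int  # Python 3: what 2to3 would have produced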
.travis.yml (66 lines changed)

@@ -1,67 +1,61 @@
language: python

python:
- '2.6'
- '2.7'
- '3.2'
- '3.3'
- '3.4'
- '3.5'
- pypy
- pypy3

env:
- PYMONGO=2.7.2 DJANGO=dev
- PYMONGO=2.7.2 DJANGO=1.7.1
- PYMONGO=2.7.2 DJANGO=1.6.8
- PYMONGO=2.7.2 DJANGO=1.5.11
- PYMONGO=2.8 DJANGO=dev
- PYMONGO=2.8 DJANGO=1.7.1
- PYMONGO=2.8 DJANGO=1.6.8
- PYMONGO=2.8 DJANGO=1.5.11
- PYMONGO=2.7
- PYMONGO=2.8
- PYMONGO=3.0
- PYMONGO=dev

matrix:
  exclude:
  - python: '2.6'
    env: PYMONGO=2.7.2 DJANGO=dev
  - python: '2.6'
    env: PYMONGO=2.8 DJANGO=dev
  - python: '2.6'
    env: PYMONGO=2.7.2 DJANGO=1.7.1
  - python: '2.6'
    env: PYMONGO=2.8 DJANGO=1.7.1
  allow_failures:
  - python: pypy3
  fast_finish: true

before_install:
- travis_retry sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10
- echo 'deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen' | sudo tee /etc/apt/sources.list.d/mongodb.list
- travis_retry sudo apt-get update
- travis_retry sudo apt-get install mongodb-org-server

install:
- sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev libtiff4-dev libjpeg8-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.5-dev tk8.5-dev python-tk
- if [[ $PYMONGO == 'dev' ]]; then travis_retry pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi
- if [[ $PYMONGO != 'dev' ]]; then travis_retry pip install pymongo==$PYMONGO; true; fi
- if [[ $DJANGO == 'dev' ]]; then travis_retry pip install git+https://github.com/django/django.git; fi
- if [[ $DJANGO != 'dev' ]]; then travis_retry pip install Django==$DJANGO; fi
- travis_retry pip install https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.1.tar.gz#md5=1534bb15cf311f07afaa3aacba1c028b
- travis_retry pip install --upgrade pip
- travis_retry pip install coveralls
- travis_retry python setup.py install
- travis_retry pip install flake8
- travis_retry pip install tox>=1.9
- travis_retry pip install "virtualenv<14.0.0" # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32)
- travis_retry tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test

# Run flake8 for py27
before_script:
- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then tox -e flake8; fi

script:
- travis_retry python setup.py test
- if [[ $TRAVIS_PYTHON_VERSION == '3.'* ]]; then 2to3 . -w; fi;
- coverage run --source=mongoengine setup.py test
- coverage report -m
- python benchmark.py
after_script: coveralls --verbose
- tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage

# For now only submit coveralls for Python v2.7. Python v3.x currently shows
# 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible
# code in a separate dir and runs tests on that.
after_script:
- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then coveralls --verbose; fi

notifications:
  irc: irc.freenode.org#mongoengine

branches:
  only:
  - master
  - /^v.*$/

deploy:
  provider: pypi
  user: the_drow
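The ``tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/')`` incantation derives a tox environment name from the build-matrix variables. A sketch of the same mapping in Python (the function name is illustrative, not from the repo):

.. code-block:: python

    def tox_env_name(python_version, pymongo_version):
        """Mimic the shell pipeline: strip dots, collapse the 'pypypy' prefix."""
        name = 'py{0}-mg{1}'.format(python_version, pymongo_version).replace('.', '')
        return name.replace('pypypy', 'pypy')

    assert tox_env_name('2.7', '3.0') == 'py27-mg30'
    assert tox_env_name('pypy', 'dev') == 'pypy-mgdev'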
AUTHORS (31 lines changed)

@@ -12,7 +12,7 @@ Laine Herron https://github.com/LaineHerron
 
 CONTRIBUTORS
 
-Dervived from the git logs, inevitably incomplete but all of whom and others
+Derived from the git logs, inevitably incomplete but all of whom and others
 have submitted patches, reported bugs and generally helped make MongoEngine
 that much better:
 
@@ -119,7 +119,7 @@ that much better:
 * Anton Kolechkin
 * Sergey Nikitin
 * psychogenic
-* Stefan Wójcik
+* Stefan Wójcik (https://github.com/wojcikstefan)
 * dimonb
 * Garry Polley
 * James Slagle
@@ -138,7 +138,6 @@ that much better:
 * hellysmile
 * Jaepil Jeong
 * Daniil Sharou
-* Stefan Wójcik
 * Pete Campton
 * Martyn Smith
 * Marcelo Anton
@@ -218,3 +217,29 @@ that much better:
 * Matthew Ellison (https://github.com/seglberg)
 * Jimmy Shen (https://github.com/jimmyshen)
 * J. Fernando Sánchez (https://github.com/balkian)
+* Michael Chase (https://github.com/rxsegrxup)
+* Eremeev Danil (https://github.com/elephanter)
+* Catstyle Lee (https://github.com/Catstyle)
+* Kiryl Yermakou (https://github.com/rma4ok)
+* Matthieu Rigal (https://github.com/MRigal)
+* Charanpal Dhanjal (https://github.com/charanpald)
+* Emmanuel Leblond (https://github.com/touilleMan)
+* Breeze.Kay (https://github.com/9nix00)
+* Vicki Donchenko (https://github.com/kivistein)
+* Emile Caron (https://github.com/emilecaron)
+* Amit Lichtenberg (https://github.com/amitlicht)
+* Gang Li (https://github.com/iici-gli)
+* Lars Butler (https://github.com/larsbutler)
+* George Macon (https://github.com/gmacon)
+* Ashley Whetter (https://github.com/AWhetter)
+* Paul-Armand Verhaegen (https://github.com/paularmand)
+* Steven Rossiter (https://github.com/BeardedSteve)
+* Luo Peng (https://github.com/RussellLuo)
+* Bryan Bennett (https://github.com/bbenne10)
+* Gilb's Gilb's (https://github.com/gilbsgilbs)
+* Joshua Nedrud (https://github.com/Neurostack)
+* Shu Shen (https://github.com/shushen)
+* xiaost7 (https://github.com/xiaost7)
+* Victor Varvaryuk
+* Stanislav Kaledin (https://github.com/sallyruthstruik)
+* Dmitry Yantsen (https://github.com/mrTable)
CONTRIBUTING.rst

@@ -29,7 +29,10 @@ Style Guide
 -----------
 
 MongoEngine aims to follow `PEP8 <http://www.python.org/dev/peps/pep-0008/>`_
-including 4 space indents and 79 character line limits.
+including 4 space indents. When possible we try to stick to 79 character line limits.
+However, screens got bigger and an ORM has a strong focus on readability and
+if it can help, we accept 119 as maximum line length, in a similar way as
+`django does <https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/coding-style/#python-style>`_
 
 Testing
 -------
@@ -38,14 +41,21 @@ All tests are run on `Travis <http://travis-ci.org/MongoEngine/mongoengine>`_
 and any pull requests are automatically tested by Travis. Any pull requests
 without tests will take longer to be integrated and might be refused.
 
+You may also submit a simple failing test as a PullRequest if you don't know
+how to fix it, it will be easier for other people to work on it and it may get
+fixed faster.
+
 General Guidelines
 ------------------
 
 - Avoid backward breaking changes if at all possible.
 - Write inline documentation for new classes and methods.
 - Write tests and make sure they pass (make sure you have a mongod
-  running on the default port, then execute ``python setup.py test``
+  running on the default port, then execute ``python setup.py nosetests``
   from the cmd line to run the test suite).
+- Ensure tests pass on every Python and PyMongo versions.
+  You can test on these versions locally by executing ``tox``
 - Add enhancements or problematic bug fixes to docs/changelog.rst
 - Add yourself to AUTHORS :)
 
 Documentation
README.rst (64 lines changed)

@@ -4,29 +4,31 @@ MongoEngine
 :Info: MongoEngine is an ORM-like layer on top of PyMongo.
 :Repository: https://github.com/MongoEngine/mongoengine
 :Author: Harry Marr (http://github.com/hmarr)
-:Maintainer: Ross Lawley (http://github.com/rozza)
+:Maintainer: Stefan Wójcik (http://github.com/wojcikstefan)
 
-.. image:: https://secure.travis-ci.org/MongoEngine/mongoengine.png?branch=master
-    :target: http://travis-ci.org/MongoEngine/mongoengine
+.. image:: https://travis-ci.org/MongoEngine/mongoengine.svg?branch=master
+    :target: https://travis-ci.org/MongoEngine/mongoengine
 
-.. image:: https://coveralls.io/repos/MongoEngine/mongoengine/badge.png?branch=master
-    :target: https://coveralls.io/r/MongoEngine/mongoengine?branch=master
+.. image:: https://coveralls.io/repos/github/MongoEngine/mongoengine/badge.svg?branch=master
+    :target: https://coveralls.io/github/MongoEngine/mongoengine?branch=master
 
-.. image:: https://landscape.io/github/MongoEngine/mongoengine/master/landscape.png
-    :target: https://landscape.io/github/MongoEngine/mongoengine/master
-    :alt: Code Health
+.. image:: https://landscape.io/github/MongoEngine/mongoengine/master/landscape.svg?style=flat
+    :target: https://landscape.io/github/MongoEngine/mongoengine/master
+    :alt: Code Health
 
 About
 =====
 MongoEngine is a Python Object-Document Mapper for working with MongoDB.
-Documentation available at http://mongoengine-odm.rtfd.org - there is currently
-a `tutorial <http://readthedocs.org/docs/mongoengine-odm/en/latest/tutorial.html>`_, a `user guide
-<https://mongoengine-odm.readthedocs.org/en/latest/guide/index.html>`_ and an `API reference
-<http://readthedocs.org/docs/mongoengine-odm/en/latest/apireference.html>`_.
+Documentation available at https://mongoengine-odm.readthedocs.io - there is currently
+a `tutorial <https://mongoengine-odm.readthedocs.io/tutorial.html>`_, a `user guide
+<https://mongoengine-odm.readthedocs.io/guide/index.html>`_ and an `API reference
+<https://mongoengine-odm.readthedocs.io/apireference.html>`_.
 
 Installation
 ============
-If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
+We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of
+`pip <https://pip.pypa.io/>`_. You can then use ``pip install -U mongoengine``.
+You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ and thus
 you can use ``easy_install -U mongoengine``. Otherwise, you can download the
 source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python
 setup.py install``.
@@ -38,21 +40,26 @@ Dependencies
 
 Optional Dependencies
 ---------------------
-- **Django Integration:** Django>=1.4.0 for Python 2.x or PyPy and Django>=1.5.0 for Python 3.x
-- **Image Fields**: Pillow>=2.0.0 or PIL (not recommended since MongoEngine is tested with Pillow)
+- **Image Fields**: Pillow>=2.0.0
 - dateutil>=2.1.0
 
 .. note
-   MongoEngine always runs it's test suite against the latest patch version of each dependecy. e.g.: Django 1.6.5
+   MongoEngine always runs it's test suite against the latest patch version of each dependecy. e.g.: PyMongo 3.0.1
 
 Examples
 ========
-Some simple examples of what MongoEngine code looks like::
+Some simple examples of what MongoEngine code looks like:
+
+.. code :: python
 
     from mongoengine import *
     connect('mydb')
 
     class BlogPost(Document):
         title = StringField(required=True, max_length=200)
         posted = DateTimeField(default=datetime.datetime.now)
         tags = ListField(StringField(max_length=50))
         meta = {'allow_inheritance': True}
 
     class TextPost(BlogPost):
         content = StringField(required=True)
@@ -82,7 +89,7 @@ Some simple examples of what MongoEngine code looks like::
 
     >>> len(BlogPost.objects)
     2
-    >>> len(HtmlPost.objects)
+    >>> len(TextPost.objects)
     1
     >>> len(LinkPost.objects)
     1
@@ -96,7 +103,26 @@ Some simple examples of what MongoEngine code looks like::
 Tests
 =====
 To run the test suite, ensure you are running a local instance of MongoDB on
-the standard port, and run: ``python setup.py test``.
+the standard port and have ``nose`` installed. Then, run: ``python setup.py nosetests``.
+
+To run the test suite on every supported Python version and every supported PyMongo version,
+you can use ``tox``.
+tox and each supported Python version should be installed in your environment:
+
+.. code-block:: shell
+
+    # Install tox
+    $ pip install tox
+    # Run the test suites
+    $ tox
+
+If you wish to run one single or selected tests, use the nosetest convention. It will find the folder,
+eventually the file, go to the TestClass specified after the colon and eventually right to the single test.
+Also use the -s argument if you want to print out whatever or access pdb while testing.
+
+.. code-block:: shell
+
+    $ python setup.py nosetests --tests tests/fields/fields.py:FieldTest.test_cls_field -s
+
 Community
 =========
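The ``len(TextPost.objects)`` counts in the README example rely on ``allow_inheritance``: querying the base class also matches its subclasses. A runnable sketch of that behaviour (assumes a mongod on the default port, as the README's test instructions do):

.. code-block:: python

    from mongoengine import Document, StringField, connect

    connect('mydb')  # assumes a local mongod on the standard port

    class BlogPost(Document):
        title = StringField(required=True, max_length=200)
        meta = {'allow_inheritance': True}

    class TextPost(BlogPost):
        content = StringField(required=True)

    TextPost(title='Fun with MongoEngine', content='Took a look today').save()

    # The subclass document is counted by both its own and the base queryset.
    assert TextPost.objects.count() <= BlogPost.objects.count()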
benchmark.py (152 lines changed)

@@ -1,118 +1,41 @@
 #!/usr/bin/env python
 
+"""
+Simple benchmark comparing PyMongo and MongoEngine.
+
+Sample run on a mid 2015 MacBook Pro (commit b282511):
+
+Benchmarking...
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - Pymongo
+2.58979988098
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - Pymongo write_concern={"w": 0}
+1.26657605171
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine
+8.4351580143
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries without continual assign - MongoEngine
+7.20191693306
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True
+6.31104588509
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True
+6.07083487511
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False
+5.97704291344
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False
+5.9111430645
+"""
+
 import timeit
 
 
-def cprofile_main():
-    from pymongo import Connection
-    connection = Connection()
-    connection.drop_database('timeit_test')
-    connection.disconnect()
-
-    from mongoengine import Document, DictField, connect
-    connect("timeit_test")
-
-    class Noddy(Document):
-        fields = DictField()
-
-    for i in range(1):
-        noddy = Noddy()
-        for j in range(20):
-            noddy.fields["key" + str(j)] = "value " + str(j)
-        noddy.save()
-
-
 def main():
-    """
-    0.4 Performance Figures ...
-
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - Pymongo
-    3.86744189262
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine
-    6.23374891281
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
-    5.33027005196
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
-    pass - No Cascade
-
-    0.5.X
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - Pymongo
-    3.89597702026
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine
-    21.7735359669
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
-    19.8670389652
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
-    pass - No Cascade
-
-    0.6.X
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - Pymongo
-    3.81559205055
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine
-    10.0446798801
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
-    9.51354718208
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
-    9.02567505836
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, force=True
-    8.44933390617
-
-    0.7.X
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - Pymongo
-    3.78801012039
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine
-    9.73050498962
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
-    8.33456707001
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
-    8.37778115273
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, force=True
-    8.36906409264
-    0.8.X
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - Pymongo
-    3.69964408875
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - Pymongo write_concern={"w": 0}
-    3.5526599884
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine
-    7.00959801674
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries without continual assign - MongoEngine
-    5.60943293571
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade=True
-    6.715102911
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True
-    5.50644683838
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False
-    4.69851183891
-    ----------------------------------------------------------------------------------------------------
-    Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False
-    4.68946313858
-    ----------------------------------------------------------------------------------------------------
-    """
     print("Benchmarking...")
 
     setup = """
@@ -131,7 +54,7 @@ noddy = db.noddy
 for i in range(10000):
     example = {'fields': {}}
     for j in range(20):
-        example['fields']["key"+str(j)] = "value "+str(j)
+        example['fields']['key' + str(j)] = 'value ' + str(j)
 
     noddy.save(example)
 
@@ -146,9 +69,10 @@ myNoddys = noddy.find()
 stmt = """
 from pymongo import MongoClient
+from pymongo.write_concern import WriteConcern
 connection = MongoClient()
 
-db = connection.timeit_test
+db = connection.get_database('timeit_test', write_concern=WriteConcern(w=0))
 noddy = db.noddy
 
 for i in range(10000):
@@ -156,7 +80,7 @@ for i in range(10000):
     for j in range(20):
         example['fields']["key"+str(j)] = "value "+str(j)
 
-    noddy.save(example, write_concern={"w": 0})
+    noddy.save(example)
 
 myNoddys = noddy.find()
 [n for n in myNoddys]  # iterate
@@ -171,10 +95,10 @@ myNoddys = noddy.find()
 from pymongo import MongoClient
 connection = MongoClient()
 connection.drop_database('timeit_test')
-connection.disconnect()
+connection.close()
 
 from mongoengine import Document, DictField, connect
-connect("timeit_test")
+connect('timeit_test')
 
 class Noddy(Document):
     fields = DictField()
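Throughout, the script pairs a ``setup`` string with a ``stmt`` string and times the latter via ``timeit.Timer``. A trimmed sketch of one MongoEngine case (assumes a local mongod; one repetition instead of the script's 10000 documents):

.. code-block:: python

    import timeit

    setup = """
    from pymongo import MongoClient
    MongoClient().drop_database('timeit_test')

    from mongoengine import Document, DictField, connect
    connect('timeit_test')

    class Noddy(Document):
        fields = DictField()
    """

    stmt = """
    noddy = Noddy()
    for j in range(20):
        noddy.fields['key' + str(j)] = 'value ' + str(j)
    noddy.save()
    """

    # Time a single document insert through the MongoEngine layer.
    print(timeit.Timer(stmt=stmt, setup=setup).timeit(1))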
docs/_themes/sphinx_rtd_theme/footer.html (vendored, 2 lines changed)

@@ -2,7 +2,7 @@
 {% if next or prev %}
   <div class="rst-footer-buttons">
     {% if next %}
-      <a href="{{ next.link|e }}" class="btn btn-neutral float-right" title="{{ next.title|striptags|e }}"/>Next <span class="icon icon-circle-arrow-right"></span></a>
+      <a href="{{ next.link|e }}" class="btn btn-neutral float-right" title="{{ next.title|striptags|e }}">Next <span class="icon icon-circle-arrow-right"></span></a>
     {% endif %}
     {% if prev %}
       <a href="{{ prev.link|e }}" class="btn btn-neutral" title="{{ prev.title|striptags|e }}"><span class="icon icon-circle-arrow-left"></span> Previous</a>
docs/apireference.rst

@@ -34,6 +34,9 @@ Documents
 .. autoclass:: mongoengine.ValidationError
    :members:
 
+.. autoclass:: mongoengine.FieldDoesNotExist
+
+
 Context Managers
 ================
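``FieldDoesNotExist``, newly documented above, is the error recent MongoEngine versions raise when a document is instantiated with an undeclared field. A sketch (database name illustrative; instantiation alone does not require a running server):

.. code-block:: python

    from mongoengine import Document, StringField, connect
    from mongoengine.errors import FieldDoesNotExist

    connect('example_db')  # illustrative; connections are lazy

    class Person(Document):
        name = StringField()

    try:
        Person(name='Ada', age=30)  # 'age' is not declared on Person
    except FieldDoesNotExist as exc:
        print(exc)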
docs/changelog.rst

@@ -2,9 +2,128 @@
 Changelog
 =========
 
+Development
+===========
+- (Fill this out as you fix issues and develop you features).
+
-Changes in 0.9.X - DEV
-======================
+Changes in 0.11.0
+=================
+- BREAKING CHANGE: Renamed `ConnectionError` to `MongoEngineConnectionError` since the former is a built-in exception name in Python v3.x. #1428
+- BREAKING CHANGE: Dropped Python 2.6 support. #1428
+- BREAKING CHANGE: `from mongoengine.base import ErrorClass` won't work anymore for any error from `mongoengine.errors` (e.g. `ValidationError`). Use `from mongoengine.errors import ErrorClass instead`. #1428
+- Fixed absent rounding for DecimalField when `force_string` is set. #1103
+
+Changes in 0.10.8
+=================
+- Added support for QuerySet.batch_size (#1426)
+- Fixed query set iteration within iteration #1427
+- Fixed an issue where specifying a MongoDB URI host would override more information than it should #1421
+- Added ability to filter the generic reference field by ObjectId and DBRef #1425
+- Fixed delete cascade for models with a custom primary key field #1247
+- Added ability to specify an authentication mechanism (e.g. X.509) #1333
+- Added support for falsey primary keys (e.g. doc.pk = 0) #1354
+- Fixed QuerySet#sum/average for fields w/ explicit db_field #1417
+- Fixed filtering by embedded_doc=None #1422
+- Added support for cursor.comment #1420
+- Fixed doc.get_<field>_display #1419
+- Fixed __repr__ method of the StrictDict #1424
+- Added a deprecation warning for Python 2.6
+
+Changes in 0.10.7
+=================
+- Dropped Python 3.2 support #1390
+- Fixed the bug where dynamic doc has index inside a dict field #1278
+- Fixed: ListField minus index assignment does not work #1128
+- Fixed cascade delete mixing among collections #1224
+- Add `signal_kwargs` argument to `Document.save`, `Document.delete` and `BaseQuerySet.insert` to be passed to signals calls #1206
+- Raise `OperationError` when trying to do a `drop_collection` on document with no collection set.
+- count on ListField of EmbeddedDocumentField fails. #1187
+- Fixed long fields stored as int32 in Python 3. #1253
+- MapField now handles unicodes keys correctly. #1267
+- ListField now handles negative indicies correctly. #1270
+- Fixed AttributeError when initializing EmbeddedDocument with positional args. #681
+- Fixed no_cursor_timeout error with pymongo 3.0+ #1304
+- Replaced map-reduce based QuerySet.sum/average with aggregation-based implementations #1336
+- Fixed support for `__` to escape field names that match operators names in `update` #1351
+- Fixed BaseDocument#_mark_as_changed #1369
+- Added support for pickling QuerySet instances. #1397
+- Fixed connecting to a list of hosts #1389
+- Fixed a bug where accessing broken references wouldn't raise a DoesNotExist error #1334
+- Fixed not being able to specify use_db_field=False on ListField(EmbeddedDocumentField) instances #1218
+- Improvements to the dictionary fields docs #1383
+
+Changes in 0.10.6
+=================
+- Add support for mocking MongoEngine based on mongomock. #1151
+- Fixed not being able to run tests on Windows. #1153
+- Allow creation of sparse compound indexes. #1114
+- count on ListField of EmbeddedDocumentField fails. #1187
+
+Changes in 0.10.5
+=================
+- Fix for reloading of strict with special fields. #1156
+
+Changes in 0.10.4
+=================
+- SaveConditionError is now importable from the top level package. #1165
+- upsert_one method added. #1157
+
+Changes in 0.10.3
+=================
+- Fix `read_preference` (it had chaining issues with PyMongo 2.x and it didn't work at all with PyMongo 3.x) #1042
+
+Changes in 0.10.2
+=================
+- Allow shard key to point to a field in an embedded document. #551
+- Allow arbirary metadata in fields. #1129
+- ReferenceFields now support abstract document types. #837
+
+Changes in 0.10.1
+=================
+- Fix infinite recursion with CASCADE delete rules under specific conditions. #1046
+- Fix CachedReferenceField bug when loading cached docs as DBRef but failing to save them. #1047
+- Fix ignored chained options #842
+- Document save's save_condition error raises `SaveConditionError` exception #1070
+- Fix Document.reload for DynamicDocument. #1050
+- StrictDict & SemiStrictDict are shadowed at init time. #1105
+- Fix ListField minus index assignment does not work. #1119
+- Remove code that marks field as changed when the field has default but not existed in database #1126
+- Remove test dependencies (nose and rednose) from install dependencies list. #1079
+- Recursively build query when using elemMatch operator. #1130
+- Fix instance back references for lists of embedded documents. #1131
+
+Changes in 0.10.0
+=================
+- Django support was removed and will be available as a separate extension. #958
+- Allow to load undeclared field with meta attribute 'strict': False #957
+- Support for PyMongo 3+ #946
+- Removed get_or_create() deprecated since 0.8.0. #300
+- Improve Document._created status when switch collection and db #1020
+- Queryset update doesn't go through field validation #453
+- Added support for specifying authentication source as option `authSource` in URI. #967
+- Fixed mark_as_changed to handle higher/lower level fields changed. #927
+- ListField of embedded docs doesn't set the _instance attribute when iterating over it #914
+- Support += and *= for ListField #595
+- Use sets for populating dbrefs to dereference
+- Fixed unpickled documents replacing the global field's list. #888
+- Fixed storage of microseconds in ComplexDateTimeField and unused separator option. #910
+- Don't send a "cls" option to ensureIndex (related to https://jira.mongodb.org/browse/SERVER-769)
+- Fix for updating sorting in SortedListField. #978
+- Added __ support to escape field name in fields lookup keywords that match operators names #949
+- Fix for issue where FileField deletion did not free space in GridFS.
+- No_dereference() not respected on embedded docs containing reference. #517
+- Document save raise an exception if save_condition fails #1005
+- Fixes some internal _id handling issue. #961
+- Updated URL and Email Field regex validators, added schemes argument to URLField validation. #652
+- Capped collection multiple of 256. #1011
+- Added `BaseQuerySet.aggregate_sum` and `BaseQuerySet.aggregate_average` methods.
+- Fix for delete with write_concern {'w': 0}. #1008
+- Allow dynamic lookup for more than two parts. #882
+- Added support for min_distance on geo queries. #831
+- Allow to add custom metadata to fields #705
+
+Changes in 0.9.0
+================
 - Update FileField when creating a new file #714
 - Added `EmbeddedDocumentListField` for Lists of Embedded Documents. #826
 - ComplexDateTimeField should fall back to None when null=True #864
@@ -78,6 +197,7 @@ Changes in 0.9.X - DEV
 - Fixed a few instances where reverse_delete_rule was written as reverse_delete_rules. #791
 - Make `in_bulk()` respect `no_dereference()` #775
 - Handle None from model __str__; Fixes #753 #754
+- _get_changed_fields fix for embedded documents with id field. #925
 
 Changes in 0.8.7
 ================
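The two 0.11.0 breaking changes around exceptions amount to an import change. A sketch of code adapted for 0.11 (names as given in the changelog; assumes mongoengine>=0.11 is installed):

.. code-block:: python

    # Pre-0.11 code imported the exception under its old, shadowing name:
    #     from mongoengine import ConnectionError
    # and could import errors from mongoengine.base. Under 0.11+:
    from mongoengine import MongoEngineConnectionError
    from mongoengine.errors import ValidationError, NotUniqueError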
docs/code/tumblelog.py

@@ -17,6 +17,10 @@ class Post(Document):
     tags = ListField(StringField(max_length=30))
     comments = ListField(EmbeddedDocumentField(Comment))
 
+    # bugfix
+    meta = {'allow_inheritance': True}
+
 
 class TextPost(Post):
     content = StringField()
 
@@ -45,7 +49,8 @@ print 'ALL POSTS'
 print
 for post in Post.objects:
     print post.title
-    print '=' * post.title.count()
+    #print '=' * post.title.count()
+    print "=" * 20
 
     if isinstance(post, TextPost):
         print post.content
docs/django.rst (180 lines changed)

@@ -2,176 +2,18 @@
 Django Support
 ==============
 
-.. note:: Updated to support Django 1.5
-
-Connecting
-==========
-In your **settings.py** file, ignore the standard database settings (unless you
-also plan to use the ORM in your project), and instead call
-:func:`~mongoengine.connect` somewhere in the settings module.
-
-.. note::
-   If you are not using another Database backend you may need to add a dummy
-   database backend to ``settings.py`` eg::
-
-        DATABASES = {
-            'default': {
-                'ENGINE': 'django.db.backends.dummy'
-            }
-        }
-
-Authentication
-==============
-MongoEngine includes a Django authentication backend, which uses MongoDB. The
-:class:`~mongoengine.django.auth.User` model is a MongoEngine
-:class:`~mongoengine.Document`, but implements most of the methods and
-attributes that the standard Django :class:`User` model does - so the two are
-moderately compatible. Using this backend will allow you to store users in
-MongoDB but still use many of the Django authentication infrastructure (such as
-the :func:`login_required` decorator and the :func:`authenticate` function). To
-enable the MongoEngine auth backend, add the following to your **settings.py**
-file::
-
-    AUTHENTICATION_BACKENDS = (
-        'mongoengine.django.auth.MongoEngineBackend',
-    )
-
-The :mod:`~mongoengine.django.auth` module also contains a
-:func:`~mongoengine.django.auth.get_user` helper function, that takes a user's
-:attr:`id` and returns a :class:`~mongoengine.django.auth.User` object.
-
-.. versionadded:: 0.1.3
-
-Custom User model
-=================
-Django 1.5 introduced `Custom user Models
-<https://docs.djangoproject.com/en/dev/topics/auth/customizing/#auth-custom-user>`_
-which can be used as an alternative to the MongoEngine authentication backend.
-
-The main advantage of this option is that other components relying on
-:mod:`django.contrib.auth` and supporting the new swappable user model are more
-likely to work. For example, you can use the ``createsuperuser`` management
-command as usual.
-
-To enable the custom User model in Django, add ``'mongoengine.django.mongo_auth'``
-in your ``INSTALLED_APPS`` and set ``'mongo_auth.MongoUser'`` as the custom user
-user model to use. In your **settings.py** file you will have::
-
-    INSTALLED_APPS = (
-        ...
-        'django.contrib.auth',
-        'mongoengine.django.mongo_auth',
-        ...
-    )
-
-    AUTH_USER_MODEL = 'mongo_auth.MongoUser'
-
-An additional ``MONGOENGINE_USER_DOCUMENT`` setting enables you to replace the
-:class:`~mongoengine.django.auth.User` class with another class of your choice::
-
-    MONGOENGINE_USER_DOCUMENT = 'mongoengine.django.auth.User'
-
-The custom :class:`User` must be a :class:`~mongoengine.Document` class, but
-otherwise has the same requirements as a standard custom user model,
-as specified in the `Django Documentation
-<https://docs.djangoproject.com/en/dev/topics/auth/customizing/>`_.
-In particular, the custom class must define :attr:`USERNAME_FIELD` and
-:attr:`REQUIRED_FIELDS` attributes.
-
-Sessions
-========
-Django allows the use of different backend stores for its sessions. MongoEngine
-provides a MongoDB-based session backend for Django, which allows you to use
-sessions in your Django application with just MongoDB. To enable the MongoEngine
-session backend, ensure that your settings module has
-``'django.contrib.sessions.middleware.SessionMiddleware'`` in the
-``MIDDLEWARE_CLASSES`` field and ``'django.contrib.sessions'`` in your
-``INSTALLED_APPS``. From there, all you need to do is add the following line
-into your settings module::
-
-    SESSION_ENGINE = 'mongoengine.django.sessions'
-    SESSION_SERIALIZER = 'mongoengine.django.sessions.BSONSerializer'
-
-Django provides session cookie, which expires after ```SESSION_COOKIE_AGE``` seconds, but doesn't delete cookie at sessions backend, so ``'mongoengine.django.sessions'`` supports `mongodb TTL
-<http://docs.mongodb.org/manual/tutorial/expire-data/>`_.
-
-.. note:: ``SESSION_SERIALIZER`` is only necessary in Django 1.6 as the default
-   serializer is based around JSON and doesn't know how to convert
-   ``bson.objectid.ObjectId`` instances to strings.
-
-.. versionadded:: 0.2.1
-
-Storage
-=======
-With MongoEngine's support for GridFS via the :class:`~mongoengine.fields.FileField`,
-it is useful to have a Django file storage backend that wraps this. The new
-storage module is called :class:`~mongoengine.django.storage.GridFSStorage`.
-Using it is very similar to using the default FileSystemStorage.::
-
-    from mongoengine.django.storage import GridFSStorage
-    fs = GridFSStorage()
-
-    filename = fs.save('hello.txt', 'Hello, World!')
-
-All of the `Django Storage API methods
-<http://docs.djangoproject.com/en/dev/ref/files/storage/>`_ have been
-implemented except :func:`path`. If the filename provided already exists, an
-underscore and a number (before # the file extension, if one exists) will be
-appended to the filename until the generated filename doesn't exist. The
-:func:`save` method will return the new filename.::
-
-    >>> fs.exists('hello.txt')
-    True
-    >>> fs.open('hello.txt').read()
-    'Hello, World!'
-    >>> fs.size('hello.txt')
-    13
-    >>> fs.url('hello.txt')
-    'http://your_media_url/hello.txt'
-    >>> fs.open('hello.txt').name
-    'hello.txt'
-    >>> fs.listdir()
-    ([], [u'hello.txt'])
-
-All files will be saved and retrieved in GridFS via the :class:`FileDocument`
-document, allowing easy access to the files without the GridFSStorage
-backend.::
-
-    >>> from mongoengine.django.storage import FileDocument
-    >>> FileDocument.objects()
-    [<FileDocument: FileDocument object>]
-
-.. versionadded:: 0.4
-
-Shortcuts
-=========
-Inspired by the `Django shortcut get_object_or_404
-<https://docs.djangoproject.com/en/dev/topics/http/shortcuts/#get-object-or-404>`_,
-the :func:`~mongoengine.django.shortcuts.get_document_or_404` method returns
-a document or raises an Http404 exception if the document does not exist::
-
-    from mongoengine.django.shortcuts import get_document_or_404
-
-    admin_user = get_document_or_404(User, username='root')
-
-The first argument may be a Document or QuerySet object. All other passed arguments
-and keyword arguments are used in the query::
-
-    foo_email = get_document_or_404(User.objects.only('email'), username='foo', is_active=True).email
-
-.. note:: Like with :func:`get`, a MultipleObjectsReturned will be raised if more than one
-   object is found.
+.. note:: Django support has been split from the main MongoEngine
+   repository. The *legacy* Django extension may be found bundled with the
+   0.9 release of MongoEngine.
+
-Also inspired by the `Django shortcut get_list_or_404
-<https://docs.djangoproject.com/en/dev/topics/http/shortcuts/#get-list-or-404>`_,
-the :func:`~mongoengine.django.shortcuts.get_list_or_404` method returns a list of
-documents or raises an Http404 exception if the list is empty::
-
-    from mongoengine.django.shortcuts import get_list_or_404
-
-    active_users = get_list_or_404(User, is_active=True)
-
-The first argument may be a Document or QuerySet object. All other passed
-arguments and keyword arguments are used to filter the query.
+Help Wanted!
+------------
+
+The MongoEngine team is looking for help contributing and maintaining a new
+Django extension for MongoEngine! If you have Django experience and would like
+to help contribute to the project, please get in touch on the
+`mailing list <http://groups.google.com/group/mongoengine-users>`_ or by
+simply contributing on
+`GitHub <https://github.com/MongoEngine/django-mongoengine>`_.
@@ -29,7 +29,7 @@ documents are serialized based on their field order.
|
||||
|
||||
Dynamic document schemas
|
||||
========================
|
||||
One of the benefits of MongoDb is dynamic schemas for a collection, whilst data
|
||||
One of the benefits of MongoDB is dynamic schemas for a collection, whilst data
|
||||
should be planned and organised (after all explicit is better than implicit!)
|
||||
there are scenarios where having dynamic / expando style documents is desirable.
|
||||
|
||||
@@ -75,6 +75,7 @@ are as follows:
|
||||
* :class:`~mongoengine.fields.DynamicField`
|
||||
* :class:`~mongoengine.fields.EmailField`
|
||||
* :class:`~mongoengine.fields.EmbeddedDocumentField`
|
||||
* :class:`~mongoengine.fields.EmbeddedDocumentListField`
|
||||
* :class:`~mongoengine.fields.FileField`
|
||||
* :class:`~mongoengine.fields.FloatField`
|
||||
* :class:`~mongoengine.fields.GenericEmbeddedDocumentField`
|
||||
@@ -114,7 +115,7 @@ arguments can be set on all fields:
|
||||
:attr:`default` (Default: None)
|
||||
A value to use when no value is set for this field.
|
||||
|
||||
The definion of default parameters follow `the general rules on Python
|
||||
The definition of default parameters follow `the general rules on Python
|
||||
<http://docs.python.org/reference/compound_stmts.html#function-definitions>`__,
|
||||
which means that some care should be taken when dealing with default mutable objects
|
||||
(like in :class:`~mongoengine.fields.ListField` or :class:`~mongoengine.fields.DictField`)::
|
||||
@@ -146,6 +147,8 @@ arguments can be set on all fields:
|
||||
When True, use this field as a primary key for the collection. `DictField`
|
||||
and `EmbeddedDocuments` both support being the primary key for a document.
|
||||
|
||||
.. note:: If set, this field is also accessible through the `pk` field.
|
||||
|
||||
:attr:`choices` (Default: None)
|
||||
An iterable (e.g. a list or tuple) of choices to which the value of this
|
||||
field should be limited.
|
||||
@@ -170,11 +173,11 @@ arguments can be set on all fields:
|
||||
class Shirt(Document):
|
||||
size = StringField(max_length=3, choices=SIZE)
|
||||
|
||||
:attr:`help_text` (Default: None)
|
||||
Optional help text to output with the field -- used by form libraries
|
||||
|
||||
:attr:`verbose_name` (Default: None)
|
||||
Optional human-readable name for the field -- used by form libraries
|
||||
:attr:`**kwargs` (Optional)
|
||||
You can supply additional metadata as arbitrary additional keyword
|
||||
arguments. You can not override existing attributes, however. Common
|
||||
choices include `help_text` and `verbose_name`, commonly used by form and
|
||||
widget libraries.
|
||||
|
||||
|
||||
List fields
|
||||
@@ -211,9 +214,9 @@ document class as the first argument::
|
||||
|
||||
Dictionary Fields
|
||||
-----------------
|
||||
Often, an embedded document may be used instead of a dictionary -- generally
|
||||
this is recommended as dictionaries don't support validation or custom field
|
||||
types. However, sometimes you will not know the structure of what you want to
|
||||
Often, an embedded document may be used instead of a dictionary – generally
|
||||
embedded documents are recommended as dictionaries don’t support validation
|
||||
or custom field types. However, sometimes you will not know the structure of what you want to
|
||||
store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate::
|
||||
|
||||
class SurveyResponse(Document):
|
||||
@@ -313,12 +316,12 @@ reference with a delete rule specification. A delete rule is specified by
|
||||
supplying the :attr:`reverse_delete_rule` attributes on the
|
||||
:class:`ReferenceField` definition, like this::
|
||||
|
||||
class Employee(Document):
|
||||
class ProfilePage(Document):
|
||||
...
|
||||
profile_page = ReferenceField('ProfilePage', reverse_delete_rule=mongoengine.NULLIFY)
|
||||
employee = ReferenceField('Employee', reverse_delete_rule=mongoengine.CASCADE)
|
||||
|
||||
The declaration in this example means that when an :class:`Employee` object is
|
||||
removed, the :class:`ProfilePage` that belongs to that employee is removed as
|
||||
removed, the :class:`ProfilePage` that references that employee is removed as
|
||||
well. If a whole batch of employees is removed, all profile pages that are
|
||||
linked are removed as well.
|
||||
|
||||
@@ -401,7 +404,7 @@ MongoEngine allows you to specify that a field should be unique across a
|
||||
collection by providing ``unique=True`` to a :class:`~mongoengine.fields.Field`\ 's
|
||||
constructor. If you try to save a document that has the same value for a unique
|
||||
field as a document that is already in the database, a
|
||||
:class:`~mongoengine.OperationError` will be raised. You may also specify
|
||||
:class:`~mongoengine.NotUniqueError` will be raised. You may also specify
|
||||
multi-field uniqueness constraints by using :attr:`unique_with`, which may be
|
||||
either a single field name, or a list or tuple of field names::
|
||||
|
||||
@@ -445,8 +448,10 @@ A :class:`~mongoengine.Document` may use a **Capped Collection** by specifying
|
||||
:attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary.
|
||||
:attr:`max_documents` is the maximum number of documents that is allowed to be
|
||||
stored in the collection, and :attr:`max_size` is the maximum size of the
|
||||
collection in bytes. If :attr:`max_size` is not specified and
|
||||
:attr:`max_documents` is, :attr:`max_size` defaults to 10000000 bytes (10MB).
|
||||
collection in bytes. :attr:`max_size` is rounded up to the next multiple of 256
|
||||
by MongoDB internally and mongoengine before. Use also a multiple of 256 to
|
||||
avoid confusions. If :attr:`max_size` is not specified and
|
||||
:attr:`max_documents` is, :attr:`max_size` defaults to 10485760 bytes (10MB).
|
||||
The following example shows a :class:`Log` document that will be limited to
|
||||
1000 entries and 2MB of disk space::
|
||||
|
||||
@@ -463,19 +468,26 @@ You can specify indexes on collections to make querying faster. This is done
 by creating a list of index specifications called :attr:`indexes` in the
 :attr:`~mongoengine.Document.meta` dictionary, where an index specification may
 either be a single field name, a tuple containing multiple field names, or a
-dictionary containing a full index definition. A direction may be specified on
-fields by prefixing the field name with a **+** (for ascending) or a **-** sign
-(for descending). Note that direction only matters on multi-field indexes.
-Text indexes may be specified by prefixing the field name with a **$**. ::
+dictionary containing a full index definition.
+
+A direction may be specified on fields by prefixing the field name with a
+**+** (for ascending) or a **-** sign (for descending). Note that direction
+only matters on multi-field indexes. Text indexes may be specified by prefixing
+the field name with a **$**. Hashed indexes may be specified by prefixing
+the field name with a **#**::

     class Page(Document):
         category = IntField()
         title = StringField()
         rating = StringField()
         created = DateTimeField()
         meta = {
             'indexes': [
                 'title',
                 '$title',  # text index
+                '#title',  # hashed index
                 ('title', '-rating'),
                 ('category', '_cls'),
                 {
                     'fields': ['created'],
                     'expireAfterSeconds': 3600
@@ -530,11 +542,14 @@ There are a few top level defaults for all indexes that can be set::

 :attr:`index_background` (Optional)
     Set the default value for if an index should be indexed in the background

+:attr:`index_cls` (Optional)
+    A way to turn off a specific index for _cls.
+
 :attr:`index_drop_dups` (Optional)
     Set the default value for if an index should drop duplicates

-:attr:`index_cls` (Optional)
-    A way to turn off a specific index for _cls.
+.. note:: Since MongoDB 3.0 drop_dups is not supported anymore. Raises a Warning
+   and has no effect
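Putting those defaults together, a hedged sketch of how they might appear in a document's ``meta`` (the values are chosen purely for illustration)::

    from mongoengine import Document, StringField

    class Page(Document):
        title = StringField()
        meta = {
            'index_background': True,  # build indexes in the background
            'index_cls': False,        # skip the automatic _cls index
            'indexes': ['title'],
        }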
 Compound Indexes and Indexing sub documents
docs/guide/index.rst
@@ -13,3 +13,4 @@ User Guide
    gridfs
    signals
    text-indexes
+   mongomock

docs/guide/mongomock.rst (new file, 21 lines)
@@ -0,0 +1,21 @@
+==============================
+Use mongomock for testing
+==============================
+
+`mongomock <https://github.com/vmalloc/mongomock/>`_ is a package to do just
+what the name implies, mocking a mongo database.
+
+To use with mongoengine, simply specify mongomock when connecting with
+mongoengine:
+
+.. code-block:: python
+
+    connect('mongoenginetest', host='mongomock://localhost')
+    conn = get_connection()
+
+or with an alias:
+
+.. code-block:: python
+
+    connect('mongoenginetest', host='mongomock://localhost', alias='testdb')
+    conn = get_connection('testdb')
docs/guide/querying.rst
@@ -39,6 +39,14 @@ syntax::
     # been written by a user whose 'country' field is set to 'uk'
     uk_pages = Page.objects(author__country='uk')

+.. note::
+
+   (version **0.9.1+**) If your field name resembles a MongoDB operator name
+   (for example ``type``, ``lte``, ``lt``...) and it sits at the end of the
+   lookup keyword, mongoengine automatically prepends ``$`` to it. To avoid
+   this, append ``__`` to your lookup keyword. For example, if your field name
+   is ``type``, you must query with ``.objects(user__type__="admin")`` instead
+   of ``.objects(user__type="admin")``.
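A concrete sketch of the trailing-underscore escape described in the note. An embedded ``user`` document is assumed here, since that is the usual shape of such nested lookups; the class names are illustrative only::

    from mongoengine import (Document, EmbeddedDocument,
                             EmbeddedDocumentField, StringField)

    class User(EmbeddedDocument):
        type = StringField()  # name clashes with the $type query operator

    class Page(Document):
        user = EmbeddedDocumentField(User)

    # The trailing __ stops mongoengine turning 'type' into $type.
    admin_pages = Page.objects(user__type__='admin')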
 Query operators
 ===============
@@ -138,9 +146,10 @@ The following were added in MongoEngine 0.8 for
     loc.objects(point__near=[40, 5])
     loc.objects(point__near={"type": "Point", "coordinates": [40, 5]})

-You can also set the maximum distance in meters as well::
+You can also set the maximum and/or the minimum distance in meters as well::

     loc.objects(point__near=[40, 5], point__max_distance=1000)
+    loc.objects(point__near=[40, 5], point__min_distance=100)

 The older 2D indexes are still supported with the
 :class:`~mongoengine.fields.GeoPointField`:
@@ -160,7 +169,8 @@ The older 2D indexes are still supported with the

 * ``max_distance`` -- can be added to your location queries to set a maximum
   distance.

+* ``min_distance`` -- can be added to your location queries to set a minimum
+  distance.

 Querying lists
 --------------
@@ -227,7 +237,7 @@ is preferred for achieving this::
     # All except for the first 5 people
     users = User.objects[5:]

-    # 5 users, starting from the 10th user found
+    # 5 users, starting from the 11th user found
     users = User.objects[10:15]

 You may also index the query to retrieve a single result. If an item at that
@@ -255,21 +265,11 @@ no document matches the query, and
 if more than one document matched the query. These exceptions are merged into
 your document definitions eg: `MyDoc.DoesNotExist`

-A variation of this method exists,
-:meth:`~mongoengine.queryset.QuerySet.get_or_create`, that will create a new
-document with the query arguments if no documents match the query. An
-additional keyword argument, :attr:`defaults` may be provided, which will be
-used as default values for the new document, in the case that it should need
-to be created::
-
-    >>> a, created = User.objects.get_or_create(name='User A', defaults={'age': 30})
-    >>> b, created = User.objects.get_or_create(name='User A', defaults={'age': 40})
-    >>> a.name == b.name and a.age == b.age
-    True
-
-.. warning::
-    :meth:`~mongoengine.queryset.QuerySet.get_or_create` method is deprecated
-    since :mod:`mongoengine` 0.8.
+A variation of this method, get_or_create() existed, but it was unsafe. It
+could not be made safe, because there are no transactions in mongoDB. Other
+approaches should be investigated, to ensure you don't accidentally duplicate
+data when using something similar to this method. Therefore it was deprecated
+in 0.8 and removed in 0.10.
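The changeset removes ``get_or_create()`` without naming a replacement; one commonly used substitute (a sketch under stated assumptions, not an official recipe from this diff) is an atomic upsert via :meth:`~mongoengine.queryset.QuerySet.modify`, reusing the ``User`` document from the removed example::

    user = User.objects(name='User A').modify(
        upsert=True,            # insert the document if it doesn't exist
        new=True,               # return the post-update document
        set_on_insert__age=30,  # applied only when inserting
    )

Because the query and update run as a single findAndModify command on the server, this avoids the read-then-write race that made get_or_create() unsafe.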
 Default Document queries
 ========================
@@ -347,6 +347,8 @@ way of achieving this::

     num_users = len(User.objects)

+Even if len() is the Pythonic way of counting results, keep in mind that if
+you are concerned about performance, :meth:`~mongoengine.queryset.QuerySet.count`
+is the way to go, since it only executes a server-side count query, while len()
+retrieves the results, places them in cache, and finally counts them. If we
+compare the performance of the two operations, len() is much slower than
+:meth:`~mongoengine.queryset.QuerySet.count`.
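In other words (a one-line illustration, assuming the ``User`` document from the earlier examples)::

    num_users = User.objects.count()  # one server-side count, no documents fetched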
 Further aggregation
 -------------------
 You may sum over the values of a specific field on documents using
@@ -598,7 +600,7 @@ Some variables are made available in the scope of the Javascript function:

 The following example demonstrates the intended usage of
 :meth:`~mongoengine.queryset.QuerySet.exec_js` by defining a function that sums
-over a field on a document (this functionality is already available throught
+over a field on a document (this functionality is already available through
 :meth:`~mongoengine.queryset.QuerySet.sum` but is shown here for sake of
 example)::

@@ -663,4 +665,3 @@ following example shows how the substitutions are made::
             return comments;
         }
     """)
-
docs/guide/text-indexes.rst
@@ -17,7 +17,7 @@ Use the *$* prefix to set a text index, Look the declaration::

     meta = {'indexes': [
         {'fields': ['$title', "$content"],
          'default_language': 'english',
-         'weight': {'title': 10, 'content': 2}
+         'weights': {'title': 10, 'content': 2}
         }
     ]}
docs/upgrade.rst
@@ -2,6 +2,49 @@
 Upgrading
 #########

+0.11.0
+******
+This release includes a major overhaul of MongoEngine's code quality and
+introduces a few breaking changes. It also touches many different parts of
+the package and although all the changes have been tested and scrutinized,
+you're encouraged to thoroughly test the upgrade.
+
+The first breaking change involves renaming `ConnectionError` to `MongoEngineConnectionError`.
+If you import or catch this exception, you'll need to rename it in your code.
+
+The second breaking change drops Python v2.6 support. If you run MongoEngine on
+that Python version, you'll need to upgrade it first.
+
+The third breaking change drops an old backward compatibility measure where
+`from mongoengine.base import ErrorClass` would work on top of
+`from mongoengine.errors import ErrorClass` (where `ErrorClass` is e.g.
+`ValidationError`). If you import any exceptions from `mongoengine.base`,
+change it to `mongoengine.errors`.
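A before/after sketch of the two import changes described above::

    # 0.10.x and earlier
    from mongoengine import ConnectionError
    from mongoengine.base import ValidationError

    # 0.11.0
    from mongoengine import MongoEngineConnectionError
    from mongoengine.errors import ValidationError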
+0.10.8
+******
+This version fixed an issue where specifying a MongoDB URI host would override
+more information than it should. These changes are minor, but they still
+subtly modify the connection logic and thus you're encouraged to test your
+MongoDB connection before shipping v0.10.8 in production.
+
+0.10.7
+******
+
+`QuerySet.aggregate_sum` and `QuerySet.aggregate_average` are dropped. Use
+`QuerySet.sum` and `QuerySet.average` instead, which use the aggregation
+framework by default from now on.

 0.9.0
 *****

 The 0.8.7 package on pypi was corrupted. If upgrading from 0.8.7 to 0.9.0 please follow: ::

     pip uninstall pymongo
     pip uninstall mongoengine
     pip install pymongo==2.8
     pip install mongoengine

 0.8.7
 *****
mongoengine/__init__.py
@@ -1,26 +1,36 @@
-import document
-from document import *
-import fields
-from fields import *
-import connection
-from connection import *
-import queryset
-from queryset import *
-import signals
-from signals import *
-from errors import *
-import errors
-import django
+# Import submodules so that we can expose their __all__
+from mongoengine import connection
+from mongoengine import document
+from mongoengine import errors
+from mongoengine import fields
+from mongoengine import queryset
+from mongoengine import signals

-__all__ = (list(document.__all__) + fields.__all__ + connection.__all__ +
-           list(queryset.__all__) + signals.__all__ + list(errors.__all__))
+# Import everything from each submodule so that it can be accessed via
+# mongoengine, e.g. instead of `from mongoengine.connection import connect`,
+# users can simply use `from mongoengine import connect`, or even
+# `from mongoengine import *` and then `connect('testdb')`.
+from mongoengine.connection import *
+from mongoengine.document import *
+from mongoengine.errors import *
+from mongoengine.fields import *
+from mongoengine.queryset import *
+from mongoengine.signals import *

-VERSION = (0, 9, 0)
+__all__ = (list(document.__all__) + list(fields.__all__) +
+           list(connection.__all__) + list(queryset.__all__) +
+           list(signals.__all__) + list(errors.__all__))


+VERSION = (0, 11, 0)


 def get_version():
-    if isinstance(VERSION[-1], basestring):
-        return '.'.join(map(str, VERSION[:-1])) + VERSION[-1]
+    """Return the VERSION as a string, e.g. for VERSION == (0, 10, 7),
+    return '0.10.7'.
+    """
     return '.'.join(map(str, VERSION))


 __version__ = get_version()
mongoengine/base/__init__.py
@@ -1,8 +1,28 @@
+# Base module is split into several files for convenience. Files inside of
+# this module should import from a specific submodule (e.g.
+# `from mongoengine.base.document import BaseDocument`), but all of the
+# other modules should import directly from the top-level module (e.g.
+# `from mongoengine.base import BaseDocument`). This approach is cleaner and
+# also helps with cyclical import errors.
 from mongoengine.base.common import *
 from mongoengine.base.datastructures import *
 from mongoengine.base.document import *
 from mongoengine.base.fields import *
 from mongoengine.base.metaclasses import *

-# Help with backwards compatibility
-from mongoengine.errors import *
+__all__ = (
+    # common
+    'UPDATE_OPERATORS', '_document_registry', 'get_document',
+
+    # datastructures
+    'BaseDict', 'BaseList', 'EmbeddedDocumentList',
+
+    # document
+    'BaseDocument',
+
+    # fields
+    'BaseField', 'ComplexBaseField', 'ObjectIdField', 'GeoJsonBaseField',
+
+    # metaclasses
+    'DocumentMetaclass', 'TopLevelDocumentMetaclass'
+)
mongoengine/base/common.py
@@ -1,13 +1,18 @@
 from mongoengine.errors import NotRegistered

-__all__ = ('ALLOW_INHERITANCE', 'get_document', '_document_registry')
+__all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry')


+UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push',
+                        'push_all', 'pull', 'pull_all', 'add_to_set',
+                        'set_on_insert', 'min', 'max'])

-ALLOW_INHERITANCE = False

 _document_registry = {}


 def get_document(name):
+    """Get a document class by name."""
     doc = _document_registry.get(name, None)
     if not doc:
         # Possible old style name
mongoengine/base/datastructures.py
@@ -1,14 +1,16 @@
-import weakref
 import functools
 import itertools
+import weakref
+
+import six

 from mongoengine.common import _import_class
 from mongoengine.errors import DoesNotExist, MultipleObjectsReturned

-__all__ = ("BaseDict", "BaseList", "EmbeddedDocumentList")
+__all__ = ('BaseDict', 'BaseList', 'EmbeddedDocumentList')


 class BaseDict(dict):
-    """A special dict so we can watch any changes"""
+    """A special dict so we can watch any changes."""

     _dereferenced = False
     _instance = None
@@ -21,7 +23,7 @@ class BaseDict(dict):
         if isinstance(instance, (Document, EmbeddedDocument)):
             self._instance = weakref.proxy(instance)
         self._name = name
-        return super(BaseDict, self).__init__(dict_items)
+        super(BaseDict, self).__init__(dict_items)

     def __getitem__(self, key, *args, **kwargs):
         value = super(BaseDict, self).__getitem__(key)
@@ -66,7 +68,7 @@ class BaseDict(dict):

     def clear(self, *args, **kwargs):
         self._mark_as_changed()
-        return super(BaseDict, self).clear(*args, **kwargs)
+        return super(BaseDict, self).clear()

     def pop(self, *args, **kwargs):
         self._mark_as_changed()
@@ -74,7 +76,7 @@ class BaseDict(dict):

     def popitem(self, *args, **kwargs):
         self._mark_as_changed()
-        return super(BaseDict, self).popitem(*args, **kwargs)
+        return super(BaseDict, self).popitem()

     def setdefault(self, *args, **kwargs):
         self._mark_as_changed()
@@ -93,8 +95,7 @@ class BaseDict(dict):


 class BaseList(list):
-    """A special list so we can watch any changes
-    """
+    """A special list so we can watch any changes."""

     _dereferenced = False
     _instance = None
@@ -125,6 +126,10 @@ class BaseList(list):
             value._instance = self._instance
         return value

+    def __iter__(self):
+        for i in xrange(self.__len__()):
+            yield self[i]
+
     def __setitem__(self, key, value, *args, **kwargs):
         if isinstance(key, slice):
             self._mark_as_changed()
@@ -156,6 +161,14 @@ class BaseList(list):
         self = state
         return self

+    def __iadd__(self, other):
+        self._mark_as_changed()
+        return super(BaseList, self).__iadd__(other)
+
+    def __imul__(self, other):
+        self._mark_as_changed()
+        return super(BaseList, self).__imul__(other)
+
     def append(self, *args, **kwargs):
         self._mark_as_changed()
         return super(BaseList, self).append(*args, **kwargs)
@@ -178,7 +191,7 @@ class BaseList(list):

     def reverse(self, *args, **kwargs):
         self._mark_as_changed()
-        return super(BaseList, self).reverse(*args, **kwargs)
+        return super(BaseList, self).reverse()

     def sort(self, *args, **kwargs):
         self._mark_as_changed()
@@ -187,7 +200,9 @@ class BaseList(list):
     def _mark_as_changed(self, key=None):
         if hasattr(self._instance, '_mark_as_changed'):
             if key:
-                self._instance._mark_as_changed('%s.%s' % (self._name, key))
+                self._instance._mark_as_changed(
+                    '%s.%s' % (self._name, key % len(self))
+                )
             else:
                 self._instance._mark_as_changed(self._name)

@@ -195,17 +210,22 @@ class BaseList(list):
 class EmbeddedDocumentList(BaseList):

     @classmethod
-    def __match_all(cls, i, kwargs):
-        items = kwargs.items()
-        return all([
-            getattr(i, k) == v or str(getattr(i, k)) == v for k, v in items
-        ])
+    def __match_all(cls, embedded_doc, kwargs):
+        """Return True if a given embedded doc matches all the filter
+        kwargs. If it doesn't return False.
+        """
+        for key, expected_value in kwargs.items():
+            doc_val = getattr(embedded_doc, key)
+            if doc_val != expected_value and six.text_type(doc_val) != expected_value:
+                return False
+        return True

     @classmethod
-    def __only_matches(cls, obj, kwargs):
+    def __only_matches(cls, embedded_docs, kwargs):
+        """Return embedded docs that match the filter kwargs."""
         if not kwargs:
-            return obj
-        return filter(lambda i: cls.__match_all(i, kwargs), obj)
+            return embedded_docs
+        return [doc for doc in embedded_docs if cls.__match_all(doc, kwargs)]

     def __init__(self, list_items, instance, name):
         super(EmbeddedDocumentList, self).__init__(list_items, instance, name)
@@ -271,18 +291,18 @@ class EmbeddedDocumentList(BaseList):
         values = self.__only_matches(self, kwargs)
         if len(values) == 0:
             raise DoesNotExist(
-                "%s matching query does not exist." % self._name
+                '%s matching query does not exist.' % self._name
             )
         elif len(values) > 1:
             raise MultipleObjectsReturned(
-                "%d items returned, instead of 1" % len(values)
+                '%d items returned, instead of 1' % len(values)
             )

         return values[0]

     def first(self):
-        """
-        Returns the first embedded document in the list, or ``None`` if empty.
+        """Return the first embedded document in the list, or ``None``
+        if empty.
         """
         if len(self) > 0:
             return self[0]
@@ -357,25 +377,31 @@ class StrictDict(object):
     __slots__ = ()
     _special_fields = set(['get', 'pop', 'iteritems', 'items', 'keys', 'create'])
     _classes = {}

     def __init__(self, **kwargs):
-        for k,v in kwargs.iteritems():
+        for k, v in kwargs.iteritems():
             setattr(self, k, v)

     def __getitem__(self, key):
         key = '_reserved_' + key if key in self._special_fields else key
         try:
             return getattr(self, key)
         except AttributeError:
             raise KeyError(key)

     def __setitem__(self, key, value):
         key = '_reserved_' + key if key in self._special_fields else key
         return setattr(self, key, value)

+    def __contains__(self, key):
+        return hasattr(self, key)
+
     def get(self, key, default=None):
         try:
             return self[key]
         except KeyError:
             return default

     def pop(self, key, default=None):
         v = self.get(key, default)
         try:
@@ -383,19 +409,29 @@ class StrictDict(object):
         except AttributeError:
             pass
         return v

     def iteritems(self):
         for key in self:
             yield key, self[key]

     def items(self):
         return [(k, self[k]) for k in iter(self)]

+    def iterkeys(self):
+        return iter(self)
+
+    def keys(self):
+        return list(iter(self))
+
+    def __iter__(self):
+        return (key for key in self.__slots__ if hasattr(self, key))
+
     def __len__(self):
         return len(list(self.iteritems()))

     def __eq__(self, other):
         return self.items() == other.items()

     def __neq__(self, other):
         return self.items() != other.items()

@@ -406,15 +442,18 @@ class StrictDict(object):
         if allowed_keys not in cls._classes:
             class SpecificStrictDict(cls):
                 __slots__ = allowed_keys_tuple

                 def __repr__(self):
-                    return "{%s}" % ', '.join('"{0!s}": {0!r}'.format(k,v) for (k,v) in self.iteritems())
+                    return '{%s}' % ', '.join('"{0!s}": {1!r}'.format(k, v) for k, v in self.items())

             cls._classes[allowed_keys] = SpecificStrictDict
         return cls._classes[allowed_keys]


 class SemiStrictDict(StrictDict):
-    __slots__ = ('_extras')
+    __slots__ = ('_extras', )
     _classes = {}

     def __getattr__(self, attr):
         try:
             super(SemiStrictDict, self).__getattr__(attr)
@@ -423,6 +462,7 @@ class SemiStrictDict(StrictDict):
                 return self.__getattribute__('_extras')[attr]
             except KeyError as e:
                 raise AttributeError(e)

     def __setattr__(self, attr, value):
         try:
             super(SemiStrictDict, self).__setattr__(attr, value)
[File diff suppressed because it is too large -- from its position between datastructures.py and fields.py this is presumably mongoengine/base/document.py]
mongoengine/base/fields.py
@@ -4,27 +4,25 @@ import weakref

 from bson import DBRef, ObjectId, SON
 import pymongo
+import six

+from mongoengine.base.common import UPDATE_OPERATORS
+from mongoengine.base.datastructures import (BaseDict, BaseList,
+                                             EmbeddedDocumentList)
 from mongoengine.common import _import_class
 from mongoengine.errors import ValidationError

-from mongoengine.base.common import ALLOW_INHERITANCE
-from mongoengine.base.datastructures import (
-    BaseDict, BaseList, EmbeddedDocumentList
-)

-__all__ = ("BaseField", "ComplexBaseField",
-           "ObjectIdField", "GeoJsonBaseField")
+__all__ = ('BaseField', 'ComplexBaseField', 'ObjectIdField',
+           'GeoJsonBaseField')


 class BaseField(object):
     """A base class for fields in a MongoDB document. Instances of this class
     may be added to subclasses of `Document` to define a document's schema.

-    .. versionchanged:: 0.5 - added verbose and help text
     """

     name = None
     _geo_index = False
     _auto_gen = False  # Call `generate` to generate a value
@@ -38,8 +36,8 @@ class BaseField(object):

     def __init__(self, db_field=None, name=None, required=False, default=None,
                  unique=False, unique_with=None, primary_key=False,
-                 validation=None, choices=None, verbose_name=None,
-                 help_text=None, null=False, sparse=False):
+                 validation=None, choices=None, null=False, sparse=False,
+                 **kwargs):
         """
         :param db_field: The database field to store this field in
             (defaults to the name of the field)
@@ -57,20 +55,20 @@ class BaseField(object):
             field. Generally this is deprecated in favour of the
             `FIELD.validate` method
         :param choices: (optional) The valid choices
-        :param verbose_name: (optional) The verbose name for the field.
-            Designed to be human readable and is often used when generating
-            model forms from the document model.
-        :param help_text: (optional) The help text for this field and is often
-            used when generating model forms from the document model.
         :param null: (optional) Is the field value can be null. If no and there is a default value
             then the default value is set
         :param sparse: (optional) `sparse=True` combined with `unique=True` and `required=False`
             means that uniqueness won't be enforced for `None` values
+        :param **kwargs: (optional) Arbitrary indirection-free metadata for
+            this field can be supplied as additional keyword arguments and
+            accessed as attributes of the field. Must not conflict with any
+            existing attributes. Common metadata includes `verbose_name` and
+            `help_text`.
         """
         self.db_field = (db_field or name) if not primary_key else '_id'

         if name:
-            msg = "Fields' 'name' attribute deprecated in favour of 'db_field'"
+            msg = 'Field\'s "name" attribute deprecated in favour of "db_field"'
             warnings.warn(msg, DeprecationWarning)
         self.required = required or primary_key
         self.default = default
@@ -79,10 +77,19 @@ class BaseField(object):
         self.primary_key = primary_key
         self.validation = validation
         self.choices = choices
-        self.verbose_name = verbose_name
-        self.help_text = help_text
         self.null = null
         self.sparse = sparse
         self._owner_document = None

+        # Detect and report conflicts between metadata and base properties.
+        conflicts = set(dir(self)) & set(kwargs)
+        if conflicts:
+            raise TypeError('%s already has attribute(s): %s' % (
+                self.__class__.__name__, ', '.join(conflicts)))
+
+        # Assign metadata to the instance
+        # This efficient method is available because no __slots__ are defined.
+        self.__dict__.update(kwargs)
+
         # Adjust the appropriate creation counter, and save our local copy.
         if self.db_field == '_id':
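The net effect of the new ``**kwargs`` handling is that arbitrary field metadata survives as plain attributes. A hedged usage sketch (the ``Person`` class is an assumption; the two metadata names are the ones the docstring itself calls out)::

    from mongoengine import Document, StringField

    class Person(Document):
        name = StringField(verbose_name='Full name',  # stored via **kwargs
                           help_text='Shown when rendering forms')

    # Accessing the attribute on the class returns the field descriptor,
    # so the metadata is reachable directly:
    assert Person.name.verbose_name == 'Full name'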
@@ -106,7 +113,7 @@ class BaseField(object):
         """Descriptor for assigning a value to a field in a document.
         """

-        # If setting to None and theres a default
+        # If setting to None and there is a default
         # Then set the value to the default value
         if value is None:
             if self.null:
@@ -121,7 +128,7 @@ class BaseField(object):
             if (self.name not in instance._data or
                     instance._data[self.name] != value):
                 instance._mark_as_changed(self.name)
-        except:
+        except Exception:
             # Values cant be compared eg: naive and tz datetimes
             # So mark it as changed
             instance._mark_as_changed(self.name)
@@ -129,54 +136,71 @@ class BaseField(object):
         EmbeddedDocument = _import_class('EmbeddedDocument')
         if isinstance(value, EmbeddedDocument):
             value._instance = weakref.proxy(instance)
         elif isinstance(value, (list, tuple)):
             for v in value:
                 if isinstance(v, EmbeddedDocument):
                     v._instance = weakref.proxy(instance)
         instance._data[self.name] = value

-    def error(self, message="", errors=None, field_name=None):
-        """Raises a ValidationError.
-        """
+    def error(self, message='', errors=None, field_name=None):
+        """Raise a ValidationError."""
         field_name = field_name if field_name else self.name
         raise ValidationError(message, errors=errors, field_name=field_name)

     def to_python(self, value):
-        """Convert a MongoDB-compatible type to a Python type.
-        """
+        """Convert a MongoDB-compatible type to a Python type."""
         return value

     def to_mongo(self, value):
-        """Convert a Python type to a MongoDB-compatible type.
-        """
+        """Convert a Python type to a MongoDB-compatible type."""
         return self.to_python(value)

+    def _to_mongo_safe_call(self, value, use_db_field=True, fields=None):
+        """Helper method to call to_mongo with proper inputs."""
+        f_inputs = self.to_mongo.__code__.co_varnames
+        ex_vars = {}
+        if 'fields' in f_inputs:
+            ex_vars['fields'] = fields
+
+        if 'use_db_field' in f_inputs:
+            ex_vars['use_db_field'] = use_db_field
+
+        return self.to_mongo(value, **ex_vars)
+
     def prepare_query_value(self, op, value):
-        """Prepare a value that is being used in a query for PyMongo.
-        """
+        """Prepare a value that is being used in a query for PyMongo."""
         if op in UPDATE_OPERATORS:
             self.validate(value)
         return value

     def validate(self, value, clean=True):
-        """Perform validation on a value.
-        """
+        """Perform validation on a value."""
         pass

-    def _validate(self, value, **kwargs):
+    def _validate_choices(self, value):
         Document = _import_class('Document')
         EmbeddedDocument = _import_class('EmbeddedDocument')

+        choice_list = self.choices
+        if isinstance(choice_list[0], (list, tuple)):
+            choice_list = [k for k, _ in choice_list]
+
+        # Choices which are other types of Documents
+        if isinstance(value, (Document, EmbeddedDocument)):
+            if not any(isinstance(value, c) for c in choice_list):
+                self.error(
+                    'Value must be an instance of %s' % (
+                        six.text_type(choice_list)
+                    )
+                )
+        # Choices which are types other than Documents
+        elif value not in choice_list:
+            self.error('Value must be one of %s' % six.text_type(choice_list))
+
+    def _validate(self, value, **kwargs):
         # Check the Choices Constraint
         if self.choices:
-
-            choice_list = self.choices
-            if isinstance(self.choices[0], (list, tuple)):
-                choice_list = [k for k, v in self.choices]
-
-            # Choices which are other types of Documents
-            if isinstance(value, (Document, EmbeddedDocument)):
-                if not any(isinstance(value, c) for c in choice_list):
-                    self.error(
-                        'Value must be instance of %s' % unicode(choice_list)
-                    )
-            # Choices which are types other than Documents
-            elif value not in choice_list:
-                self.error('Value must be one of %s' % unicode(choice_list))
+            self._validate_choices(value)

         # check validation argument
         if self.validation is not None:
@@ -189,9 +213,19 @@ class BaseField(object):

         self.validate(value, **kwargs)

+    @property
+    def owner_document(self):
+        return self._owner_document
+
     def _set_owner_document(self, owner_document):
         self._owner_document = owner_document

+    @owner_document.setter
+    def owner_document(self, owner_document):
+        self._set_owner_document(owner_document)
+

 class ComplexBaseField(BaseField):
     """Handles complex fields, such as lists / dictionaries.

     Allows for nesting of embedded documents inside complex types.
@@ -204,8 +238,7 @@ class ComplexBaseField(BaseField):
     field = None

     def __get__(self, instance, owner):
-        """Descriptor to automatically dereference references.
-        """
+        """Descriptor to automatically dereference references."""
         if instance is None:
             # Document class being used rather than a document object
             return self
@@ -217,7 +250,7 @@ class ComplexBaseField(BaseField):
                        (self.field is None or isinstance(self.field,
                                                          (GenericReferenceField, ReferenceField))))

-        _dereference = _import_class("DeReference")()
+        _dereference = _import_class('DeReference')()

         self._auto_dereference = instance._fields[self.name]._auto_dereference
         if instance._initialised and dereference and instance._data.get(self.name):
@@ -241,8 +274,8 @@ class ComplexBaseField(BaseField):
             instance._data[self.name] = value

         if (self._auto_dereference and instance._initialised and
-                isinstance(value, (BaseList, BaseDict))
-                and not value._dereferenced):
+                isinstance(value, (BaseList, BaseDict)) and
+                not value._dereferenced):
             value = _dereference(
                 value, max_depth=1, instance=instance, name=self.name
             )
@@ -252,11 +285,8 @@ class ComplexBaseField(BaseField):
         return value

     def to_python(self, value):
-        """Convert a MongoDB-compatible type to a Python type.
-        """
-        Document = _import_class('Document')
-
-        if isinstance(value, basestring):
+        """Convert a MongoDB-compatible type to a Python type."""
+        if isinstance(value, six.string_types):
             return value

         if hasattr(value, 'to_python'):
@@ -266,14 +296,16 @@ class ComplexBaseField(BaseField):
         if not hasattr(value, 'items'):
             try:
                 is_list = True
-                value = dict([(k, v) for k, v in enumerate(value)])
+                value = {k: v for k, v in enumerate(value)}
             except TypeError:  # Not iterable return the value
                 return value

         if self.field:
-            value_dict = dict([(key, self.field.to_python(item))
-                               for key, item in value.items()])
+            self.field._auto_dereference = self._auto_dereference
+            value_dict = {key: self.field.to_python(item)
+                          for key, item in value.items()}
         else:
+            Document = _import_class('Document')
             value_dict = {}
             for k, v in value.items():
                 if isinstance(v, Document):
@@ -289,27 +321,26 @@ class ComplexBaseField(BaseField):
                     value_dict[k] = self.to_python(v)

         if is_list:  # Convert back to a list
-            return [v for k, v in sorted(value_dict.items(),
+            return [v for _, v in sorted(value_dict.items(),
                                          key=operator.itemgetter(0))]
         return value_dict

-    def to_mongo(self, value):
-        """Convert a Python type to a MongoDB-compatible type.
-        """
-        Document = _import_class("Document")
-        EmbeddedDocument = _import_class("EmbeddedDocument")
-        GenericReferenceField = _import_class("GenericReferenceField")
+    def to_mongo(self, value, use_db_field=True, fields=None):
+        """Convert a Python type to a MongoDB-compatible type."""
+        Document = _import_class('Document')
+        EmbeddedDocument = _import_class('EmbeddedDocument')
+        GenericReferenceField = _import_class('GenericReferenceField')

-        if isinstance(value, basestring):
+        if isinstance(value, six.string_types):
             return value

         if hasattr(value, 'to_mongo'):
             if isinstance(value, Document):
                 return GenericReferenceField().to_mongo(value)
             cls = value.__class__
-            val = value.to_mongo()
-            # If we its a document thats not inherited add _cls
-            if (isinstance(value, EmbeddedDocument)):
+            val = value.to_mongo(use_db_field, fields)
+            # If it's a document that is not inherited add _cls
+            if isinstance(value, EmbeddedDocument):
                 val['_cls'] = cls.__name__
             return val

@@ -317,13 +348,15 @@ class ComplexBaseField(BaseField):
         if not hasattr(value, 'items'):
             try:
                 is_list = True
-                value = dict([(k, v) for k, v in enumerate(value)])
+                value = {k: v for k, v in enumerate(value)}
             except TypeError:  # Not iterable return the value
                 return value

         if self.field:
-            value_dict = dict([(key, self.field.to_mongo(item))
-                               for key, item in value.iteritems()])
+            value_dict = {
+                key: self.field._to_mongo_safe_call(item, use_db_field, fields)
+                for key, item in value.iteritems()
+            }
         else:
             value_dict = {}
             for k, v in value.iteritems():
@@ -337,9 +370,7 @@ class ComplexBaseField(BaseField):
                     # any _cls data so make it a generic reference allows
                     # us to dereference
                     meta = getattr(v, '_meta', {})
-                    allow_inheritance = (
-                        meta.get('allow_inheritance', ALLOW_INHERITANCE)
-                        is True)
+                    allow_inheritance = meta.get('allow_inheritance')
                     if not allow_inheritance and not self.field:
                         value_dict[k] = GenericReferenceField().to_mongo(v)
                     else:
@@ -347,22 +378,21 @@ class ComplexBaseField(BaseField):
                         value_dict[k] = DBRef(collection, v.pk)
                 elif hasattr(v, 'to_mongo'):
                     cls = v.__class__
-                    val = v.to_mongo()
-                    # If we its a document thats not inherited add _cls
-                    if (isinstance(v, (Document, EmbeddedDocument))):
+                    val = v.to_mongo(use_db_field, fields)
+                    # If it's a document that is not inherited add _cls
+                    if isinstance(v, (Document, EmbeddedDocument)):
                         val['_cls'] = cls.__name__
                     value_dict[k] = val
                 else:
-                    value_dict[k] = self.to_mongo(v)
+                    value_dict[k] = self.to_mongo(v, use_db_field, fields)

         if is_list:  # Convert back to a list
-            return [v for k, v in sorted(value_dict.items(),
+            return [v for _, v in sorted(value_dict.items(),
                                          key=operator.itemgetter(0))]
         return value_dict

     def validate(self, value):
-        """If field is provided ensure the value is valid.
-        """
+        """If field is provided ensure the value is valid."""
         errors = {}
         if self.field:
             if hasattr(value, 'iteritems') or hasattr(value, 'items'):
@@ -372,9 +402,9 @@ class ComplexBaseField(BaseField):
             for k, v in sequence:
                 try:
                     self.field._validate(v)
-                except ValidationError, error:
+                except ValidationError as error:
                     errors[k] = error.errors or error
-                except (ValueError, AssertionError), error:
+                except (ValueError, AssertionError) as error:
                     errors[k] = error

             if errors:
@@ -398,29 +428,25 @@ class ComplexBaseField(BaseField):
             self.field.owner_document = owner_document
         self._owner_document = owner_document

-    def _get_owner_document(self, owner_document):
-        self._owner_document = owner_document
-
-    owner_document = property(_get_owner_document, _set_owner_document)


 class ObjectIdField(BaseField):
-    """A field wrapper around MongoDB's ObjectIds.
-    """
+    """A field wrapper around MongoDB's ObjectIds."""

     def to_python(self, value):
-        if not isinstance(value, ObjectId):
-            value = ObjectId(value)
+        try:
+            if not isinstance(value, ObjectId):
+                value = ObjectId(value)
+        except Exception:
+            pass
         return value

     def to_mongo(self, value):
         if not isinstance(value, ObjectId):
             try:
-                return ObjectId(unicode(value))
-            except Exception, e:
+                return ObjectId(six.text_type(value))
+            except Exception as e:
                 # e.message attribute has been deprecated since Python 2.6
-                self.error(unicode(e))
+                self.error(six.text_type(e))
         return value

     def prepare_query_value(self, op, value):
@@ -428,34 +454,32 @@ class ObjectIdField(BaseField):

     def validate(self, value):
         try:
-            ObjectId(unicode(value))
-        except:
+            ObjectId(six.text_type(value))
+        except Exception:
             self.error('Invalid Object ID')


 class GeoJsonBaseField(BaseField):
     """A geo json field storing a geojson style object.

     .. versionadded:: 0.8
     """

     _geo_index = pymongo.GEOSPHERE
-    _type = "GeoBase"
+    _type = 'GeoBase'

     def __init__(self, auto_index=True, *args, **kwargs):
         """
-        :param bool auto_index: Automatically create a "2dsphere" index.\
+        :param bool auto_index: Automatically create a '2dsphere' index.\
             Defaults to `True`.
         """
-        self._name = "%sField" % self._type
+        self._name = '%sField' % self._type
         if not auto_index:
             self._geo_index = False
         super(GeoJsonBaseField, self).__init__(*args, **kwargs)

     def validate(self, value):
-        """Validate the GeoJson object based on its type
-        """
+        """Validate the GeoJson object based on its type."""
         if isinstance(value, dict):
             if set(value.keys()) == set(['type', 'coordinates']):
                 if value['type'] != self._type:
@@ -470,7 +494,7 @@ class GeoJsonBaseField(BaseField):
             self.error('%s can only accept lists of [x, y]' % self._name)
             return

-        validate = getattr(self, "_validate_%s" % self._type.lower())
+        validate = getattr(self, '_validate_%s' % self._type.lower())
         error = validate(value)
         if error:
             self.error(error)
@@ -482,8 +506,8 @@ class GeoJsonBaseField(BaseField):
         # Quick and dirty validator
         try:
             value[0][0][0]
-        except:
-            return "Invalid Polygon must contain at least one valid linestring"
+        except (TypeError, IndexError):
+            return 'Invalid Polygon must contain at least one valid linestring'

         errors = []
         for val in value:
@@ -494,20 +518,20 @@ class GeoJsonBaseField(BaseField):
                 errors.append(error)
         if errors:
             if top_level:
-                return "Invalid Polygon:\n%s" % ", ".join(errors)
+                return 'Invalid Polygon:\n%s' % ', '.join(errors)
             else:
-                return "%s" % ", ".join(errors)
+                return '%s' % ', '.join(errors)

     def _validate_linestring(self, value, top_level=True):
-        """Validates a linestring"""
+        """Validate a linestring."""
         if not isinstance(value, (list, tuple)):
             return 'LineStrings must contain list of coordinate pairs'

         # Quick and dirty validator
         try:
             value[0][0]
-        except:
-            return "Invalid LineString must contain at least one valid point"
+        except (TypeError, IndexError):
+            return 'Invalid LineString must contain at least one valid point'

         errors = []
         for val in value:
@@ -516,19 +540,19 @@ class GeoJsonBaseField(BaseField):
                 errors.append(error)
         if errors:
             if top_level:
-                return "Invalid LineString:\n%s" % ", ".join(errors)
+                return 'Invalid LineString:\n%s' % ', '.join(errors)
             else:
-                return "%s" % ", ".join(errors)
+                return '%s' % ', '.join(errors)

     def _validate_point(self, value):
         """Validate each set of coords"""
         if not isinstance(value, (list, tuple)):
             return 'Points must be a list of coordinate pairs'
         elif not len(value) == 2:
-            return "Value (%s) must be a two-dimensional point" % repr(value)
+            return 'Value (%s) must be a two-dimensional point' % repr(value)
         elif (not isinstance(value[0], (float, int)) or
               not isinstance(value[1], (float, int))):
-            return "Both values (%s) in point must be float or int" % repr(value)
+            return 'Both values (%s) in point must be float or int' % repr(value)

     def _validate_multipoint(self, value):
         if not isinstance(value, (list, tuple)):
@@ -537,8 +561,8 @@ class GeoJsonBaseField(BaseField):
         # Quick and dirty validator
         try:
             value[0][0]
-        except:
-            return "Invalid MultiPoint must contain at least one valid point"
+        except (TypeError, IndexError):
+            return 'Invalid MultiPoint must contain at least one valid point'

         errors = []
         for point in value:
@@ -547,7 +571,7 @@ class GeoJsonBaseField(BaseField):
                 errors.append(error)

         if errors:
-            return "%s" % ", ".join(errors)
+            return '%s' % ', '.join(errors)

     def _validate_multilinestring(self, value, top_level=True):
         if not isinstance(value, (list, tuple)):
@@ -556,8 +580,8 @@ class GeoJsonBaseField(BaseField):
         # Quick and dirty validator
         try:
             value[0][0][0]
-        except:
-            return "Invalid MultiLineString must contain at least one valid linestring"
+        except (TypeError, IndexError):
+            return 'Invalid MultiLineString must contain at least one valid linestring'

         errors = []
         for linestring in value:
@@ -567,9 +591,9 @@ class GeoJsonBaseField(BaseField):

         if errors:
             if top_level:
-                return "Invalid MultiLineString:\n%s" % ", ".join(errors)
+                return 'Invalid MultiLineString:\n%s' % ', '.join(errors)
             else:
-                return "%s" % ", ".join(errors)
+                return '%s' % ', '.join(errors)

     def _validate_multipolygon(self, value):
         if not isinstance(value, (list, tuple)):
@@ -578,8 +602,8 @@ class GeoJsonBaseField(BaseField):
         # Quick and dirty validator
         try:
             value[0][0][0][0]
-        except:
-            return "Invalid MultiPolygon must contain at least one valid Polygon"
+        except (TypeError, IndexError):
+            return 'Invalid MultiPolygon must contain at least one valid Polygon'

         errors = []
         for polygon in value:
@@ -588,9 +612,9 @@ class GeoJsonBaseField(BaseField):
                 errors.append(error)

         if errors:
-            return "Invalid MultiPolygon:\n%s" % ", ".join(errors)
+            return 'Invalid MultiPolygon:\n%s' % ', '.join(errors)

     def to_mongo(self, value):
         if isinstance(value, dict):
             return value
-        return SON([("type", self._type), ("coordinates", value)])
+        return SON([('type', self._type), ('coordinates', value)])
@@ -1,25 +1,23 @@
|
||||
import warnings
|
||||
|
||||
import pymongo
|
||||
import six
|
||||
|
||||
from mongoengine.base.common import _document_registry
|
||||
from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField
|
||||
from mongoengine.common import _import_class
|
||||
from mongoengine.errors import InvalidDocumentError
|
||||
from mongoengine.python_support import PY3
|
||||
from mongoengine.queryset import (DO_NOTHING, DoesNotExist,
|
||||
MultipleObjectsReturned,
|
||||
QuerySet, QuerySetManager)
|
||||
QuerySetManager)
|
||||
|
||||
from mongoengine.base.common import _document_registry, ALLOW_INHERITANCE
|
||||
from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField
|
||||
|
||||
__all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass')
|
||||
|
||||
|
||||
class DocumentMetaclass(type):
|
||||
"""Metaclass for all documents."""
|
||||
|
||||
"""Metaclass for all documents.
|
||||
"""
|
||||
|
||||
# TODO lower complexity of this method
|
||||
def __new__(cls, name, bases, attrs):
|
||||
flattened_bases = cls._get_bases(bases)
|
||||
super_new = super(DocumentMetaclass, cls).__new__
|
||||
@@ -48,7 +46,8 @@ class DocumentMetaclass(type):
|
||||
attrs['_meta'] = meta
|
||||
attrs['_meta']['abstract'] = False # 789: EmbeddedDocument shouldn't inherit abstract
|
||||
|
||||
if attrs['_meta'].get('allow_inheritance', ALLOW_INHERITANCE):
|
||||
# If allow_inheritance is True, add a "_cls" string field to the attrs
|
||||
if attrs['_meta'].get('allow_inheritance'):
|
||||
StringField = _import_class('StringField')
|
||||
attrs['_cls'] = StringField()
|
||||
|
||||
@@ -90,16 +89,17 @@ class DocumentMetaclass(type):
|
||||
# Ensure no duplicate db_fields
|
||||
duplicate_db_fields = [k for k, v in field_names.items() if v > 1]
|
||||
if duplicate_db_fields:
|
||||
msg = ("Multiple db_fields defined for: %s " %
|
||||
", ".join(duplicate_db_fields))
|
||||
msg = ('Multiple db_fields defined for: %s ' %
|
||||
', '.join(duplicate_db_fields))
|
||||
raise InvalidDocumentError(msg)
|
||||
|
||||
# Set _fields and db_field maps
|
||||
attrs['_fields'] = doc_fields
|
||||
attrs['_db_field_map'] = dict([(k, getattr(v, 'db_field', k))
|
||||
for k, v in doc_fields.iteritems()])
|
||||
attrs['_reverse_db_field_map'] = dict(
|
||||
(v, k) for k, v in attrs['_db_field_map'].iteritems())
|
||||
attrs['_db_field_map'] = {k: getattr(v, 'db_field', k)
|
||||
for k, v in doc_fields.items()}
|
||||
attrs['_reverse_db_field_map'] = {
|
||||
v: k for k, v in attrs['_db_field_map'].items()
|
||||
}
|
||||
|
||||
attrs['_fields_ordered'] = tuple(i[1] for i in sorted(
|
||||
(v.creation_counter, v.name)
|
||||
@@ -113,16 +113,14 @@ class DocumentMetaclass(type):
|
||||
for base in flattened_bases:
|
||||
if (not getattr(base, '_is_base_cls', True) and
|
||||
not getattr(base, '_meta', {}).get('abstract', True)):
|
||||
# Collate heirarchy for _cls and _subclasses
|
||||
# Collate hierarchy for _cls and _subclasses
|
||||
class_name.append(base.__name__)
|
||||
|
||||
if hasattr(base, '_meta'):
|
||||
# Warn if allow_inheritance isn't set and prevent
|
||||
# inheritance of classes where inheritance is set to False
|
||||
allow_inheritance = base._meta.get('allow_inheritance',
|
||||
ALLOW_INHERITANCE)
|
||||
if (allow_inheritance is not True and
|
||||
not base._meta.get('abstract')):
|
||||
allow_inheritance = base._meta.get('allow_inheritance')
|
||||
if not allow_inheritance and not base._meta.get('abstract'):
|
||||
raise ValueError('Document %s may not be subclassed' %
|
||||
base.__name__)
|
||||
|
||||
@@ -146,7 +144,7 @@ class DocumentMetaclass(type):
|
||||
for base in document_bases:
|
||||
if _cls not in base._subclasses:
|
||||
base._subclasses += (_cls,)
|
||||
base._types = base._subclasses # TODO depreciate _types
|
||||
base._types = base._subclasses # TODO depreciate _types
|
||||
|
||||
(Document, EmbeddedDocument, DictField,
|
||||
CachedReferenceField) = cls._import_classes()
|
||||
@@ -164,8 +162,8 @@ class DocumentMetaclass(type):
|
||||
# module continues to use im_func and im_self, so the code below
|
||||
# copies __func__ into im_func and __self__ into im_self for
|
||||
# classmethod objects in Document derived classes.
|
||||
if PY3:
|
||||
for key, val in new_class.__dict__.items():
|
||||
if six.PY3:
|
||||
for val in new_class.__dict__.values():
|
||||
if isinstance(val, classmethod):
|
||||
f = val.__get__(new_class)
|
||||
if hasattr(f, '__func__') and not hasattr(f, 'im_func'):
|
||||
@@ -176,16 +174,17 @@ class DocumentMetaclass(type):
|
||||
# Handle delete rules
|
||||
for field in new_class._fields.itervalues():
|
||||
f = field
|
||||
f.owner_document = new_class
|
||||
if f.owner_document is None:
|
||||
f.owner_document = new_class
|
||||
delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING)
|
||||
if isinstance(f, CachedReferenceField):
|
||||
|
||||
if issubclass(new_class, EmbeddedDocument):
|
||||
raise InvalidDocumentError(
|
||||
"CachedReferenceFields is not allowed in EmbeddedDocuments")
|
||||
raise InvalidDocumentError('CachedReferenceFields is not '
|
||||
'allowed in EmbeddedDocuments')
|
||||
if not f.document_type:
|
||||
raise InvalidDocumentError(
|
||||
"Document is not avaiable to sync")
|
||||
'Document is not available to sync')
|
||||
|
||||
if f.auto_sync:
|
||||
f.start_listener()
|
||||
@@ -197,8 +196,8 @@ class DocumentMetaclass(type):
|
||||
'reverse_delete_rule',
|
||||
DO_NOTHING)
|
||||
if isinstance(f, DictField) and delete_rule != DO_NOTHING:
|
||||
msg = ("Reverse delete rules are not supported "
|
||||
"for %s (field: %s)" %
|
||||
msg = ('Reverse delete rules are not supported '
|
||||
'for %s (field: %s)' %
|
||||
(field.__class__.__name__, field.name))
|
||||
raise InvalidDocumentError(msg)
|
||||
|
||||
@@ -206,16 +205,16 @@ class DocumentMetaclass(type):
|
||||
|
||||
if delete_rule != DO_NOTHING:
|
||||
if issubclass(new_class, EmbeddedDocument):
|
||||
msg = ("Reverse delete rules are not supported for "
|
||||
"EmbeddedDocuments (field: %s)" % field.name)
|
||||
msg = ('Reverse delete rules are not supported for '
|
||||
'EmbeddedDocuments (field: %s)' % field.name)
|
||||
raise InvalidDocumentError(msg)
|
||||
f.document_type.register_delete_rule(new_class,
|
||||
field.name, delete_rule)
|
||||
|
||||
if (field.name and hasattr(Document, field.name) and
|
||||
EmbeddedDocument not in new_class.mro()):
|
||||
msg = ("%s is a document method and not a valid "
|
||||
"field name" % field.name)
|
||||
msg = ('%s is a document method and not a valid '
|
||||
'field name' % field.name)
|
||||
raise InvalidDocumentError(msg)
|
||||
|
||||
return new_class
|
||||
@@ -247,11 +246,10 @@ class DocumentMetaclass(type):
|
||||
EmbeddedDocument = _import_class('EmbeddedDocument')
|
||||
DictField = _import_class('DictField')
|
||||
CachedReferenceField = _import_class('CachedReferenceField')
|
||||
return (Document, EmbeddedDocument, DictField, CachedReferenceField)
|
||||
return Document, EmbeddedDocument, DictField, CachedReferenceField
|
||||
|
||||
|
||||
class TopLevelDocumentMetaclass(DocumentMetaclass):
|
||||
|
||||
"""Metaclass for top-level documents (i.e. documents that have their own
|
||||
collection in the database.
|
||||
"""
|
||||
@@ -261,7 +259,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
||||
super_new = super(TopLevelDocumentMetaclass, cls).__new__
|
||||
|
||||
# Set default _meta data if base class, otherwise get user defined meta
|
||||
if (attrs.get('my_metaclass') == TopLevelDocumentMetaclass):
|
||||
if attrs.get('my_metaclass') == TopLevelDocumentMetaclass:
|
||||
# defaults
|
||||
attrs['_meta'] = {
|
||||
'abstract': True,
|
||||
@@ -274,13 +272,18 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
||||
'index_drop_dups': False,
|
||||
'index_opts': None,
|
||||
'delete_rules': None,
|
||||
|
||||
# allow_inheritance can be True, False, and None. True means
|
||||
# "allow inheritance", False means "don't allow inheritance",
|
||||
# None means "do whatever your parent does, or don't allow
|
||||
# inheritance if you're a top-level class".
|
||||
'allow_inheritance': None,
|
||||
}
|
||||
attrs['_is_base_cls'] = True
|
||||
attrs['_meta'].update(attrs.get('meta', {}))
|
||||
else:
|
||||
attrs['_meta'] = attrs.get('meta', {})
|
||||
# Explictly set abstract to false unless set
|
||||
# Explicitly set abstract to false unless set
|
||||
attrs['_meta']['abstract'] = attrs['_meta'].get('abstract', False)
|
||||
attrs['_is_base_cls'] = False
|
||||
|
||||
@@ -295,7 +298,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
||||
|
||||
# Clean up top level meta
|
||||
if 'meta' in attrs:
|
||||
del(attrs['meta'])
|
||||
del attrs['meta']
|
||||
|
||||
# Find the parent document class
|
||||
parent_doc_cls = [b for b in flattened_bases
|
||||
@@ -304,17 +307,17 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
||||
|
||||
# Prevent classes setting collection different to their parents
|
||||
# If parent wasn't an abstract class
|
||||
if (parent_doc_cls and 'collection' in attrs.get('_meta', {})
|
||||
and not parent_doc_cls._meta.get('abstract', True)):
|
||||
msg = "Trying to set a collection on a subclass (%s)" % name
|
||||
if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) and
|
||||
not parent_doc_cls._meta.get('abstract', True)):
|
||||
msg = 'Trying to set a collection on a subclass (%s)' % name
|
||||
warnings.warn(msg, SyntaxWarning)
|
||||
del(attrs['_meta']['collection'])
|
||||
del attrs['_meta']['collection']
|
||||
|
||||
# Ensure abstract documents have abstract bases
|
||||
if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'):
|
||||
if (parent_doc_cls and
|
||||
not parent_doc_cls._meta.get('abstract', False)):
|
||||
msg = "Abstract document cannot have non-abstract base"
|
||||
msg = 'Abstract document cannot have non-abstract base'
|
||||
raise ValueError(msg)
|
||||
return super_new(cls, name, bases, attrs)
|
||||
|
||||
@@ -337,12 +340,16 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
||||
|
||||
meta.merge(attrs.get('_meta', {})) # Top level meta
|
||||
|
||||
# Only simple classes (direct subclasses of Document)
|
||||
# may set allow_inheritance to False
|
||||
# Only simple classes (i.e. direct subclasses of Document) may set
|
||||
# allow_inheritance to False. If the base Document allows inheritance,
|
||||
# none of its subclasses can override allow_inheritance to False.
|
||||
simple_class = all([b._meta.get('abstract')
|
||||
for b in flattened_bases if hasattr(b, '_meta')])
|
||||
if (not simple_class and meta['allow_inheritance'] is False and
|
||||
not meta['abstract']):
|
||||
if (
|
||||
not simple_class and
|
||||
meta['allow_inheritance'] is False and
|
||||
not meta['abstract']
|
||||
):
|
||||
raise ValueError('Only direct subclasses of Document may set '
|
||||
'"allow_inheritance" to False')
|
||||
|
||||
@@ -386,15 +393,17 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
            new_class._auto_id_field = getattr(parent_doc_cls,
                                               '_auto_id_field', False)
        if not new_class._meta.get('id_field'):
            # After 0.10, find not existing names, instead of overwriting
            id_name, id_db_name = cls.get_auto_id_names(new_class)
            new_class._auto_id_field = True
            new_class._meta['id_field'] = 'id'
            new_class._fields['id'] = ObjectIdField(db_field='_id')
            new_class._fields['id'].name = 'id'
            new_class.id = new_class._fields['id']

        # Prepend id field to _fields_ordered
        if 'id' in new_class._fields and 'id' not in new_class._fields_ordered:
            new_class._fields_ordered = ('id', ) + new_class._fields_ordered
            new_class._meta['id_field'] = id_name
            new_class._fields[id_name] = ObjectIdField(db_field=id_db_name)
            new_class._fields[id_name].name = id_name
            new_class.id = new_class._fields[id_name]
            new_class._db_field_map[id_name] = id_db_name
            new_class._reverse_db_field_map[id_db_name] = id_name
            # Prepend id field to _fields_ordered
            new_class._fields_ordered = (id_name, ) + new_class._fields_ordered

        # Merge in exceptions with parent hierarchy
        exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned)
@@ -409,9 +418,22 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):

        return new_class

    @classmethod
    def get_auto_id_names(cls, new_class):
        id_name, id_db_name = ('id', '_id')
        if id_name not in new_class._fields and \
           id_db_name not in (v.db_field for v in new_class._fields.values()):
            return id_name, id_db_name
        id_basename, id_db_basename, i = 'auto_id', '_auto_id', 0
        while id_name in new_class._fields or \
              id_db_name in (v.db_field for v in new_class._fields.values()):
            id_name = '{0}_{1}'.format(id_basename, i)
            id_db_name = '{0}_{1}'.format(id_db_basename, i)
            i += 1
        return id_name, id_db_name

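As a rough sketch of what get_auto_id_names() buys you: when a document already defines a field named id (or a field stored as _id), the metaclass should now pick a fallback such as auto_id_0 instead of silently overwriting the user's field. Hypothetical example, assuming that fallback behavior::

    from mongoengine import Document, StringField

    class LegacyDoc(Document):
        # 'id' is taken by a user-defined field, so the auto primary key
        # is expected to be named 'auto_id_0' (db_field '_auto_id_0').
        id = StringField(db_field='id_str')

    print(LegacyDoc._meta['id_field'])  # expected: 'auto_id_0'
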
class MetaDict(dict):

    """Custom dictionary for meta classes.
    Handles the merging of set indexes
    """
@@ -426,6 +448,5 @@ class MetaDict(dict):


class BasesTuple(tuple):

    """Special class to handle introspection of bases tuple in __new__"""
    pass

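The body of MetaDict.merge is elided by the hunk above; its documented job is merging "set indexes". Assuming the merge treats indexes as additive while other keys overwrite, the observable behavior is roughly::

    meta = MetaDict({'indexes': [{'fields': ['name']}]})
    meta.merge({'indexes': [{'fields': ['created']}], 'ordering': ['-created']})

    assert len(meta['indexes']) == 2          # indexes accumulate
    assert meta['ordering'] == ['-created']   # other keys overwrite
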
@@ -1,15 +1,25 @@
import pymongo
from pymongo import MongoClient, MongoReplicaSetClient, uri_parser
from pymongo import MongoClient, ReadPreference, uri_parser
import six

from mongoengine.python_support import IS_PYMONGO_3

__all__ = ['ConnectionError', 'connect', 'register_connection',
__all__ = ['MongoEngineConnectionError', 'connect', 'register_connection',
           'DEFAULT_CONNECTION_NAME']


DEFAULT_CONNECTION_NAME = 'default'

if IS_PYMONGO_3:
    READ_PREFERENCE = ReadPreference.PRIMARY
else:
    from pymongo import MongoReplicaSetClient
    READ_PREFERENCE = False

class ConnectionError(Exception):

class MongoEngineConnectionError(Exception):
    """Error raised when the database connection can't be established or
    when a connection with a requested alias can't be retrieved.
    """
    pass

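Renaming ConnectionError to MongoEngineConnectionError avoids shadowing Python 3's built-in ConnectionError; callers must catch the new name. For example::

    from mongoengine.connection import MongoEngineConnectionError, get_connection

    try:
        get_connection('never-registered-alias')
    except MongoEngineConnectionError as exc:
        print('connection failed: %s' % exc)
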
@@ -19,8 +29,10 @@ _dbs = {}


def register_connection(alias, name=None, host=None, port=None,
                        read_preference=False,
                        username=None, password=None, authentication_source=None,
                        read_preference=READ_PREFERENCE,
                        username=None, password=None,
                        authentication_source=None,
                        authentication_mechanism=None,
                        **kwargs):
    """Add a connection.

@@ -34,11 +46,15 @@ def register_connection(alias, name=None, host=None, port=None,
    :param username: username to authenticate with
    :param password: password to authenticate with
    :param authentication_source: database to authenticate against
    :param authentication_mechanism: database authentication mechanisms.
        By default, use SCRAM-SHA-1 with MongoDB 3.0 and later,
        MONGODB-CR (MongoDB Challenge Response protocol) for older servers.
    :param is_mock: explicitly use mongomock for this connection
        (can also be done by using `mongomock://` as db host prefix)
    :param kwargs: allow ad-hoc parameters to be passed into the pymongo driver

    .. versionchanged:: 0.10.6 - added mongomock support
    """
    global _connection_settings

    conn_settings = {
        'name': name or 'test',
        'host': host or 'localhost',
@@ -46,20 +62,48 @@ def register_connection(alias, name=None, host=None, port=None,
        'read_preference': read_preference,
        'username': username,
        'password': password,
        'authentication_source': authentication_source
        'authentication_source': authentication_source,
        'authentication_mechanism': authentication_mechanism
    }

    # Handle uri style connections
    if "://" in conn_settings['host']:
        uri_dict = uri_parser.parse_uri(conn_settings['host'])
        conn_settings.update({
            'name': uri_dict.get('database') or name,
            'username': uri_dict.get('username'),
            'password': uri_dict.get('password'),
            'read_preference': read_preference,
        })
        if "replicaSet" in conn_settings['host']:
            conn_settings['replicaSet'] = True
    conn_host = conn_settings['host']
    # host can be a list or a string, so if string, force to a list
    if isinstance(conn_host, six.string_types):
        conn_host = [conn_host]

    resolved_hosts = []
    for entity in conn_host:

        # Handle Mongomock
        if entity.startswith('mongomock://'):
            conn_settings['is_mock'] = True
            # `mongomock://` is not a valid url prefix and must be replaced by `mongodb://`
            resolved_hosts.append(entity.replace('mongomock://', 'mongodb://', 1))

        # Handle URI style connections, only updating connection params which
        # were explicitly specified in the URI.
        elif '://' in entity:
            uri_dict = uri_parser.parse_uri(entity)
            resolved_hosts.append(entity)

            if uri_dict.get('database'):
                conn_settings['name'] = uri_dict.get('database')

            for param in ('read_preference', 'username', 'password'):
                if uri_dict.get(param):
                    conn_settings[param] = uri_dict[param]

            uri_options = uri_dict['options']
            if 'replicaset' in uri_options:
                conn_settings['replicaSet'] = True
            if 'authsource' in uri_options:
                conn_settings['authentication_source'] = uri_options['authsource']
            if 'authmechanism' in uri_options:
                conn_settings['authentication_mechanism'] = uri_options['authmechanism']
        else:
            resolved_hosts.append(entity)
    conn_settings['host'] = resolved_hosts

    # Deprecated parameters that should not be passed on
    kwargs.pop('slaves', None)
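With the mongomock:// handling above, a mock connection can be requested through the host prefix (or, per the docstring, via the is_mock flag). A short sketch, assuming the mongomock package is installed::

    from mongoengine import connect

    connect('testdb', host='mongomock://localhost')   # via the host prefix
    connect('testdb2', host='localhost', is_mock=True,
            alias='mock2')                            # via the documented flag
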
@@ -70,65 +114,109 @@ def register_connection(alias, name=None, host=None, port=None,


def disconnect(alias=DEFAULT_CONNECTION_NAME):
    global _connections
    global _dbs

    """Close the connection with a given alias."""
    if alias in _connections:
        get_connection(alias=alias).disconnect()
        get_connection(alias=alias).close()
        del _connections[alias]
    if alias in _dbs:
        del _dbs[alias]


def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
    global _connections
    """Return a connection with a given alias."""

    # Connect to the database if not already connected
    if reconnect:
        disconnect(alias)

    if alias not in _connections:
        if alias not in _connection_settings:
    # If the requested alias already exists in the _connections list, return
    # it immediately.
    if alias in _connections:
        return _connections[alias]

    # Validate that the requested alias exists in the _connection_settings.
    # Raise MongoEngineConnectionError if it doesn't.
    if alias not in _connection_settings:
        if alias == DEFAULT_CONNECTION_NAME:
            msg = 'You have not defined a default connection'
        else:
            msg = 'Connection with alias "%s" has not been defined' % alias
            if alias == DEFAULT_CONNECTION_NAME:
                msg = 'You have not defined a default connection'
            raise ConnectionError(msg)
        conn_settings = _connection_settings[alias].copy()
        raise MongoEngineConnectionError(msg)

        conn_settings.pop('name', None)
        conn_settings.pop('username', None)
        conn_settings.pop('password', None)
        conn_settings.pop('authentication_source', None)
    def _clean_settings(settings_dict):
        irrelevant_fields = set([
            'name', 'username', 'password', 'authentication_source',
            'authentication_mechanism'
        ])
        return {
            k: v for k, v in settings_dict.items()
            if k not in irrelevant_fields
        }

    # Retrieve a copy of the connection settings associated with the requested
    # alias and remove the database name and authentication info (we don't
    # care about them at this point).
    conn_settings = _clean_settings(_connection_settings[alias].copy())

    # Determine if we should use PyMongo's or mongomock's MongoClient.
    is_mock = conn_settings.pop('is_mock', False)
    if is_mock:
        try:
            import mongomock
        except ImportError:
            raise RuntimeError('You need mongomock installed to mock '
                               'MongoEngine.')
        connection_class = mongomock.MongoClient
    else:
        connection_class = MongoClient

    # Handle replica set connections
    if 'replicaSet' in conn_settings:
        conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)

        # Discard port since it can't be used on MongoReplicaSetClient
        conn_settings.pop('port', None)
        # Discard replicaSet if not base string
        if not isinstance(conn_settings['replicaSet'], basestring):
            conn_settings.pop('replicaSet', None)
        connection_class = MongoReplicaSetClient

        # Discard replicaSet if it's not a string
        if not isinstance(conn_settings['replicaSet'], six.string_types):
            del conn_settings['replicaSet']

        # For replica set connections with PyMongo 2.x, use
        # MongoReplicaSetClient.
        # TODO remove this once we stop supporting PyMongo 2.x.
        if not IS_PYMONGO_3:
            connection_class = MongoReplicaSetClient
            conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)

    # Iterate over all of the connection settings and if a connection with
    # the same parameters is already established, use it instead of creating
    # a new one.
    existing_connection = None
    connection_settings_iterator = (
        (db_alias, settings.copy())
        for db_alias, settings in _connection_settings.items()
    )
    for db_alias, connection_settings in connection_settings_iterator:
        connection_settings = _clean_settings(connection_settings)
        if conn_settings == connection_settings and _connections.get(db_alias):
            existing_connection = _connections[db_alias]
            break

    # If an existing connection was found, assign it to the new alias
    if existing_connection:
        _connections[alias] = existing_connection
    else:
        # Otherwise, create the new connection for this alias. Raise
        # MongoEngineConnectionError if it can't be established.
        try:
            connection = None
            # check for shared connections
            connection_settings_iterator = ((db_alias, settings.copy()) for db_alias, settings in _connection_settings.iteritems())
            for db_alias, connection_settings in connection_settings_iterator:
                connection_settings.pop('name', None)
                connection_settings.pop('username', None)
                connection_settings.pop('password', None)
                if conn_settings == connection_settings and _connections.get(db_alias, None):
                    connection = _connections[db_alias]
                    break
            _connections[alias] = connection_class(**conn_settings)
        except Exception as e:
            raise MongoEngineConnectionError(
                'Cannot connect to database %s :\n%s' % (alias, e))

            _connections[alias] = connection if connection else connection_class(**conn_settings)
        except Exception, e:
            raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e))
    return _connections[alias]

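The rewritten get_connection() deduplicates clients: two aliases whose cleaned settings (name and auth info stripped) are identical now share one underlying MongoClient. Roughly::

    from mongoengine.connection import register_connection, get_connection

    register_connection('default', name='db1', host='localhost', port=27017)
    register_connection('reports', name='db2', host='localhost', port=27017)

    # Same host/port, so the second alias reuses the first client.
    assert get_connection('default') is get_connection('reports')
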
def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
    global _dbs
    if reconnect:
        disconnect(alias)

@@ -136,11 +224,13 @@ def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
        conn = get_connection(alias)
        conn_settings = _connection_settings[alias]
        db = conn[conn_settings['name']]
        auth_kwargs = {'source': conn_settings['authentication_source']}
        if conn_settings['authentication_mechanism'] is not None:
            auth_kwargs['mechanism'] = conn_settings['authentication_mechanism']
        # Authenticate if necessary
        if conn_settings['username'] and conn_settings['password']:
            db.authenticate(conn_settings['username'],
                            conn_settings['password'],
                            source=conn_settings['authentication_source'])
        if conn_settings['username'] and (conn_settings['password'] or
                                          conn_settings['authentication_mechanism'] == 'MONGODB-X509'):
            db.authenticate(conn_settings['username'], conn_settings['password'], **auth_kwargs)
        _dbs[alias] = db
    return _dbs[alias]

@@ -157,7 +247,6 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs):

    .. versionchanged:: 0.6 - added multiple database support.
    """
    global _connections
    if alias not in _connections:
        register_connection(alias, db, **kwargs)

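get_db() above now forwards an explicit authentication mechanism to PyMongo's db.authenticate(). A hedged usage sketch (host and credentials are placeholders)::

    from mongoengine import connect
    from mongoengine.connection import get_db

    connect('mydb', host='localhost', username='app', password='secret',
            authentication_source='admin',
            authentication_mechanism='SCRAM-SHA-1')

    db = get_db()  # authenticates with source='admin', mechanism='SCRAM-SHA-1'
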
@@ -2,12 +2,12 @@ from mongoengine.common import _import_class
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db


__all__ = ("switch_db", "switch_collection", "no_dereference",
           "no_sub_classes", "query_counter")
__all__ = ('switch_db', 'switch_collection', 'no_dereference',
           'no_sub_classes', 'query_counter')


class switch_db(object):
    """ switch_db alias context manager.
    """switch_db alias context manager.

    Example ::

@@ -18,15 +18,14 @@ class switch_db(object):
        class Group(Document):
            name = StringField()

        Group(name="test").save()  # Saves in the default db
        Group(name='test').save()  # Saves in the default db

        with switch_db(Group, 'testdb-1') as Group:
            Group(name="hello testdb!").save()  # Saves in testdb-1

            Group(name='hello testdb!').save()  # Saves in testdb-1
    """

    def __init__(self, cls, db_alias):
        """ Construct the switch_db context manager
        """Construct the switch_db context manager

        :param cls: the class to change the registered db
        :param db_alias: the name of the specific database to use
@@ -34,37 +33,36 @@ class switch_db(object):
        self.cls = cls
        self.collection = cls._get_collection()
        self.db_alias = db_alias
        self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME)
        self.ori_db_alias = cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME)

    def __enter__(self):
        """ change the db_alias and clear the cached collection """
        self.cls._meta["db_alias"] = self.db_alias
        """Change the db_alias and clear the cached collection."""
        self.cls._meta['db_alias'] = self.db_alias
        self.cls._collection = None
        return self.cls

    def __exit__(self, t, value, traceback):
        """ Reset the db_alias and collection """
        self.cls._meta["db_alias"] = self.ori_db_alias
        """Reset the db_alias and collection."""
        self.cls._meta['db_alias'] = self.ori_db_alias
        self.cls._collection = self.collection
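One caveat the switch_db docstring implies but does not spell out: the target alias has to be registered before the context manager is entered. For instance::

    from mongoengine import Document, StringField, connect, register_connection
    from mongoengine.context_managers import switch_db

    connect('main-db')                                # default alias
    register_connection('testdb-1', name='test-db')   # alias used below

    class Group(Document):
        name = StringField()

    with switch_db(Group, 'testdb-1') as Group:
        Group(name='hello testdb!').save()            # saved via 'testdb-1'
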
class switch_collection(object):
    """ switch_collection alias context manager.
    """switch_collection alias context manager.

    Example ::

        class Group(Document):
            name = StringField()

        Group(name="test").save()  # Saves in the default db
        Group(name='test').save()  # Saves in the default db

        with switch_collection(Group, 'group1') as Group:
            Group(name="hello testdb!").save()  # Saves in group1 collection

            Group(name='hello testdb!').save()  # Saves in group1 collection
    """

    def __init__(self, cls, collection_name):
        """ Construct the switch_collection context manager
        """Construct the switch_collection context manager.

        :param cls: the class to change the registered db
        :param collection_name: the name of the collection to use
@@ -75,7 +73,7 @@ class switch_collection(object):
        self.collection_name = collection_name

    def __enter__(self):
        """ change the _get_collection_name and clear the cached collection """
        """Change the _get_collection_name and clear the cached collection."""

        @classmethod
        def _get_collection_name(cls):
@@ -86,24 +84,23 @@ class switch_collection(object):
        return self.cls

    def __exit__(self, t, value, traceback):
        """ Reset the collection """
        """Reset the collection."""
        self.cls._collection = self.ori_collection
        self.cls._get_collection_name = self.ori_get_collection_name


class no_dereference(object):
    """ no_dereference context manager.
    """no_dereference context manager.

    Turns off all dereferencing in Documents for the duration of the context
    manager::

        with no_dereference(Group) as Group:
            Group.objects.find()

    """

    def __init__(self, cls):
        """ Construct the no_dereference context manager.
        """Construct the no_dereference context manager.

        :param cls: the class to turn dereferencing off on
        """
@@ -119,103 +116,102 @@ class no_dereference(object):
                              ComplexBaseField))]

    def __enter__(self):
        """ change the objects default and _auto_dereference values"""
        """Change the objects default and _auto_dereference values."""
        for field in self.deref_fields:
            self.cls._fields[field]._auto_dereference = False
        return self.cls

    def __exit__(self, t, value, traceback):
        """ Reset the default and _auto_dereference values"""
        """Reset the default and _auto_dereference values."""
        for field in self.deref_fields:
            self.cls._fields[field]._auto_dereference = True
        return self.cls


class no_sub_classes(object):
    """ no_sub_classes context manager.
    """no_sub_classes context manager.

    Only returns instances of this class and no sub (inherited) classes::

        with no_sub_classes(Group) as Group:
            Group.objects.find()

    """

    def __init__(self, cls):
        """ Construct the no_sub_classes context manager.
        """Construct the no_sub_classes context manager.

        :param cls: the class to turn querying sub classes on
        """
        self.cls = cls

    def __enter__(self):
        """ change the objects default and _auto_dereference values"""
        """Change the objects default and _auto_dereference values."""
        self.cls._all_subclasses = self.cls._subclasses
        self.cls._subclasses = (self.cls,)
        return self.cls

    def __exit__(self, t, value, traceback):
        """ Reset the default and _auto_dereference values"""
        """Reset the default and _auto_dereference values."""
        self.cls._subclasses = self.cls._all_subclasses
        delattr(self.cls, '_all_subclasses')
        return self.cls

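For context, no_dereference flips _auto_dereference off on every reference-like field and restores it on exit, so reference fields come back as raw DBRefs inside the block. Illustrative classes::

    from mongoengine import Document, ReferenceField, StringField
    from mongoengine.context_managers import no_dereference

    class Member(Document):
        name = StringField()

    class Group(Document):
        member = ReferenceField(Member)

    with no_dereference(Group) as Group:
        group = Group.objects.first()
        # group.member stays a DBRef here instead of being auto-fetched
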
class query_counter(object):
    """ Query_counter context manager to get the number of queries. """
    """Query_counter context manager to get the number of queries."""

    def __init__(self):
        """ Construct the query_counter. """
        """Construct the query_counter."""
        self.counter = 0
        self.db = get_db()

    def __enter__(self):
        """ On every with block we need to drop the profile collection. """
        """On every with block we need to drop the profile collection."""
        self.db.set_profiling_level(0)
        self.db.system.profile.drop()
        self.db.set_profiling_level(2)
        return self

    def __exit__(self, t, value, traceback):
        """ Reset the profiling level. """
        """Reset the profiling level."""
        self.db.set_profiling_level(0)

    def __eq__(self, value):
        """ == Compare querycounter. """
        """== Compare querycounter."""
        counter = self._get_count()
        return value == counter

    def __ne__(self, value):
        """ != Compare querycounter. """
        """!= Compare querycounter."""
        return not self.__eq__(value)

    def __lt__(self, value):
        """ < Compare querycounter. """
        """< Compare querycounter."""
        return self._get_count() < value

    def __le__(self, value):
        """ <= Compare querycounter. """
        """<= Compare querycounter."""
        return self._get_count() <= value

    def __gt__(self, value):
        """ > Compare querycounter. """
        """> Compare querycounter."""
        return self._get_count() > value

    def __ge__(self, value):
        """ >= Compare querycounter. """
        """>= Compare querycounter."""
        return self._get_count() >= value

    def __int__(self):
        """ int representation. """
        """int representation."""
        return self._get_count()

    def __repr__(self):
        """ repr query_counter as the number of queries. """
        """repr query_counter as the number of queries."""
        return u"%s" % self._get_count()

    def _get_count(self):
        """ Get the number of queries. """
        ignore_query = {"ns": {"$ne": "%s.system.indexes" % self.db.name}}
        """Get the number of queries."""
        ignore_query = {'ns': {'$ne': '%s.system.indexes' % self.db.name}}
        count = self.db.system.profile.find(ignore_query).count() - self.counter
        self.counter += 1
        return count
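Because query_counter implements the full set of comparison operators above, tests can assert directly against the profiler-derived count (this requires a live default connection and a registered document class such as the Group used earlier)::

    from mongoengine.context_managers import query_counter

    with query_counter() as q:
        assert q == 0
        Group.objects.first()   # one query against the default db
        assert q == 1
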
@@ -1,17 +1,15 @@
from bson import DBRef, SON
import six

from base import (
    BaseDict, BaseList, EmbeddedDocumentList,
    TopLevelDocumentMetaclass, get_document
)
from fields import (ReferenceField, ListField, DictField, MapField)
from connection import get_db
from queryset import QuerySet
from document import Document, EmbeddedDocument
from mongoengine.base import (BaseDict, BaseList, EmbeddedDocumentList,
                              TopLevelDocumentMetaclass, get_document)
from mongoengine.connection import get_db
from mongoengine.document import Document, EmbeddedDocument
from mongoengine.fields import DictField, ListField, MapField, ReferenceField
from mongoengine.queryset import QuerySet


class DeReference(object):

    def __call__(self, items, max_depth=1, instance=None, name=None):
        """
        Cheaply dereferences the items to a set depth.
@@ -25,7 +23,7 @@ class DeReference(object):
            :class:`~mongoengine.base.ComplexBaseField`
        :param get: A boolean determining if being called by __get__
        """
        if items is None or isinstance(items, basestring):
        if items is None or isinstance(items, six.string_types):
            return items

        # cheapest way to convert a queryset to a list
@@ -49,8 +47,8 @@ class DeReference(object):

        if is_list and all([i.__class__ == doc_type for i in items]):
            return items
        elif not is_list and all([i.__class__ == doc_type
                                  for i in items.values()]):
        elif not is_list and all(
                [i.__class__ == doc_type for i in items.values()]):
            return items
        elif not field.dbref:
            if not hasattr(items, 'items'):
@@ -68,11 +66,11 @@ class DeReference(object):

                items = _get_items(items)
            else:
                items = dict([
                    (k, field.to_python(v))
                    if not isinstance(v, (DBRef, Document)) else (k, v)
                    for k, v in items.iteritems()]
                )
                items = {
                    k: (v if isinstance(v, (DBRef, Document))
                        else field.to_python(v))
                    for k, v in items.iteritems()
                }

        self.reference_map = self._find_references(items)
        self.object_map = self._fetch_objects(doc_type=doc_type)
@@ -90,36 +88,36 @@ class DeReference(object):
            return reference_map

        # Determine the iterator to use
        if not hasattr(items, 'items'):
            iterator = enumerate(items)
        if isinstance(items, dict):
            iterator = items.values()
        else:
            iterator = items.iteritems()
            iterator = items

        # Recursively find dbreferences
        depth += 1
        for k, item in iterator:
        for item in iterator:
            if isinstance(item, (Document, EmbeddedDocument)):
                for field_name, field in item._fields.iteritems():
                    v = item._data.get(field_name, None)
                    if isinstance(v, (DBRef)):
                        reference_map.setdefault(field.document_type, []).append(v.id)
                    if isinstance(v, DBRef):
                        reference_map.setdefault(field.document_type, set()).add(v.id)
                    elif isinstance(v, (dict, SON)) and '_ref' in v:
                        reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id)
                        reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id)
                    elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
                        field_cls = getattr(getattr(field, 'field', None), 'document_type', None)
                        references = self._find_references(v, depth)
                        for key, refs in references.iteritems():
                            if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
                                key = field_cls
                            reference_map.setdefault(key, []).extend(refs)
            elif isinstance(item, (DBRef)):
                reference_map.setdefault(item.collection, []).append(item.id)
                            reference_map.setdefault(key, set()).update(refs)
            elif isinstance(item, DBRef):
                reference_map.setdefault(item.collection, set()).add(item.id)
            elif isinstance(item, (dict, SON)) and '_ref' in item:
                reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id)
                reference_map.setdefault(get_document(item['_cls']), set()).add(item['_ref'].id)
            elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
                references = self._find_references(item, depth - 1)
                for key, refs in references.iteritems():
                    reference_map.setdefault(key, []).extend(refs)
                    reference_map.setdefault(key, set()).update(refs)

        return reference_map

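Switching reference_map values from lists to sets deduplicates repeated DBRefs before they are fetched, so a document referenced many times is only queried once. Conceptually::

    reference_map = {}
    for ref_id in ['id1', 'id1', 'id2']:
        reference_map.setdefault('users', set()).add(ref_id)

    assert reference_map['users'] == {'id1', 'id2'}  # duplicates collapse
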
@@ -128,33 +126,37 @@ class DeReference(object):
        """
        object_map = {}
        for collection, dbrefs in self.reference_map.iteritems():
            keys = object_map.keys()
            refs = list(set([dbref for dbref in dbrefs if unicode(dbref).encode('utf-8') not in keys]))
            if hasattr(collection, 'objects'):  # We have a document class for the refs
                col_name = collection._get_collection_name()
                refs = [dbref for dbref in dbrefs
                        if (col_name, dbref) not in object_map]
                references = collection.objects.in_bulk(refs)
                for key, doc in references.iteritems():
                    object_map[key] = doc
                    object_map[(col_name, key)] = doc
            else:  # Generic reference: use the refs data to convert to document
                if isinstance(doc_type, (ListField, DictField, MapField,)):
                    continue

                refs = [dbref for dbref in dbrefs
                        if (collection, dbref) not in object_map]

                if doc_type:
                    references = doc_type._get_db()[collection].find({'_id': {'$in': refs}})
                    for ref in references:
                        doc = doc_type._from_son(ref)
                        object_map[doc.id] = doc
                        object_map[(collection, doc.id)] = doc
                else:
                    references = get_db()[collection].find({'_id': {'$in': refs}})
                    for ref in references:
                        if '_cls' in ref:
                            doc = get_document(ref["_cls"])._from_son(ref)
                            doc = get_document(ref['_cls'])._from_son(ref)
                        elif doc_type is None:
                            doc = get_document(
                                ''.join(x.capitalize()
                                        for x in collection.split('_')))._from_son(ref)
                                for x in collection.split('_')))._from_son(ref)
                        else:
                            doc = doc_type._from_son(ref)
                        object_map[doc.id] = doc
                        object_map[(collection, doc.id)] = doc
        return object_map

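Keying object_map by a (collection, id) tuple instead of the bare id fixes lookups when two collections happen to contain documents with the same _id. A conceptual sketch::

    object_map = {}
    object_map[('users', 1)] = 'user #1'
    object_map[('groups', 1)] = 'group #1'  # same _id, different collection

    # Under the old bare-id scheme the second write would have clobbered
    # the first; tuple keys keep both entries distinct.
    assert object_map[('users', 1)] != object_map[('groups', 1)]
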
    def _attach_objects(self, items, depth=0, instance=None, name=None):
@@ -180,7 +182,8 @@ class DeReference(object):

        if isinstance(items, (dict, SON)):
            if '_ref' in items:
                return self.object_map.get(items['_ref'].id, items)
                return self.object_map.get(
                    (items['_ref'].collection, items['_ref'].id), items)
            elif '_cls' in items:
                doc = get_document(items['_cls'])._from_son(items)
                _cls = doc._data.pop('_cls', None)
@@ -213,21 +216,22 @@ class DeReference(object):
                if k in self.object_map and not is_list:
                    data[k] = self.object_map[k]
                elif isinstance(v, (Document, EmbeddedDocument)):
                    for field_name, field in v._fields.iteritems():
                    for field_name in v._fields:
                        v = data[k]._data.get(field_name, None)
                        if isinstance(v, (DBRef)):
                            data[k]._data[field_name] = self.object_map.get(v.id, v)
                        if isinstance(v, DBRef):
                            data[k]._data[field_name] = self.object_map.get(
                                (v.collection, v.id), v)
                        elif isinstance(v, (dict, SON)) and '_ref' in v:
                            data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v)
                        elif isinstance(v, dict) and depth <= self.max_depth:
                            data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
                        elif isinstance(v, (list, tuple)) and depth <= self.max_depth:
                            data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
                            data[k]._data[field_name] = self.object_map.get(
                                (v['_ref'].collection, v['_ref'].id), v)
                        elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
                            item_name = six.text_type('{0}.{1}.{2}').format(name, k, field_name)
                            data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=item_name)
                elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
                    item_name = '%s.%s' % (name, k) if name else name
                    data[k] = self._attach_objects(v, depth - 1, instance=instance, name=item_name)
                elif hasattr(v, 'id'):
                    data[k] = self.object_map.get(v.id, v)
                    data[k] = self.object_map.get((v.collection, v.id), v)

        if instance and name:
            if is_list:
@@ -1,412 +0,0 @@
from mongoengine import *

from django.utils.encoding import smart_str
from django.contrib.auth.models import _user_has_perm, _user_get_all_permissions, _user_has_module_perms
from django.db import models
from django.contrib.contenttypes.models import ContentTypeManager
from django.contrib import auth
from django.contrib.auth.models import AnonymousUser
from django.utils.translation import ugettext_lazy as _

from .utils import datetime_now

REDIRECT_FIELD_NAME = 'next'

try:
    from django.contrib.auth.hashers import check_password, make_password
except ImportError:
    """Handle older versions of Django"""
    from django.utils.hashcompat import md5_constructor, sha_constructor

    def get_hexdigest(algorithm, salt, raw_password):
        raw_password, salt = smart_str(raw_password), smart_str(salt)
        if algorithm == 'md5':
            return md5_constructor(salt + raw_password).hexdigest()
        elif algorithm == 'sha1':
            return sha_constructor(salt + raw_password).hexdigest()
        raise ValueError('Got unknown password algorithm type in password')

    def check_password(raw_password, password):
        algo, salt, hash = password.split('$')
        return hash == get_hexdigest(algo, salt, raw_password)

    def make_password(raw_password):
        from random import random
        algo = 'sha1'
        salt = get_hexdigest(algo, str(random()), str(random()))[:5]
        hash = get_hexdigest(algo, salt, raw_password)
        return '%s$%s$%s' % (algo, salt, hash)


class ContentType(Document):
    name = StringField(max_length=100)
    app_label = StringField(max_length=100)
    model = StringField(max_length=100, verbose_name=_('python model class name'),
                        unique_with='app_label')
    objects = ContentTypeManager()

    class Meta:
        verbose_name = _('content type')
        verbose_name_plural = _('content types')
        # db_table = 'django_content_type'
        # ordering = ('name',)
        # unique_together = (('app_label', 'model'),)

    def __unicode__(self):
        return self.name

    def model_class(self):
        "Returns the Python model class for this type of content."
        from django.db import models
        return models.get_model(self.app_label, self.model)

    def get_object_for_this_type(self, **kwargs):
        """
        Returns an object of this type for the keyword arguments given.
        Basically, this is a proxy around this object_type's get_object() model
        method. The ObjectNotExist exception, if thrown, will not be caught,
        so code that calls this method should catch it.
        """
        return self.model_class()._default_manager.using(self._state.db).get(**kwargs)

    def natural_key(self):
        return (self.app_label, self.model)


class SiteProfileNotAvailable(Exception):
    pass


class PermissionManager(models.Manager):
    def get_by_natural_key(self, codename, app_label, model):
        return self.get(
            codename=codename,
            content_type=ContentType.objects.get_by_natural_key(app_label, model)
        )


class Permission(Document):
    """The permissions system provides a way to assign permissions to specific
    users and groups of users.

    The permission system is used by the Django admin site, but may also be
    useful in your own code. The Django admin site uses permissions as follows:

        - The "add" permission limits the user's ability to view the "add"
          form and add an object.
        - The "change" permission limits a user's ability to view the change
          list, view the "change" form and change an object.
        - The "delete" permission limits the ability to delete an object.

    Permissions are set globally per type of object, not per specific object
    instance. It is possible to say "Mary may change news stories," but it's
    not currently possible to say "Mary may change news stories, but only the
    ones she created herself" or "Mary may only change news stories that have
    a certain status or publication date."

    Three basic permissions -- add, change and delete -- are automatically
    created for each Django model.
    """
    name = StringField(max_length=50, verbose_name=_('username'))
    content_type = ReferenceField(ContentType)
    codename = StringField(max_length=100, verbose_name=_('codename'))
    # FIXME: don't access field of the other class
    # unique_with=['content_type__app_label', 'content_type__model'])

    objects = PermissionManager()

    class Meta:
        verbose_name = _('permission')
        verbose_name_plural = _('permissions')
        # unique_together = (('content_type', 'codename'),)
        # ordering = ('content_type__app_label', 'content_type__model', 'codename')

    def __unicode__(self):
        return u"%s | %s | %s" % (
            unicode(self.content_type.app_label),
            unicode(self.content_type),
            unicode(self.name))

    def natural_key(self):
        return (self.codename,) + self.content_type.natural_key()
    natural_key.dependencies = ['contenttypes.contenttype']


class Group(Document):
    """Groups are a generic way of categorizing users to apply permissions,
    or some other label, to those users. A user can belong to any number of
    groups.

    A user in a group automatically has all the permissions granted to that
    group. For example, if the group Site editors has the permission
    can_edit_home_page, any user in that group will have that permission.

    Beyond permissions, groups are a convenient way to categorize users to
    apply some label, or extended functionality, to them. For example, you
    could create a group 'Special users', and you could write code that would
    do special things to those users -- such as giving them access to a
    members-only portion of your site, or sending them members-only
    e-mail messages.
    """
    name = StringField(max_length=80, unique=True, verbose_name=_('name'))
    permissions = ListField(ReferenceField(Permission, verbose_name=_('permissions'), required=False))

    class Meta:
        verbose_name = _('group')
        verbose_name_plural = _('groups')

    def __unicode__(self):
        return self.name


class UserManager(models.Manager):
    def create_user(self, username, email, password=None):
        """
        Creates and saves a User with the given username, e-mail and password.
        """
        now = datetime_now()

        # Normalize the address by lowercasing the domain part of the email
        # address.
        try:
            email_name, domain_part = email.strip().split('@', 1)
        except ValueError:
            pass
        else:
            email = '@'.join([email_name, domain_part.lower()])

        user = self.model(username=username, email=email, is_staff=False,
                          is_active=True, is_superuser=False, last_login=now,
                          date_joined=now)

        user.set_password(password)
        user.save(using=self._db)
        return user

    def create_superuser(self, username, email, password):
        u = self.create_user(username, email, password)
        u.is_staff = True
        u.is_active = True
        u.is_superuser = True
        u.save(using=self._db)
        return u

    def make_random_password(self, length=10, allowed_chars='abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23456789'):
        "Generates a random password with the given length and given allowed_chars"
        # Note that default value of allowed_chars does not have "I" or letters
        # that look like it -- just to avoid confusion.
        from random import choice
        return ''.join([choice(allowed_chars) for i in range(length)])


class User(Document):
    """A User document that aims to mirror most of the API specified by Django
    at http://docs.djangoproject.com/en/dev/topics/auth/#users
    """
    username = StringField(max_length=30, required=True,
                           verbose_name=_('username'),
                           help_text=_("Required. 30 characters or fewer. Letters, numbers and @/./+/-/_ characters"))

    first_name = StringField(max_length=30,
                             verbose_name=_('first name'))

    last_name = StringField(max_length=30,
                            verbose_name=_('last name'))
    email = EmailField(verbose_name=_('e-mail address'))
    password = StringField(max_length=128,
                           verbose_name=_('password'),
                           help_text=_("Use '[algo]$[iterations]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>."))
    is_staff = BooleanField(default=False,
                            verbose_name=_('staff status'),
                            help_text=_("Designates whether the user can log into this admin site."))
    is_active = BooleanField(default=True,
                             verbose_name=_('active'),
                             help_text=_("Designates whether this user should be treated as active. Unselect this instead of deleting accounts."))
    is_superuser = BooleanField(default=False,
                                verbose_name=_('superuser status'),
                                help_text=_("Designates that this user has all permissions without explicitly assigning them."))
    last_login = DateTimeField(default=datetime_now,
                               verbose_name=_('last login'))
    date_joined = DateTimeField(default=datetime_now,
                                verbose_name=_('date joined'))

    user_permissions = ListField(ReferenceField(Permission), verbose_name=_('user permissions'),
                                 help_text=_('Permissions for the user.'))

    USERNAME_FIELD = 'username'
    REQUIRED_FIELDS = ['email']

    meta = {
        'allow_inheritance': True,
        'indexes': [
            {'fields': ['username'], 'unique': True, 'sparse': True}
        ]
    }

    def __unicode__(self):
        return self.username

    def get_full_name(self):
        """Returns the users first and last names, separated by a space.
        """
        full_name = u'%s %s' % (self.first_name or '', self.last_name or '')
        return full_name.strip()

    def is_anonymous(self):
        return False

    def is_authenticated(self):
        return True

    def set_password(self, raw_password):
        """Sets the user's password - always use this rather than directly
        assigning to :attr:`~mongoengine.django.auth.User.password` as the
        password is hashed before storage.
        """
        self.password = make_password(raw_password)
        self.save()
        return self

    def check_password(self, raw_password):
        """Checks the user's password against a provided password - always use
        this rather than directly comparing to
        :attr:`~mongoengine.django.auth.User.password` as the password is
        hashed before storage.
        """
        return check_password(raw_password, self.password)

    @classmethod
    def create_user(cls, username, password, email=None):
        """Create (and save) a new user with the given username, password and
        email address.
        """
        now = datetime_now()

        # Normalize the address by lowercasing the domain part of the email
        # address.
        if email is not None:
            try:
                email_name, domain_part = email.strip().split('@', 1)
            except ValueError:
                pass
            else:
                email = '@'.join([email_name, domain_part.lower()])

        user = cls(username=username, email=email, date_joined=now)
        user.set_password(password)
        user.save()
        return user

    def get_group_permissions(self, obj=None):
        """
        Returns a list of permission strings that this user has through his/her
        groups. This method queries all available auth backends. If an object
        is passed in, only permissions matching this object are returned.
        """
        permissions = set()
        for backend in auth.get_backends():
            if hasattr(backend, "get_group_permissions"):
                permissions.update(backend.get_group_permissions(self, obj))
        return permissions

    def get_all_permissions(self, obj=None):
        return _user_get_all_permissions(self, obj)

    def has_perm(self, perm, obj=None):
        """
        Returns True if the user has the specified permission. This method
        queries all available auth backends, but returns immediately if any
        backend returns True. Thus, a user who has permission from a single
        auth backend is assumed to have permission in general. If an object is
        provided, permissions for this specific object are checked.
        """

        # Active superusers have all permissions.
        if self.is_active and self.is_superuser:
            return True

        # Otherwise we need to check the backends.
        return _user_has_perm(self, perm, obj)

    def has_module_perms(self, app_label):
        """
        Returns True if the user has any permissions in the given app label.
        Uses pretty much the same logic as has_perm, above.
        """
        # Active superusers have all permissions.
        if self.is_active and self.is_superuser:
            return True

        return _user_has_module_perms(self, app_label)

    def email_user(self, subject, message, from_email=None):
        "Sends an e-mail to this User."
        from django.core.mail import send_mail
        send_mail(subject, message, from_email, [self.email])

    def get_profile(self):
        """
        Returns site-specific profile for this user. Raises
        SiteProfileNotAvailable if this site does not allow profiles.
        """
        if not hasattr(self, '_profile_cache'):
            from django.conf import settings
            if not getattr(settings, 'AUTH_PROFILE_MODULE', False):
                raise SiteProfileNotAvailable('You need to set AUTH_PROFILE_MO'
                                              'DULE in your project settings')
            try:
                app_label, model_name = settings.AUTH_PROFILE_MODULE.split('.')
            except ValueError:
                raise SiteProfileNotAvailable('app_label and model_name should'
                                              ' be separated by a dot in the AUTH_PROFILE_MODULE set'
                                              'ting')

            try:
                model = models.get_model(app_label, model_name)
                if model is None:
                    raise SiteProfileNotAvailable('Unable to load the profile '
                                                  'model, check AUTH_PROFILE_MODULE in your project sett'
                                                  'ings')
                self._profile_cache = model._default_manager.using(self._state.db).get(user__id__exact=self.id)
                self._profile_cache.user = self
            except (ImportError, ImproperlyConfigured):
                raise SiteProfileNotAvailable
        return self._profile_cache


class MongoEngineBackend(object):
    """Authenticate using MongoEngine and mongoengine.django.auth.User.
    """

    supports_object_permissions = False
    supports_anonymous_user = False
    supports_inactive_user = False
    _user_doc = False

    def authenticate(self, username=None, password=None):
        user = self.user_document.objects(username=username).first()
        if user:
            if password and user.check_password(password):
                backend = auth.get_backends()[0]
                user.backend = "%s.%s" % (backend.__module__, backend.__class__.__name__)
                return user
        return None

    def get_user(self, user_id):
        return self.user_document.objects.with_id(user_id)

    @property
    def user_document(self):
        if self._user_doc is False:
            from .mongo_auth.models import get_user_document
            self._user_doc = get_user_document()
        return self._user_doc


def get_user(userid):
    """Returns a User object from an id (User.id). Django's equivalent takes
    request, but taking an id instead leaves it up to the developer to store
    the id in any way they want (session, signed cookie, etc.)
    """
    if not userid:
        return AnonymousUser()
    return MongoEngineBackend().get_user(userid) or AnonymousUser()
@@ -1,119 +0,0 @@
from django.conf import settings
from django.contrib.auth.hashers import make_password
from django.contrib.auth.models import UserManager
from django.core.exceptions import ImproperlyConfigured
from django.db import models
try:
    from django.utils.module_loading import import_module
except ImportError:
    """Handle older versions of Django"""
    from django.utils.importlib import import_module
from django.utils.translation import ugettext_lazy as _


__all__ = (
    'get_user_document',
)


MONGOENGINE_USER_DOCUMENT = getattr(
    settings, 'MONGOENGINE_USER_DOCUMENT', 'mongoengine.django.auth.User')


def get_user_document():
    """Get the user document class used for authentication.

    This is the class defined in settings.MONGOENGINE_USER_DOCUMENT, which
    defaults to `mongoengine.django.auth.User`.

    """

    name = MONGOENGINE_USER_DOCUMENT
    dot = name.rindex('.')
    module = import_module(name[:dot])
    return getattr(module, name[dot + 1:])


class MongoUserManager(UserManager):
    """A User manager wich allows the use of MongoEngine documents in Django.

    To use the manager, you must tell django.contrib.auth to use MongoUser as
    the user model. In you settings.py, you need:

        INSTALLED_APPS = (
            ...
            'django.contrib.auth',
            'mongoengine.django.mongo_auth',
            ...
        )
        AUTH_USER_MODEL = 'mongo_auth.MongoUser'

    Django will use the model object to access the custom Manager, which will
    replace the original queryset with MongoEngine querysets.

    By default, mongoengine.django.auth.User will be used to store users. You
    can specify another document class in MONGOENGINE_USER_DOCUMENT in your
    settings.py.

    The User Document class has the same requirements as a standard custom user
    model: https://docs.djangoproject.com/en/dev/topics/auth/customizing/

    In particular, the User Document class must define USERNAME_FIELD and
    REQUIRED_FIELDS.

    `AUTH_USER_MODEL` has been added in Django 1.5.

    """

    def contribute_to_class(self, model, name):
        super(MongoUserManager, self).contribute_to_class(model, name)
        self.dj_model = self.model
        self.model = get_user_document()

        self.dj_model.USERNAME_FIELD = self.model.USERNAME_FIELD
        username = models.CharField(_('username'), max_length=30, unique=True)
        username.contribute_to_class(self.dj_model, self.dj_model.USERNAME_FIELD)

        self.dj_model.REQUIRED_FIELDS = self.model.REQUIRED_FIELDS
        for name in self.dj_model.REQUIRED_FIELDS:
            field = models.CharField(_(name), max_length=30)
            field.contribute_to_class(self.dj_model, name)


    def get(self, *args, **kwargs):
        try:
            return self.get_query_set().get(*args, **kwargs)
        except self.model.DoesNotExist:
            # ModelBackend expects this exception
            raise self.dj_model.DoesNotExist

    @property
    def db(self):
        raise NotImplementedError

    def get_empty_query_set(self):
        return self.model.objects.none()

    def get_query_set(self):
        return self.model.objects


class MongoUser(models.Model):
    """"Dummy user model for Django.

    MongoUser is used to replace Django's UserManager with MongoUserManager.
    The actual user document class is mongoengine.django.auth.User or any
    other document class specified in MONGOENGINE_USER_DOCUMENT.

    To get the user document class, use `get_user_document()`.

    """

    objects = MongoUserManager()

    class Meta:
        app_label = 'mongo_auth'

    def set_password(self, password):
        """Doesn't do anything, but works around the issue with Django 1.6."""
        make_password(password)
@@ -1,124 +0,0 @@
from bson import json_util
from django.conf import settings
from django.contrib.sessions.backends.base import SessionBase, CreateError
from django.core.exceptions import SuspiciousOperation
try:
    from django.utils.encoding import force_unicode
except ImportError:
    from django.utils.encoding import force_text as force_unicode

from mongoengine.document import Document
from mongoengine import fields
from mongoengine.queryset import OperationError
from mongoengine.connection import DEFAULT_CONNECTION_NAME

from .utils import datetime_now


MONGOENGINE_SESSION_DB_ALIAS = getattr(
    settings, 'MONGOENGINE_SESSION_DB_ALIAS',
    DEFAULT_CONNECTION_NAME)

# a setting for the name of the collection used to store sessions
MONGOENGINE_SESSION_COLLECTION = getattr(
    settings, 'MONGOENGINE_SESSION_COLLECTION',
    'django_session')

# a setting for whether session data is stored encoded or not
MONGOENGINE_SESSION_DATA_ENCODE = getattr(
    settings, 'MONGOENGINE_SESSION_DATA_ENCODE',
    True)


class MongoSession(Document):
    session_key = fields.StringField(primary_key=True, max_length=40)
    session_data = fields.StringField() if MONGOENGINE_SESSION_DATA_ENCODE \
        else fields.DictField()
    expire_date = fields.DateTimeField()

    meta = {
        'collection': MONGOENGINE_SESSION_COLLECTION,
        'db_alias': MONGOENGINE_SESSION_DB_ALIAS,
        'allow_inheritance': False,
        'indexes': [
            {
                'fields': ['expire_date'],
                'expireAfterSeconds': 0
            }
        ]
    }

    def get_decoded(self):
        return SessionStore().decode(self.session_data)


class SessionStore(SessionBase):
    """A MongoEngine-based session store for Django.
    """

    def _get_session(self, *args, **kwargs):
        sess = super(SessionStore, self)._get_session(*args, **kwargs)
        if sess.get('_auth_user_id', None):
            sess['_auth_user_id'] = str(sess.get('_auth_user_id'))
        return sess

    def load(self):
        try:
            s = MongoSession.objects(session_key=self.session_key,
                                     expire_date__gt=datetime_now)[0]
            if MONGOENGINE_SESSION_DATA_ENCODE:
                return self.decode(force_unicode(s.session_data))
            else:
                return s.session_data
        except (IndexError, SuspiciousOperation):
            self.create()
            return {}

    def exists(self, session_key):
        return bool(MongoSession.objects(session_key=session_key).first())

    def create(self):
        while True:
            self._session_key = self._get_new_session_key()
            try:
                self.save(must_create=True)
            except CreateError:
                continue
            self.modified = True
            self._session_cache = {}
            return

    def save(self, must_create=False):
        if self.session_key is None:
            self._session_key = self._get_new_session_key()
        s = MongoSession(session_key=self.session_key)
        if MONGOENGINE_SESSION_DATA_ENCODE:
            s.session_data = self.encode(self._get_session(no_load=must_create))
        else:
            s.session_data = self._get_session(no_load=must_create)
        s.expire_date = self.get_expiry_date()
        try:
            s.save(force_insert=must_create)
        except OperationError:
            if must_create:
                raise CreateError
            raise

    def delete(self, session_key=None):
        if session_key is None:
            if self.session_key is None:
                return
            session_key = self.session_key
        MongoSession.objects(session_key=session_key).delete()


class BSONSerializer(object):
    """
    Serializer that can handle BSON types (eg ObjectId).
    """
    def dumps(self, obj):
        return json_util.dumps(obj, separators=(',', ':')).encode('ascii')

    def loads(self, data):
        return json_util.loads(data.decode('ascii'))
@@ -1,47 +0,0 @@
from mongoengine.queryset import QuerySet
from mongoengine.base import BaseDocument
from mongoengine.errors import ValidationError


def _get_queryset(cls):
    """Inspired by django.shortcuts.*"""
    if isinstance(cls, QuerySet):
        return cls
    else:
        return cls.objects


def get_document_or_404(cls, *args, **kwargs):
    """
    Uses get() to return a document, or raises an Http404 exception if the
    document does not exist.

    cls may be a Document or QuerySet object. All other passed
    arguments and keyword arguments are used in the get() query.

    Note: as with get(), a MultipleObjectsReturned will be raised if more
    than one object is found.

    Inspired by django.shortcuts.*
    """
    queryset = _get_queryset(cls)
    try:
        return queryset.get(*args, **kwargs)
    except (queryset._document.DoesNotExist, ValidationError):
        from django.http import Http404
        raise Http404('No %s matches the given query.' % queryset._document._class_name)


def get_list_or_404(cls, *args, **kwargs):
    """
    Uses filter() to return a list of documents, or raises an Http404
    exception if the list is empty.

    cls may be a Document or QuerySet object. All other passed
    arguments and keyword arguments are used in the filter() query.

    Inspired by django.shortcuts.*
    """
    queryset = _get_queryset(cls)
    obj_list = list(queryset.filter(*args, **kwargs))
    if not obj_list:
        from django.http import Http404
        raise Http404('No %s matches the given query.' % queryset._document._class_name)
    return obj_list
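Hypothetical view-level usage of these shortcuts (Article is an assumed Document class):

from django.http import HttpResponse

def article_detail(request, slug):
    # Raises Http404 when no Article matches, mirroring Django's
    # get_object_or_404 for MongoEngine documents.
    article = get_document_or_404(Article, slug=slug)
    return HttpResponse(article.title)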
@@ -1,112 +0,0 @@
import os
import itertools
import urlparse

from mongoengine import *
from django.conf import settings
from django.core.files.storage import Storage
from django.core.exceptions import ImproperlyConfigured


class FileDocument(Document):
    """A document used to store a single file in GridFS.
    """
    file = FileField()


class GridFSStorage(Storage):
    """A custom storage backend to store files in GridFS.
    """

    def __init__(self, base_url=None):
        if base_url is None:
            base_url = settings.MEDIA_URL
        self.base_url = base_url
        self.document = FileDocument
        self.field = 'file'

    def delete(self, name):
        """Deletes the specified file from the storage system.
        """
        if self.exists(name):
            doc = self._get_doc_with_name(name)
            field = getattr(doc, self.field)
            field.delete()  # delete the file stored in GridFS
            doc.delete()    # delete the FileDocument itself

    def exists(self, name):
        """Returns True if a file referenced by the given name already exists
        in the storage system, or False if the name is available for a new
        file.
        """
        doc = self._get_doc_with_name(name)
        if doc:
            field = getattr(doc, self.field)
            return bool(field.name)
        else:
            return False

    def listdir(self, path=None):
        """Lists the contents of the specified path, returning a 2-tuple of
        lists; the first item being directories, the second item being files.
        """
        def name(doc):
            return getattr(doc, self.field).name
        docs = self.document.objects
        return [], [name(d) for d in docs if name(d)]

    def size(self, name):
        """Returns the total size, in bytes, of the file specified by name.
        """
        doc = self._get_doc_with_name(name)
        if doc:
            return getattr(doc, self.field).length
        else:
            raise ValueError("No such file or directory: '%s'" % name)

    def url(self, name):
        """Returns an absolute URL where the file's contents can be accessed
        directly by a web browser.
        """
        if self.base_url is None:
            raise ValueError("This file is not accessible via a URL.")
        return urlparse.urljoin(self.base_url, name).replace('\\', '/')

    def _get_doc_with_name(self, name):
        """Find the document in the store with the given name.
        """
        docs = self.document.objects
        doc = [d for d in docs
               if hasattr(getattr(d, self.field), 'name') and
               getattr(d, self.field).name == name]
        if doc:
            return doc[0]
        else:
            return None

    def _open(self, name, mode='rb'):
        doc = self._get_doc_with_name(name)
        if doc:
            return getattr(doc, self.field)
        else:
            raise ValueError("No file found with the name '%s'." % name)

    def get_available_name(self, name):
        """Returns a filename that's free on the target storage system, and
        available for new content to be written to.
        """
        file_root, file_ext = os.path.splitext(name)
        # If the filename already exists, add an underscore and a number
        # (before the file extension, if one exists) to the filename until
        # the generated filename doesn't exist.
        count = itertools.count(1)
        while self.exists(name):
            # file_ext includes the dot.
            name = '%s_%s%s' % (file_root, next(count), file_ext)

        return name

    def _save(self, name, content):
        doc = self.document()
        getattr(doc, self.field).put(content, filename=name)
        doc.save()

        return name
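Hypothetical direct use of the storage backend (file name and content are made up):

from django.core.files.base import ContentFile

storage = GridFSStorage(base_url='/media/')
saved_name = storage.save('hello.txt', ContentFile(b'hello world'))
print(storage.size(saved_name))  # -> 11
print(storage.url(saved_name))   # -> '/media/hello.txt'
storage.delete(saved_name)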
@@ -1,31 +0,0 @@
#coding: utf-8

from unittest import TestCase

from mongoengine import connect
from mongoengine.connection import get_db


class MongoTestCase(TestCase):
    """
    TestCase class that clears the collections between tests.
    """

    @property
    def db_name(self):
        from django.conf import settings
        return 'test_%s' % getattr(settings, 'MONGO_DATABASE_NAME', 'dummy')

    def __init__(self, methodName='runTest'):
        connect(self.db_name)
        self.db = get_db()
        super(MongoTestCase, self).__init__(methodName)

    def dropCollections(self):
        for collection in self.db.collection_names():
            if collection.startswith('system.'):
                continue
            self.db.drop_collection(collection)

    def tearDown(self):
        self.dropCollections()
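A sketch of a test built on this base class (Person is a hypothetical document):

class PersonTests(MongoTestCase):
    def test_save_and_count(self):
        Person(name='Ada').save()
        # tearDown() drops the collections once the test finishes.
        self.assertEqual(Person.objects.count(), 1)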
@@ -1,6 +0,0 @@
try:
    # Django >= 1.4
    from django.utils.timezone import now as datetime_now
except ImportError:
    from datetime import datetime
    datetime_now = datetime.now
@@ -1,29 +1,23 @@
import re
import warnings

import hashlib
import pymongo
import re

from pymongo.read_preferences import ReadPreference
from bson import ObjectId
from bson.dbref import DBRef
import pymongo
from pymongo.read_preferences import ReadPreference
import six

from mongoengine import signals
from mongoengine.base import (BaseDict, BaseDocument, BaseList,
                              DocumentMetaclass, EmbeddedDocumentList,
                              TopLevelDocumentMetaclass, get_document)
from mongoengine.common import _import_class
from mongoengine.base import (
    DocumentMetaclass,
    TopLevelDocumentMetaclass,
    BaseDocument,
    BaseDict,
    BaseList,
    EmbeddedDocumentList,
    ALLOW_INHERITANCE,
    get_document
)
from mongoengine.errors import ValidationError, InvalidQueryError, InvalidDocumentError
from mongoengine.queryset import (OperationError, NotUniqueError,
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
from mongoengine.context_managers import switch_collection, switch_db
from mongoengine.errors import (InvalidDocumentError, InvalidQueryError,
                                SaveConditionError)
from mongoengine.python_support import IS_PYMONGO_3
from mongoengine.queryset import (NotUniqueError, OperationError,
                                  QuerySet, transform)
from mongoengine.connection import get_db, DEFAULT_CONNECTION_NAME
from mongoengine.context_managers import switch_db, switch_collection

__all__ = ('Document', 'EmbeddedDocument', 'DynamicDocument',
           'DynamicEmbeddedDocument', 'OperationError',
@@ -31,12 +25,10 @@ __all__ = ('Document', 'EmbeddedDocument', 'DynamicDocument',


def includes_cls(fields):
    """ Helper function used for ensuring and comparing indexes
    """

    """Helper function used for ensuring and comparing indexes."""
    first_field = None
    if len(fields):
        if isinstance(fields[0], basestring):
        if isinstance(fields[0], six.string_types):
            first_field = fields[0]
        elif isinstance(fields[0], (list, tuple)) and len(fields[0]):
            first_field = fields[0][0]
@@ -48,7 +40,6 @@ class InvalidCollectionError(Exception):


class EmbeddedDocument(BaseDocument):

    """A :class:`~mongoengine.Document` that isn't stored in its own
    collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as
    fields on :class:`~mongoengine.Document`\ s through the
@@ -58,12 +49,11 @@ class EmbeddedDocument(BaseDocument):
    to create a specialised version of the embedded document that will be
    stored in the same collection. To facilitate this behaviour a `_cls`
    field is added to documents (hidden through the MongoEngine interface).
    To disable this behaviour and remove the dependence on the presence of
    `_cls` set :attr:`allow_inheritance` to ``False`` in the :attr:`meta`
    dictionary.
    To enable this behaviour set :attr:`allow_inheritance` to ``True`` in the
    :attr:`meta` dictionary.
    """

    __slots__ = ('_instance')
    __slots__ = ('_instance', )

    # The __metaclass__ attribute is removed by 2to3 when running with Python3
    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
@@ -83,6 +73,15 @@ class EmbeddedDocument(BaseDocument):
    def __ne__(self, other):
        return not self.__eq__(other)

    def to_mongo(self, *args, **kwargs):
        data = super(EmbeddedDocument, self).to_mongo(*args, **kwargs)

        # remove _id from the SON if it's in it and it's None
        if '_id' in data and data['_id'] is None:
            del data['_id']

        return data

    def save(self, *args, **kwargs):
        self._instance.save(*args, **kwargs)

@@ -91,7 +90,6 @@ class EmbeddedDocument(BaseDocument):

class Document(BaseDocument):

    """The base class used for defining the structure and properties of
    collections of documents stored in MongoDB. Inherit from this class, and
    add fields as class attributes to define a document's structure.
@@ -108,17 +106,18 @@ class Document(BaseDocument):
    create a specialised version of the document that will be stored in the
    same collection. To facilitate this behaviour a `_cls`
    field is added to documents (hidden through the MongoEngine interface).
    To disable this behaviour and remove the dependence on the presence of
    `_cls` set :attr:`allow_inheritance` to ``False`` in the :attr:`meta`
    dictionary.
    To enable this behaviour set :attr:`allow_inheritance` to ``True`` in the
    :attr:`meta` dictionary.

    A :class:`~mongoengine.Document` may use a **Capped Collection** by
    specifying :attr:`max_documents` and :attr:`max_size` in the :attr:`meta`
    dictionary. :attr:`max_documents` is the maximum number of documents that
    is allowed to be stored in the collection, and :attr:`max_size` is the
    maximum size of the collection in bytes. If :attr:`max_size` is not
    maximum size of the collection in bytes. :attr:`max_size` is rounded up
    to the next multiple of 256, by MongoDB internally and by mongoengine
    beforehand, so use a multiple of 256 to avoid confusion. If :attr:`max_size` is not
    specified and :attr:`max_documents` is, :attr:`max_size` defaults to
    10000000 bytes (10MB).
    10485760 bytes (10MB).

    Indexes may be created by specifying :attr:`indexes` in the :attr:`meta`
    dictionary. The value should be a list of field names or tuples of field
@@ -135,6 +134,11 @@ class Document(BaseDocument):
    doesn't contain a list) if allow_inheritance is True. This can be
    disabled by either setting cls to False on the specific index or
    by setting index_cls to False on the meta dictionary for the document.

    By default, any extra attribute existing in stored data but not declared
    in your model will raise a :class:`~mongoengine.FieldDoesNotExist` error.
    This can be disabled by setting :attr:`strict` to ``False``
    in the :attr:`meta` dictionary.
    """

    # The __metaclass__ attribute is removed by 2to3 when running with Python3
@@ -142,36 +146,40 @@ class Document(BaseDocument):
    my_metaclass = TopLevelDocumentMetaclass
    __metaclass__ = TopLevelDocumentMetaclass

    __slots__ = ('__objects')
    __slots__ = ('__objects',)

    def pk():
        """Primary key alias
        """
    @property
    def pk(self):
        """Get the primary key."""
        if 'id_field' not in self._meta:
            return None
        return getattr(self, self._meta['id_field'])

        def fget(self):
            return getattr(self, self._meta['id_field'])

        def fset(self, value):
            return setattr(self, self._meta['id_field'], value)
        return property(fget, fset)
    pk = pk()
    @pk.setter
    def pk(self, value):
        """Set the primary key."""
        return setattr(self, self._meta['id_field'], value)

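The pk alias in action; a minimal sketch assuming a default ObjectId id field:

from mongoengine import Document, StringField

class BlogPost(Document):  # hypothetical example document
    title = StringField()

post = BlogPost(title='hello').save()  # save() returns self, per the code below
assert post.pk == post.id              # pk reads the configured id_field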
    @classmethod
    def _get_db(cls):
        """Some Model using other db_alias"""
        return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME))
        return get_db(cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME))

    @classmethod
    def _get_collection(cls):
        """Returns the collection for the document."""
        # TODO: use new get_collection() with PyMongo3 ?
        if not hasattr(cls, '_collection') or cls._collection is None:
            db = cls._get_db()
            collection_name = cls._get_collection_name()
            # Create collection as a capped collection if specified
            if cls._meta['max_size'] or cls._meta['max_documents']:
            if cls._meta.get('max_size') or cls._meta.get('max_documents'):
                # Get max document limit and max byte size from meta
                max_size = cls._meta['max_size'] or 10000000  # 10MB default
                max_documents = cls._meta['max_documents']
                max_size = cls._meta.get('max_size') or 10 * 2 ** 20  # 10MB default
                max_documents = cls._meta.get('max_documents')
                # Round up to next 256 bytes as MongoDB would do it to avoid exception
                if max_size % 256:
                    max_size = (max_size // 256 + 1) * 256

                if collection_name in db.collection_names():
                    cls._collection = db[collection_name]
@@ -179,7 +187,7 @@ class Document(BaseDocument):
                    # options match the specified capped options
                    options = cls._collection.options()
                    if options.get('max') != max_documents or \
                       options.get('size') != max_size:
                            options.get('size') != max_size:
                        msg = (('Cannot create collection "%s" as a capped '
                                'collection as it already exists')
                               % cls._collection)
@@ -198,31 +206,46 @@ class Document(BaseDocument):
        cls.ensure_indexes()
        return cls._collection
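A short sketch of the capped-collection meta options handled above (the Log document is hypothetical; max_size is already a multiple of 256, so no rounding occurs):

from mongoengine import Document, StringField

class Log(Document):
    message = StringField()
    meta = {
        'max_documents': 1000,     # cap on the number of documents
        'max_size': 10 * 2 ** 20,  # 10MB cap on the collection, in bytes
    }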
    def modify(self, query={}, **update):
    def to_mongo(self, *args, **kwargs):
        data = super(Document, self).to_mongo(*args, **kwargs)

        # If '_id' is None, try and set it from self._data. If that
        # doesn't exist either, remove '_id' from the SON completely.
        if data['_id'] is None:
            if self._data.get('id') is None:
                del data['_id']
            else:
                data['_id'] = self._data['id']

        return data

    def modify(self, query=None, **update):
        """Perform an atomic update of the document in the database and reload
        the document object using updated version.

        Returns True if the document has been updated or False if the document
        in the database doesn't match the query.

        .. note:: All unsaved changes that has been made to the document are
        .. note:: All unsaved changes that have been made to the document are
            rejected if the method returns True.

        :param query: the update will be performed only if the document in the
            database matches the query
        :param update: Django-style update keyword arguments
        """
        if query is None:
            query = {}

        if self.pk is None:
            raise InvalidDocumentError("The document does not have a primary key.")
            raise InvalidDocumentError('The document does not have a primary key.')

        id_field = self._meta["id_field"]
        id_field = self._meta['id_field']
        query = query.copy() if isinstance(query, dict) else query.to_query(self)

        if id_field not in query:
            query[id_field] = self.pk
        elif query[id_field] != self.pk:
            raise InvalidQueryError("Invalid document modify query: it must modify only this document.")
            raise InvalidQueryError('Invalid document modify query: it must modify only this document.')

        updated = self._qs(**query).modify(new=True, **update)
        if updated is None:
@@ -237,8 +260,8 @@ class Document(BaseDocument):
        return True
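A sketch of the atomic modify() documented above, guarding an update with a version check (the Counter document is hypothetical):

from mongoengine import Document, IntField

class Counter(Document):
    count = IntField(default=0)
    version = IntField(default=0)

counter = Counter(count=0, version=0).save()
if counter.modify(query={'version': counter.version},
                  inc__count=1, inc__version=1):
    # the in-memory document was reloaded with the updated values
    print(counter.count)  # -> 1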
    def save(self, force_insert=False, validate=True, clean=True,
             write_concern=None, cascade=None, cascade_kwargs=None,
             _refs=None, save_condition=None, **kwargs):
             write_concern=None, cascade=None, cascade_kwargs=None,
             _refs=None, save_condition=None, signal_kwargs=None, **kwargs):
        """Save the :class:`~mongoengine.Document` to the database. If the
        document already exists, it will be updated, otherwise it will be
        created.
@@ -262,7 +285,10 @@ class Document(BaseDocument):
            to cascading saves. Implies ``cascade=True``.
        :param _refs: A list of processed references used in cascading saves
        :param save_condition: only perform save if matching record in db
            satisfies condition(s) (e.g., version number)
            satisfies condition(s) (e.g. version number).
            Raises :class:`OperationError` if the conditions are not satisfied
        :param signal_kwargs: (optional) kwargs dictionary to be passed to
            the signal calls.

        .. versionchanged:: 0.5
            In existing documents it only saves changed fields using
@@ -280,21 +306,28 @@ class Document(BaseDocument):
        .. versionchanged:: 0.8.5
            Optional save_condition that only overwrites existing documents
            if the condition is satisfied in the current db record.
        .. versionchanged:: 0.10
            :class:`OperationError` exception raised if save_condition fails.
        .. versionchanged:: 0.10.1
            save_condition failure now raises a :class:`SaveConditionError`
        .. versionchanged:: 0.10.7
            Add signal_kwargs argument
        """
        signals.pre_save.send(self.__class__, document=self)
        signal_kwargs = signal_kwargs or {}
        signals.pre_save.send(self.__class__, document=self, **signal_kwargs)

        if validate:
            self.validate(clean=clean)

        if write_concern is None:
            write_concern = {"w": 1}
            write_concern = {'w': 1}

        doc = self.to_mongo()

        created = ('_id' not in doc or self._created or force_insert)

        signals.pre_save_post_validation.send(self.__class__, document=self,
                                              created=created)
                                              created=created, **signal_kwargs)

        try:
            collection = self._get_collection()
@@ -305,6 +338,15 @@ class Document(BaseDocument):
                object_id = collection.insert(doc, **write_concern)
            else:
                object_id = collection.save(doc, **write_concern)
                # In PyMongo 3.0, save() internally calls _update(), which
                # fails to pass the _id value back, so fetch it here.
                # The behaviour is correct in 2.x and in 3.0.1+.
                if not object_id and pymongo.version_tuple == (3, 0):
                    pk_as_mongo_obj = self._fields.get(self._meta['id_field']).to_mongo(self.pk)
                    object_id = (
                        self._qs.filter(pk=pk_as_mongo_obj).first() and
                        self._qs.filter(pk=pk_as_mongo_obj).first().pk
                    )  # TODO doesn't this make 2 queries?
        else:
            object_id = doc['_id']
            updates, removals = self._delta()
@@ -315,14 +357,18 @@ class Document(BaseDocument):
            else:
                select_dict = {}
                select_dict['_id'] = object_id
            shard_key = self.__class__._meta.get('shard_key', tuple())
            shard_key = self._meta.get('shard_key', tuple())
            for k in shard_key:
                actual_key = self._db_field_map.get(k, k)
                select_dict[actual_key] = doc[actual_key]
                path = self._lookup_field(k.split('.'))
                actual_key = [p.db_field for p in path]
                val = doc
                for ak in actual_key:
                    val = val[ak]
                select_dict['.'.join(actual_key)] = val

            def is_new_object(last_error):
                if last_error is not None:
                    updated = last_error.get("updatedExisting")
                    updated = last_error.get('updatedExisting')
                    if updated is not None:
                        return not updated
                return created
@@ -330,13 +376,16 @@ class Document(BaseDocument):
            update_query = {}

            if updates:
                update_query["$set"] = updates
                update_query['$set'] = updates
            if removals:
                update_query["$unset"] = removals
                update_query['$unset'] = removals
            if updates or removals:
                upsert = save_condition is None
                last_error = collection.update(select_dict, update_query,
                                               upsert=upsert, **write_concern)
                if not upsert and last_error['n'] == 0:
                    raise SaveConditionError('Race condition preventing'
                                             ' document update detected')
                created = is_new_object(last_error)

        if cascade is None:
@@ -345,39 +394,42 @@ class Document(BaseDocument):

        if cascade:
            kwargs = {
                "force_insert": force_insert,
                "validate": validate,
                "write_concern": write_concern,
                "cascade": cascade
                'force_insert': force_insert,
                'validate': validate,
                'write_concern': write_concern,
                'cascade': cascade
            }
            if cascade_kwargs:  # Allow granular control over cascades
                kwargs.update(cascade_kwargs)
            kwargs['_refs'] = _refs
            self.cascade_save(**kwargs)
        except pymongo.errors.DuplicateKeyError, err:
        except pymongo.errors.DuplicateKeyError as err:
            message = u'Tried to save duplicate unique keys (%s)'
            raise NotUniqueError(message % unicode(err))
        except pymongo.errors.OperationFailure, err:
            raise NotUniqueError(message % six.text_type(err))
        except pymongo.errors.OperationFailure as err:
            message = 'Could not save document (%s)'
            if re.match('^E1100[01] duplicate key', unicode(err)):
            if re.match('^E1100[01] duplicate key', six.text_type(err)):
                # E11000 - duplicate key error index
                # E11001 - duplicate key on update
                message = u'Tried to save duplicate unique keys (%s)'
                raise NotUniqueError(message % unicode(err))
            raise OperationError(message % unicode(err))
                raise NotUniqueError(message % six.text_type(err))
            raise OperationError(message % six.text_type(err))

        id_field = self._meta['id_field']
        if created or id_field not in self._meta.get('shard_key', []):
            self[id_field] = self._fields[id_field].to_python(object_id)

        signals.post_save.send(self.__class__, document=self, created=created)
        signals.post_save.send(self.__class__, document=self,
                               created=created, **signal_kwargs)
        self._clear_changed_fields()
        self._created = False
        return self
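A sketch of optimistic concurrency with save_condition, as described above (page is a hypothetical document instance with an integer version field):

try:
    page.version += 1
    page.save(save_condition={'version': page.version - 1})
except SaveConditionError:
    # another writer updated the record first; reload and retry as needed
    page.reload()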
    def cascade_save(self, *args, **kwargs):
        """Recursively saves any references /
        generic references on an objects"""
        _refs = kwargs.get('_refs', []) or []
    def cascade_save(self, **kwargs):
        """Recursively save any references and generic references on the
        document.
        """
        _refs = kwargs.get('_refs') or []

        ReferenceField = _import_class('ReferenceField')
        GenericReferenceField = _import_class('GenericReferenceField')
@@ -403,21 +455,27 @@ class Document(BaseDocument):

    @property
    def _qs(self):
        """
        Returns the queryset to use for updating / reloading / deletions
        """
        """Return the queryset to use for updating / reloading / deletions."""
        if not hasattr(self, '__objects'):
            self.__objects = QuerySet(self, self._get_collection())
        return self.__objects

    @property
    def _object_key(self):
        """Dict to identify object in collection
        """Get the query dict that can be used to fetch this object from
        the database. Most of the time it's a simple PK lookup, but in
        case of a sharded collection with a compound shard key, it can
        contain a more complex query.
        """
        select_dict = {'pk': self.pk}
        shard_key = self.__class__._meta.get('shard_key', tuple())
        for k in shard_key:
            select_dict[k] = getattr(self, k)
            path = self._lookup_field(k.split('.'))
            actual_key = [p.db_field for p in path]
            val = self
            for ak in actual_key:
                val = getattr(val, ak)
            select_dict['__'.join(actual_key)] = val
        return select_dict
    def update(self, **kwargs):
@@ -427,11 +485,11 @@ class Document(BaseDocument):
        Raises :class:`OperationError` if called on an object that has not yet
        been saved.
        """
        if not self.pk:
        if self.pk is None:
            if kwargs.get('upsert', False):
                query = self.to_mongo()
                if "_cls" in query:
                    del(query["_cls"])
                if '_cls' in query:
                    del query['_cls']
                return self._qs.filter(**query).update_one(**kwargs)
            else:
                raise OperationError(
@@ -440,28 +498,40 @@ class Document(BaseDocument):
        # Need to add shard key to query, or you get an error
        return self._qs.filter(**self._object_key).update_one(**kwargs)

    def delete(self, **write_concern):
    def delete(self, signal_kwargs=None, **write_concern):
        """Delete the :class:`~mongoengine.Document` from the database. This
        will only take effect if the document has been previously saved.

        :param signal_kwargs: (optional) kwargs dictionary to be passed to
            the signal calls.
        :param write_concern: Extra keyword arguments are passed down which
            will be used as options for the resultant ``getLastError``
            command. For example,
            ``save(..., write_concern={w: 2, fsync: True}, ...)`` will
            wait until at least two servers have recorded the write and
            will force an fsync on the primary server.

        .. versionchanged:: 0.10.7
            Add signal_kwargs argument
        """
        signals.pre_delete.send(self.__class__, document=self)
        signal_kwargs = signal_kwargs or {}
        signals.pre_delete.send(self.__class__, document=self, **signal_kwargs)

        # Delete FileFields separately
        FileField = _import_class('FileField')
        for name, field in self._fields.iteritems():
            if isinstance(field, FileField):
                getattr(self, name).delete()

        try:
            self._qs.filter(
                **self._object_key).delete(write_concern=write_concern, _from_doc_delete=True)
        except pymongo.errors.OperationFailure, err:
        except pymongo.errors.OperationFailure as err:
            message = u'Could not delete document (%s)' % err.message
            raise OperationError(message)
        signals.post_delete.send(self.__class__, document=self)
        signals.post_delete.send(self.__class__, document=self, **signal_kwargs)
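A hypothetical call combining the two argument groups documented above, extra signal context plus write-concern options forwarded to the driver:

post.delete(signal_kwargs={'reason': 'spam'}, w=2, fsync=True)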
    def switch_db(self, db_alias):
    def switch_db(self, db_alias, keep_created=True):
        """
        Temporarily switch the database for a document instance.

@@ -473,6 +543,9 @@ class Document(BaseDocument):

        :param str db_alias: The database alias to use for saving the document

        :param bool keep_created: keep self._created after switching the db;
            otherwise it is reset to True

        .. seealso::
            Use :class:`~mongoengine.context_managers.switch_collection`
            if you need to read from another collection
@@ -483,12 +556,12 @@ class Document(BaseDocument):
        self._get_collection = lambda: collection
        self._get_db = lambda: db
        self._collection = collection
        self._created = True
        self._created = True if not keep_created else self._created
        self.__objects = self._qs
        self.__objects._collection_obj = collection
        return self

    def switch_collection(self, collection_name):
    def switch_collection(self, collection_name, keep_created=True):
        """
        Temporarily switch the collection for a document instance.

@@ -501,6 +574,9 @@ class Document(BaseDocument):
        :param str collection_name: The name of the collection to use for
            saving the document

        :param bool keep_created: keep self._created after switching the
            collection; otherwise it is reset to True

        .. seealso::
            Use :class:`~mongoengine.context_managers.switch_db`
            if you need to read from another database
@@ -509,7 +585,7 @@ class Document(BaseDocument):
        collection = cls._get_collection()
        self._get_collection = lambda: collection
        self._collection = collection
        self._created = True
        self._created = True if not keep_created else self._created
        self.__objects = self._qs
        self.__objects._collection_obj = collection
        return self
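A sketch of switch_db(), assuming a second connection has been registered under the (hypothetical) alias 'archive':

from mongoengine import register_connection

register_connection('archive', name='mydb_archive')

user = User.objects.first()       # User is an assumed Document class
user.switch_db('archive').save()  # writes a copy into the archive database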
@@ -538,29 +614,35 @@ class Document(BaseDocument):
        if fields and isinstance(fields[0], int):
            max_depth = fields[0]
            fields = fields[1:]
        elif "max_depth" in kwargs:
            max_depth = kwargs["max_depth"]
        elif 'max_depth' in kwargs:
            max_depth = kwargs['max_depth']

        if self.pk is None:
            raise self.DoesNotExist('Document does not exist')

        if not self.pk:
            raise self.DoesNotExist("Document does not exist")
        obj = self._qs.read_preference(ReadPreference.PRIMARY).filter(
            **self._object_key).only(*fields).limit(1
            ).select_related(max_depth=max_depth)
            **self._object_key).only(*fields).limit(
            1).select_related(max_depth=max_depth)

        if obj:
            obj = obj[0]
        else:
            raise self.DoesNotExist("Document does not exist")
            raise self.DoesNotExist('Document does not exist')

        for field in self._fields_ordered:
        for field in obj._data:
            if not fields or field in fields:
                try:
                    setattr(self, field, self._reload(field, obj[field]))
                except KeyError:
                    # If field is removed from the database while the object
                    # is in memory, a reload would cause a KeyError
                    # i.e. obj.update(unset__field=1) followed by obj.reload()
                    delattr(self, field)
                except (KeyError, AttributeError):
                    try:
                        # If the field is a special field, e.g. items is
                        # stored as _reserved_items, a KeyError is thrown,
                        # so try to retrieve the field from _data.
                        setattr(self, field, self._reload(field, obj._data.get(field)))
                    except KeyError:
                        # If the field is removed from the database while the
                        # object is in memory, a reload would cause a KeyError
                        # i.e. obj.update(unset__field=1) followed by obj.reload()
                        delattr(self, field)

        self._changed_fields = obj._changed_fields
        self._created = False
@@ -587,8 +669,8 @@ class Document(BaseDocument):
    def to_dbref(self):
        """Returns an instance of :class:`~bson.dbref.DBRef` useful in
        `__raw__` queries."""
        if not self.pk:
            msg = "Only saved documents can have a valid dbref"
        if self.pk is None:
            msg = 'Only saved documents can have a valid dbref'
            raise OperationError(msg)
        return DBRef(self.__class__._get_collection_name(), self.pk)
@@ -604,38 +686,76 @@ class Document(BaseDocument):
                   for class_name in document_cls._subclasses
                   if class_name != document_cls.__name__] + [document_cls]

        for cls in classes:
        for klass in classes:
            for document_cls in documents:
                delete_rules = cls._meta.get('delete_rules') or {}
                delete_rules = klass._meta.get('delete_rules') or {}
                delete_rules[(document_cls, field_name)] = rule
                cls._meta['delete_rules'] = delete_rules
                klass._meta['delete_rules'] = delete_rules

    @classmethod
    def drop_collection(cls):
        """Drops the entire collection associated with this
        :class:`~mongoengine.Document` type from the database.

        Raises :class:`OperationError` if the document has no collection set
        (e.g. if it is `abstract`)

        .. versionchanged:: 0.10.7
            :class:`OperationError` exception raised if no collection available
        """
        col_name = cls._get_collection_name()
        if not col_name:
            raise OperationError('Document %s has no collection defined '
                                 '(is it abstract?)' % cls)
        cls._collection = None
        db = cls._get_db()
        db.drop_collection(cls._get_collection_name())
        db.drop_collection(col_name)

    @classmethod
    def create_index(cls, keys, background=False, **kwargs):
        """Creates the given indexes if required.

        :param keys: a single index key or a list of index keys (to
            construct a multi-field index); keys may be prefixed with a **+**
            or a **-** to determine the index ordering
        :param background: Allows index creation in the background
        """
        index_spec = cls._build_index_spec(keys)
        index_spec = index_spec.copy()
        fields = index_spec.pop('fields')
        drop_dups = kwargs.get('drop_dups', False)
        if IS_PYMONGO_3 and drop_dups:
            msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.'
            warnings.warn(msg, DeprecationWarning)
        elif not IS_PYMONGO_3:
            index_spec['drop_dups'] = drop_dups
        index_spec['background'] = background
        index_spec.update(kwargs)

        if IS_PYMONGO_3:
            return cls._get_collection().create_index(fields, **index_spec)
        else:
            return cls._get_collection().ensure_index(fields, **index_spec)

    @classmethod
    def ensure_index(cls, key_or_list, drop_dups=False, background=False,
                     **kwargs):
        """Ensure that the given indexes are in place.
        """Ensure that the given indexes are in place. Deprecated in favour
        of create_index.

        :param key_or_list: a single index key or a list of index keys (to
            construct a multi-field index); keys may be prefixed with a **+**
            or a **-** to determine the index ordering
        :param background: Allows index creation in the background
        :param drop_dups: removed/ignored with MongoDB >2.7.5; the value
            will be removed if PyMongo 3+ is used
        """
        index_spec = cls._build_index_spec(key_or_list)
        index_spec = index_spec.copy()
        fields = index_spec.pop('fields')
        index_spec['drop_dups'] = drop_dups
        index_spec['background'] = background
        index_spec.update(kwargs)

        return cls._get_collection().ensure_index(fields, **index_spec)
        if IS_PYMONGO_3 and drop_dups:
            msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.'
            warnings.warn(msg, DeprecationWarning)
        elif not IS_PYMONGO_3:
            kwargs.update({'drop_dups': drop_dups})
        return cls.create_index(key_or_list, background=background, **kwargs)
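A minimal sketch of the index helpers above (BlogPost is a hypothetical document; the + and - prefixes set the index ordering):

BlogPost.create_index(['+author', '-published'], background=True)
# ensure_index() is the deprecated spelling that forwards to create_index():
BlogPost.ensure_index('+author', background=True)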
    @classmethod
    def ensure_indexes(cls):
@@ -650,6 +770,9 @@ class Document(BaseDocument):
        drop_dups = cls._meta.get('index_drop_dups', False)
        index_opts = cls._meta.get('index_opts') or {}
        index_cls = cls._meta.get('index_cls', True)
        if IS_PYMONGO_3 and drop_dups:
            msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.'
            warnings.warn(msg, DeprecationWarning)

        collection = cls._get_collection()
        # 746: when connection is via mongos, the read preference is not necessarily an indication that
@@ -672,18 +795,36 @@ class Document(BaseDocument):
            cls_indexed = cls_indexed or includes_cls(fields)
            opts = index_opts.copy()
            opts.update(spec)
            collection.ensure_index(fields, background=background,
                                    drop_dups=drop_dups, **opts)

            # we shouldn't pass 'cls' to the collection.ensureIndex options
            # because of https://jira.mongodb.org/browse/SERVER-769
            if 'cls' in opts:
                del opts['cls']

            if IS_PYMONGO_3:
                collection.create_index(fields, background=background, **opts)
            else:
                collection.ensure_index(fields, background=background,
                                        drop_dups=drop_dups, **opts)

        # If _cls is being used (for polymorphism), it needs an index,
        # only if another index doesn't begin with _cls
        if (index_cls and not cls_indexed and
                cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True):
            collection.ensure_index('_cls', background=background,
                                    **index_opts)
        if index_cls and not cls_indexed and cls._meta.get('allow_inheritance'):

            # we shouldn't pass 'cls' to the collection.ensureIndex options
            # because of https://jira.mongodb.org/browse/SERVER-769
            if 'cls' in index_opts:
                del index_opts['cls']

            if IS_PYMONGO_3:
                collection.create_index('_cls', background=background,
                                        **index_opts)
            else:
                collection.ensure_index('_cls', background=background,
                                        **index_opts)

    @classmethod
    def list_indexes(cls, go_up=True, go_down=True):
    def list_indexes(cls):
        """Lists all of the indexes that should be created for the given
        collection. It includes all the indexes from super- and sub-classes.
        """
@@ -730,24 +871,23 @@ class Document(BaseDocument):
            return indexes

        indexes = []
        for cls in classes:
            for index in get_indexes_spec(cls):
        for klass in classes:
            for index in get_indexes_spec(klass):
                if index not in indexes:
                    indexes.append(index)

        # finish up by appending { '_id': 1 } and { '_cls': 1 }, if needed
        if [(u'_id', 1)] not in indexes:
            indexes.append([(u'_id', 1)])
        if (cls._meta.get('index_cls', True) and
                cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True):
        if cls._meta.get('index_cls', True) and cls._meta.get('allow_inheritance'):
            indexes.append([(u'_cls', 1)])

        return indexes

    @classmethod
    def compare_indexes(cls):
        """ Compares the indexes defined in MongoEngine with the ones existing
        in the database. Returns any missing/extra indexes.
        """ Compares the indexes defined in MongoEngine with the ones
        existing in the database. Returns any missing/extra indexes.
        """

        required = cls.list_indexes()
@@ -770,7 +910,6 @@ class Document(BaseDocument):


class DynamicDocument(Document):

    """A Dynamic Document class allowing flexible, expandable and uncontrolled
    schemas. As a :class:`~mongoengine.Document` subclass, acts in the same
    way as an ordinary document but has expando style properties. Any data
@@ -792,8 +931,9 @@ class DynamicDocument(Document):
    _dynamic = True

    def __delattr__(self, *args, **kwargs):
        """Deletes the attribute by setting to None and allowing _delta to unset
        it"""
        """Delete the attribute by setting to None and allowing _delta
        to unset it.
        """
        field_name = args[0]
        if field_name in self._dynamic_fields:
            setattr(self, field_name, None)
@@ -802,7 +942,6 @@ class DynamicDocument(Document):


class DynamicEmbeddedDocument(EmbeddedDocument):

    """A Dynamic Embedded Document class allowing flexible, expandable and
    uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more
    information about dynamic documents.
@@ -816,8 +955,9 @@ class DynamicEmbeddedDocument(EmbeddedDocument):
    _dynamic = True

    def __delattr__(self, *args, **kwargs):
        """Deletes the attribute by setting to None and allowing _delta to unset
        it"""
        """Delete the attribute by setting to None and allowing _delta
        to unset it.
        """
        field_name = args[0]
        if field_name in self._fields:
            default = self._fields[field_name].default
@@ -829,7 +969,6 @@ class DynamicEmbeddedDocument(EmbeddedDocument):


class MapReduceDocument(object):

    """A document returned from a map/reduce query.

    :param collection: An instance of :class:`~pymongo.Collection`
@@ -859,11 +998,11 @@ class MapReduceDocument(object):
        if not isinstance(self.key, id_field_type):
            try:
                self.key = id_field_type(self.key)
            except:
                raise Exception("Could not cast key as %s" %
            except Exception:
                raise Exception('Could not cast key as %s' %
                                id_field_type.__name__)

        if not hasattr(self, "_key_object"):
        if not hasattr(self, '_key_object'):
            self._key_object = self._document.objects.with_id(self.key)
            return self._key_object
        return self._key_object
@@ -1,12 +1,11 @@
from collections import defaultdict

from mongoengine.python_support import txt_type

import six

__all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError',
           'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError',
           'OperationError', 'NotUniqueError', 'FieldDoesNotExist',
           'ValidationError')
           'ValidationError', 'SaveConditionError')


class NotRegistered(Exception):
@@ -41,10 +40,21 @@ class NotUniqueError(OperationError):
    pass


class FieldDoesNotExist(Exception):
class SaveConditionError(OperationError):
    pass


class FieldDoesNotExist(Exception):
    """Raised when trying to set a field
    not declared in a :class:`~mongoengine.Document`
    or an :class:`~mongoengine.EmbeddedDocument`.

    To avoid this behavior on data loading,
    you should set :attr:`strict` to ``False``
    in the :attr:`meta` dictionary.
    """


class ValidationError(AssertionError):
    """Validation exception.

@@ -60,13 +70,13 @@ class ValidationError(AssertionError):
    field_name = None
    _message = None

    def __init__(self, message="", **kwargs):
    def __init__(self, message='', **kwargs):
        self.errors = kwargs.get('errors', {})
        self.field_name = kwargs.get('field_name')
        self.message = message

    def __str__(self):
        return txt_type(self.message)
        return six.text_type(self.message)

    def __repr__(self):
        return '%s(%s,)' % (self.__class__.__name__, self.message)
@@ -100,16 +110,20 @@ class ValidationError(AssertionError):
            errors_dict = {}
            if not source:
                return errors_dict

            if isinstance(source, dict):
                for field_name, error in source.iteritems():
                    errors_dict[field_name] = build_dict(error)
            elif isinstance(source, ValidationError) and source.errors:
                return build_dict(source.errors)
            else:
                return unicode(source)
                return six.text_type(source)

            return errors_dict

        if not self.errors:
            return {}

        return build_dict(self.errors)

    def _format_errors(self):
@@ -118,14 +132,14 @@ class ValidationError(AssertionError):
        def generate_key(value, prefix=''):
            if isinstance(value, list):
                value = ' '.join([generate_key(k) for k in value])
            if isinstance(value, dict):
            elif isinstance(value, dict):
                value = ' '.join(
                    [generate_key(v, k) for k, v in value.iteritems()])
                    [generate_key(v, k) for k, v in value.iteritems()])

            results = "%s.%s" % (prefix, value) if prefix else value
            results = '%s.%s' % (prefix, value) if prefix else value
            return results

        error_dict = defaultdict(list)
        for k, v in self.to_dict().iteritems():
            error_dict[generate_key(v)].append(k)
        return ' '.join(["%s: %s" % (k, v) for k, v in error_dict.iteritems()])
        return ' '.join(['%s: %s' % (k, v) for k, v in error_dict.iteritems()])
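Hypothetical handling of a ValidationError using the helpers defined above:

try:
    user.save()           # user is an assumed document instance
except ValidationError as e:
    print(e.message)      # summary message
    print(e.to_dict())    # nested, per-field error details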
File diff suppressed because it is too large
@@ -1,29 +1,25 @@
"""Helper functions and types to aid with Python 2.5 - 3 support."""
"""
Helper functions, constants, and types to aid with Python v2.7 - v3.x and
PyMongo v2.7 - v3.x support.
"""
import pymongo
import six

import sys

PY3 = sys.version_info[0] == 3

if PY3:
    import codecs
    from io import BytesIO as StringIO
    # return s converted to binary. b('test') should be equivalent to b'test'
    def b(s):
        return codecs.latin_1_encode(s)[0]

    bin_type = bytes
    txt_type = str
if pymongo.version_tuple[0] < 3:
    IS_PYMONGO_3 = False
else:
    IS_PYMONGO_3 = True


# six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3.
StringIO = six.BytesIO

# Additionally for Py2, try to use the faster cStringIO, if available
if not six.PY3:
    try:
        from cStringIO import StringIO
        import cStringIO
    except ImportError:
        from StringIO import StringIO

    # Conversion to binary only necessary in Python 3
    def b(s):
        return s

    bin_type = str
    txt_type = unicode

str_types = (bin_type, txt_type)
        pass
    else:
        StringIO = cStringIO.StringIO
@@ -1,11 +1,17 @@
from mongoengine.errors import (DoesNotExist, MultipleObjectsReturned,
                                InvalidQueryError, OperationError,
                                NotUniqueError)
from mongoengine.errors import *
from mongoengine.queryset.field_list import *
from mongoengine.queryset.manager import *
from mongoengine.queryset.queryset import *
from mongoengine.queryset.transform import *
from mongoengine.queryset.visitor import *

__all__ = (field_list.__all__ + manager.__all__ + queryset.__all__ +
           transform.__all__ + visitor.__all__)
# Expose just the public subset of all imported objects and constants.
__all__ = (
    'QuerySet', 'QuerySetNoCache', 'Q', 'queryset_manager', 'QuerySetManager',
    'QueryFieldList', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL',

    # Errors that might be related to a queryset, mostly here for backward
    # compatibility
    'DoesNotExist', 'InvalidQueryError', 'MultipleObjectsReturned',
    'NotUniqueError', 'OperationError',
)
File diff suppressed because it is too large
@@ -1,4 +1,3 @@

__all__ = ('QueryFieldList',)


@@ -68,7 +67,7 @@ class QueryFieldList(object):
        return bool(self.fields)

    def as_dict(self):
        field_list = dict((field, self.value) for field in self.fields)
        field_list = {field: self.value for field in self.fields}
        if self.slice:
            field_list.update(self.slice)
        if self._id is not None:
@@ -29,7 +29,7 @@ class QuerySetManager(object):
        Document.objects is accessed.
        """
        if instance is not None:
            # Document class being used rather than a document object
            # Document object being used rather than a document class
            return self

        # owner is the document that contains the QuerySetManager
@@ -1,6 +1,6 @@
from mongoengine.errors import OperationError
from mongoengine.queryset.base import (BaseQuerySet, DO_NOTHING, NULLIFY,
                                       CASCADE, DENY, PULL)
from mongoengine.queryset.base import (BaseQuerySet, CASCADE, DENY, DO_NOTHING,
                                       NULLIFY, PULL)

__all__ = ('QuerySet', 'QuerySetNoCache', 'DO_NOTHING', 'NULLIFY', 'CASCADE',
           'DENY', 'PULL')
@@ -27,9 +27,10 @@ class QuerySet(BaseQuerySet):
        in batches of ``ITER_CHUNK_SIZE``.

        If ``self._has_more`` the cursor hasn't been exhausted, so cache the
        next batch. Otherwise iterate the result_cache.
        """
        self._iter = True

        if self._has_more:
            return self._iter_results()

@@ -38,45 +39,60 @@ class QuerySet(BaseQuerySet):

    def __len__(self):
        """Since __len__ is called quite frequently (for example, as part of
        list(qs) we populate the result cache and cache the length.
        list(qs)), we populate the result cache and cache the length.
        """
        if self._len is not None:
            return self._len

        # Populate the result cache with *all* of the docs in the cursor
        if self._has_more:
            # populate the cache
            list(self._iter_results())

        # Cache the length of the complete result cache and return it
        self._len = len(self._result_cache)
        return self._len

    def __repr__(self):
        """Provides the string representation of the QuerySet
        """
        """Provide a string representation of the QuerySet"""
        if self._iter:
            return '.. queryset mid-iteration ..'

        self._populate_cache()
        data = self._result_cache[:REPR_OUTPUT_SIZE + 1]
        if len(data) > REPR_OUTPUT_SIZE:
            data[-1] = "...(remaining elements truncated)..."
            data[-1] = '...(remaining elements truncated)...'
        return repr(data)

    def _iter_results(self):
        """A generator for iterating over the result cache.

        Also populates the cache if there are more possible results to yield.
        Raises StopIteration when there are no more results"""
        Also populates the cache if there are more possible results to
        yield. Raises StopIteration when there are no more results.
        """
        if self._result_cache is None:
            self._result_cache = []

        pos = 0
        while True:
            upper = len(self._result_cache)
            while pos < upper:

            # For all positions lower than the length of the current result
            # cache, serve the docs straight from the cache w/o hitting the
            # database.
            # XXX it's VERY important to compute the len within the `while`
            # condition because the result cache might expand mid-iteration
            # (e.g. if we call len(qs) inside a loop that iterates over the
            # queryset). Fortunately len(list) is O(1) in Python, so this
            # doesn't cause performance issues.
            while pos < len(self._result_cache):
                yield self._result_cache[pos]
                pos = pos + 1
                pos += 1

            # Raise StopIteration if we already established there were no more
            # docs in the db cursor.
            if not self._has_more:
                raise StopIteration

            # Otherwise, populate more of the cache and repeat.
            if len(self._result_cache) <= pos:
                self._populate_cache()

@@ -87,12 +103,22 @@ class QuerySet(BaseQuerySet):
        """
        if self._result_cache is None:
            self._result_cache = []
        if self._has_more:
            try:
                for i in xrange(ITER_CHUNK_SIZE):
                    self._result_cache.append(self.next())
            except StopIteration:
                self._has_more = False

        # Skip populating the cache if we already established there are no
        # more docs to pull from the database.
        if not self._has_more:
            return

        # Pull in ITER_CHUNK_SIZE docs from the database and store them in
        # the result cache.
        try:
            for _ in xrange(ITER_CHUNK_SIZE):
                self._result_cache.append(self.next())
        except StopIteration:
            # Getting this exception means there are no more docs in the
            # db cursor. Set _has_more to False so that we can use that
            # information in other places.
            self._has_more = False

    def count(self, with_limit_and_skip=False):
        """Count the selected elements in the query.
@@ -115,7 +141,7 @@ class QuerySet(BaseQuerySet):
        .. versionadded:: 0.8.3 Convert to non caching queryset
        """
        if self._result_cache is not None:
            raise OperationError("QuerySet already cached")
            raise OperationError('QuerySet already cached')
        return self.clone_into(QuerySetNoCache(self._document, self._collection))


@@ -138,13 +164,14 @@ class QuerySetNoCache(BaseQuerySet):
            return '.. queryset mid-iteration ..'

        data = []
        for i in xrange(REPR_OUTPUT_SIZE + 1):
        for _ in xrange(REPR_OUTPUT_SIZE + 1):
            try:
                data.append(self.next())
            except StopIteration:
                break

        if len(data) > REPR_OUTPUT_SIZE:
            data[-1] = "...(remaining elements truncated)..."
            data[-1] = '...(remaining elements truncated)...'

        self.rewind()
        return repr(data)
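A hypothetical illustration of the caching behaviour above: the first pass over a QuerySet fills the result cache, while no_cache() opts out for large scans (BlogPost is assumed):

posts = BlogPost.objects(votes__gte=10)
print(len(posts))     # populates and caches the full result set
for p in posts:       # served from the cache, no second query
    print(p.title)

for p in BlogPost.objects.no_cache():  # streams without caching
    print(p.title)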
@@ -1,20 +1,23 @@
|
||||
from collections import defaultdict
|
||||
|
||||
from bson import ObjectId, SON
|
||||
from bson.dbref import DBRef
|
||||
import pymongo
|
||||
from bson import SON
|
||||
import six
|
||||
|
||||
from mongoengine.connection import get_connection
|
||||
from mongoengine.base import UPDATE_OPERATORS
|
||||
from mongoengine.common import _import_class
|
||||
from mongoengine.errors import InvalidQueryError, LookUpError
|
||||
from mongoengine.connection import get_connection
|
||||
from mongoengine.errors import InvalidQueryError
|
||||
from mongoengine.python_support import IS_PYMONGO_3
|
||||
|
||||
__all__ = ('query', 'update')
|
||||
|
||||
|
||||
COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
|
||||
'all', 'size', 'exists', 'not', 'elemMatch', 'type')
|
||||
GEO_OPERATORS = ('within_distance', 'within_spherical_distance',
|
||||
'within_box', 'within_polygon', 'near', 'near_sphere',
|
||||
'max_distance', 'geo_within', 'geo_within_box',
|
||||
'max_distance', 'min_distance', 'geo_within', 'geo_within_box',
|
||||
'geo_within_polygon', 'geo_within_center',
|
||||
'geo_within_sphere', 'geo_intersects')
|
||||
STRING_OPERATORS = ('contains', 'icontains', 'startswith',
|
||||
@@ -24,18 +27,14 @@ CUSTOM_OPERATORS = ('match',)
|
||||
MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS +
|
||||
STRING_OPERATORS + CUSTOM_OPERATORS)
|
||||
|
||||
UPDATE_OPERATORS = ('set', 'unset', 'inc', 'dec', 'pop', 'push',
|
||||
'push_all', 'pull', 'pull_all', 'add_to_set',
|
||||
'set_on_insert', 'min', 'max')
|
||||
|
||||
|
||||
def query(_doc_cls=None, _field_operation=False, **query):
|
||||
"""Transform a query from Django-style format to Mongo format.
|
||||
"""
|
||||
# TODO make this less complex
|
||||
def query(_doc_cls=None, **kwargs):
|
||||
"""Transform a query from Django-style format to Mongo format."""
|
||||
mongo_query = {}
|
||||
merge_query = defaultdict(list)
|
||||
for key, value in sorted(query.items()):
|
||||
if key == "__raw__":
|
||||
for key, value in sorted(kwargs.items()):
|
||||
if key == '__raw__':
|
||||
mongo_query.update(value)
|
||||
continue
|
||||
|
||||
@@ -47,6 +46,10 @@ def query(_doc_cls=None, _field_operation=False, **query):
         if len(parts) > 1 and parts[-1] in MATCH_OPERATORS:
             op = parts.pop()

+        # Allow to escape operator-like field name by __
+        if len(parts) > 1 and parts[-1] == '':
+            parts.pop()
+
         negate = False
         if len(parts) > 1 and parts[-1] == 'not':
             parts.pop()
@@ -56,16 +59,17 @@ def query(_doc_cls=None, _field_operation=False, **query):
             # Switch field names to proper names [set in Field(name='foo')]
             try:
                 fields = _doc_cls._lookup_field(parts)
-            except Exception, e:
+            except Exception as e:
                 raise InvalidQueryError(e)
             parts = []

             CachedReferenceField = _import_class('CachedReferenceField')
+            GenericReferenceField = _import_class('GenericReferenceField')

             cleaned_fields = []
             for field in fields:
                 append_field = True
-                if isinstance(field, basestring):
+                if isinstance(field, six.string_types):
                     parts.append(field)
                     append_field = False
                 # is last and CachedReferenceField
@@ -83,9 +87,9 @@ def query(_doc_cls=None, _field_operation=False, **query):
             singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not']
             singular_ops += STRING_OPERATORS
             if op in singular_ops:
-                if isinstance(field, basestring):
+                if isinstance(field, six.string_types):
                     if (op in STRING_OPERATORS and
-                            isinstance(value, basestring)):
+                            isinstance(value, six.string_types)):
                         StringField = _import_class('StringField')
                         value = StringField.prepare_query_value(op, value)
                     else:
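The escape rule added above matters when a sub-field shares a name with an operator. A sketch of the difference, traced through the code (no document class needed):

    from mongoengine.queryset import transform

    # 'size' is normally parsed as the $size operator...
    print(transform.query(shape__size=5))    # {'shape': {'$size': 5}}

    # ...but a trailing '__' escapes it, so 'size' is treated as a
    # plain sub-field name instead.
    print(transform.query(shape__size__=5))  # {'shape.size': 5}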
@@ -100,17 +104,35 @@ def query(_doc_cls=None, _field_operation=False, **query):
                 # 'in', 'nin' and 'all' require a list of values
                 value = [field.prepare_query_value(op, v) for v in value]

+        # If we're querying a GenericReferenceField, we need to alter the
+        # key depending on the value:
+        # * If the value is a DBRef, the key should be "field_name._ref".
+        # * If the value is an ObjectId, the key should be "field_name._ref.$id".
+        if isinstance(field, GenericReferenceField):
+            if isinstance(value, DBRef):
+                parts[-1] += '._ref'
+            elif isinstance(value, ObjectId):
+                parts[-1] += '._ref.$id'
+
         # if op and op not in COMPARISON_OPERATORS:
         if op:
             if op in GEO_OPERATORS:
                 value = _geo_operator(field, op, value)
-            elif op in CUSTOM_OPERATORS:
-                if op in ('elem_match', 'match'):
-                    value = field.prepare_query_value(op, value)
-                    value = {"$elemMatch": value}
+            elif op in ('match', 'elemMatch'):
+                ListField = _import_class('ListField')
+                EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
+                if (
+                    isinstance(value, dict) and
+                    isinstance(field, ListField) and
+                    isinstance(field.field, EmbeddedDocumentField)
+                ):
+                    value = query(field.field.document_type, **value)
                 else:
-                    NotImplementedError("Custom method '%s' has not "
-                                        "been implemented" % op)
+                    value = field.prepare_query_value(op, value)
+                value = {'$elemMatch': value}
+            elif op in CUSTOM_OPERATORS:
+                NotImplementedError('Custom method "%s" has not '
+                                    'been implemented' % op)
             elif op not in STRING_OPERATORS:
                 value = {'$' + op: value}
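With a document class, the match/elemMatch branch above recursively transforms a dict against the embedded document's own fields. A minimal sketch, assuming a hypothetical Post model and a running local MongoDB ('example_db' is an arbitrary name):

    from mongoengine import (Document, EmbeddedDocument, EmbeddedDocumentField,
                             IntField, ListField, StringField, connect)

    connect('example_db')  # assumes a running local mongod

    class Comment(EmbeddedDocument):
        author = StringField()
        votes = IntField()

    class Post(Document):
        comments = ListField(EmbeddedDocumentField(Comment))

    # Posts with at least one comment by Ross that has 5 votes; roughly
    # {'comments': {'$elemMatch': {'author': 'Ross', 'votes': 5}}}
    Post.objects(comments__match={'author': 'Ross', 'votes': 5})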
@@ -119,35 +141,42 @@ def query(_doc_cls=None, _field_operation=False, **query):
         for i, part in indices:
             parts.insert(i, part)

         key = '.'.join(parts)

         if op is None or key not in mongo_query:
             mongo_query[key] = value
         elif key in mongo_query:
-            if key in mongo_query and isinstance(mongo_query[key], dict):
+            if isinstance(mongo_query[key], dict):
                 mongo_query[key].update(value)
-                # $maxDistance needs to come last - convert to SON
+                # $max/minDistance needs to come last - convert to SON
                 value_dict = mongo_query[key]
-                if ('$maxDistance' in value_dict and '$near' in value_dict):
+                if ('$maxDistance' in value_dict or '$minDistance' in value_dict) and \
+                        ('$near' in value_dict or '$nearSphere' in value_dict):
                     value_son = SON()
-                    if isinstance(value_dict['$near'], dict):
-                        for k, v in value_dict.iteritems():
-                            if k == '$maxDistance':
-                                continue
-                            value_son[k] = v
-                        if (get_connection().max_wire_version <= 1):
-                            value_son['$maxDistance'] = value_dict[
-                                '$maxDistance']
-                        else:
-                            value_son['$near'] = SON(value_son['$near'])
-                            value_son['$near'][
-                                '$maxDistance'] = value_dict['$maxDistance']
-                    else:
-                        for k, v in value_dict.iteritems():
-                            if k == '$maxDistance':
-                                continue
-                            value_son[k] = v
-                        value_son['$maxDistance'] = value_dict['$maxDistance']
+                    for k, v in value_dict.iteritems():
+                        if k == '$maxDistance' or k == '$minDistance':
+                            continue
+                        value_son[k] = v
+                    # Required for MongoDB >= 2.6, may fail when combining
+                    # PyMongo 3+ and MongoDB < 2.6
+                    near_embedded = False
+                    for near_op in ('$near', '$nearSphere'):
+                        if isinstance(value_dict.get(near_op), dict) and (
+                                IS_PYMONGO_3 or get_connection().max_wire_version > 1):
+                            value_son[near_op] = SON(value_son[near_op])
+                            if '$maxDistance' in value_dict:
+                                value_son[near_op][
+                                    '$maxDistance'] = value_dict['$maxDistance']
+                            if '$minDistance' in value_dict:
+                                value_son[near_op][
+                                    '$minDistance'] = value_dict['$minDistance']
+                            near_embedded = True
+                    if not near_embedded:
+                        if '$maxDistance' in value_dict:
+                            value_son['$maxDistance'] = value_dict['$maxDistance']
+                        if '$minDistance' in value_dict:
+                            value_son['$minDistance'] = value_dict['$minDistance']
                     mongo_query[key] = value_son
             else:
                 # Store for manually merging later
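The reworked branch above exists because MongoDB expects $maxDistance/$minDistance to follow the $near/$nearSphere clause they modify, and a plain dict does not guarantee key order on Python 2, so the merged pairs are re-packed into a SON. The ordered shape being built looks like:

    from bson import SON

    # Ordered form the server accepts: distance modifiers come last.
    geo_query = SON([
        ('$near', [40.0, 5.0]),
        ('$maxDistance', 1000),
    ])
    print(geo_query)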
@@ -168,15 +197,16 @@ def query(_doc_cls=None, _field_operation=False, **query):


 def update(_doc_cls=None, **update):
-    """Transform an update spec from Django-style format to Mongo format.
+    """Transform an update spec from Django-style format to Mongo
+    format.
     """
     mongo_update = {}
     for key, value in update.items():
-        if key == "__raw__":
+        if key == '__raw__':
             mongo_update.update(value)
             continue
         parts = key.split('__')
-        # if there is no operator, default to "set"
+        # if there is no operator, default to 'set'
         if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS:
             parts.insert(0, 'set')
         # Check for an operator and transform to mongo-style if there is
@@ -195,17 +225,21 @@ def update(_doc_cls=None, **update):
         elif op == 'add_to_set':
             op = 'addToSet'
         elif op == 'set_on_insert':
-            op = "setOnInsert"
+            op = 'setOnInsert'

         match = None
         if parts[-1] in COMPARISON_OPERATORS:
             match = parts.pop()

+        # Allow to escape operator-like field name by __
+        if len(parts) > 1 and parts[-1] == '':
+            parts.pop()
+
         if _doc_cls:
             # Switch field names to proper names [set in Field(name='foo')]
             try:
                 fields = _doc_cls._lookup_field(parts)
-            except Exception, e:
+            except Exception as e:
                 raise InvalidQueryError(e)
             parts = []

@@ -213,7 +247,7 @@ def update(_doc_cls=None, **update):
             appended_sub_field = False
             for field in fields:
                 append_field = True
-                if isinstance(field, basestring):
+                if isinstance(field, six.string_types):
                     # Convert the S operator to $
                     if field == 'S':
                         field = '$'
@@ -234,7 +268,7 @@ def update(_doc_cls=None, **update):
             else:
                 field = cleaned_fields[-1]

-            GeoJsonBaseField = _import_class("GeoJsonBaseField")
+            GeoJsonBaseField = _import_class('GeoJsonBaseField')
             if isinstance(field, GeoJsonBaseField):
                 value = field.to_mongo(value)

@@ -248,7 +282,7 @@ def update(_doc_cls=None, **update):
                 value = [field.prepare_query_value(op, v) for v in value]
             elif field.required or value is not None:
                 value = field.prepare_query_value(op, value)
-        elif op == "unset":
+        elif op == 'unset':
             value = 1

         if match:
@@ -258,16 +292,16 @@ def update(_doc_cls=None, **update):
         key = '.'.join(parts)

         if not op:
-            raise InvalidQueryError("Updates must supply an operation "
-                                    "eg: set__FIELD=value")
+            raise InvalidQueryError('Updates must supply an operation '
+                                    'eg: set__FIELD=value')

         if 'pull' in op and '.' in key:
             # Dot operators don't work on pull operations
             # unless they point to a list field
             # Otherwise it uses nested dict syntax
             if op == 'pullAll':
-                raise InvalidQueryError("pullAll operations only support "
-                                        "a single field depth")
+                raise InvalidQueryError('pullAll operations only support '
+                                        'a single field depth')

             # Look for the last list field and use dot notation until there
             field_classes = [c.__class__ for c in cleaned_fields]
@@ -278,7 +312,7 @@ def update(_doc_cls=None, **update):
             # Then process as normal
             last_listField = len(
                 cleaned_fields) - field_classes.index(ListField)
-            key = ".".join(parts[:last_listField])
+            key = '.'.join(parts[:last_listField])
             parts = parts[last_listField:]
             parts.insert(0, key)

@@ -286,7 +320,7 @@ def update(_doc_cls=None, **update):
             for key in parts:
                 value = {key: value}
         elif op == 'addToSet' and isinstance(value, list):
-            value = {key: {"$each": value}}
+            value = {key: {'$each': value}}
         else:
             value = {key: value}
         key = '$' + op
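Like query(), update() can be probed without a document class. A quick sketch of the transform defined above:

    from mongoengine.queryset import transform

    # Django-style update kwargs become $-operator documents.
    print(transform.update(set__name='Bob'))  # {'$set': {'name': 'Bob'}}
    print(transform.update(inc__count=1))     # {'$inc': {'count': 1}}
    print(transform.update(name='Bob'))       # no operator defaults to $set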
@@ -300,73 +334,82 @@ def update(_doc_cls=None, **update):


 def _geo_operator(field, op, value):
-    """Helper to return the query for a given geo query"""
-    if field._geo_index == pymongo.GEO2D:
-        if op == "within_distance":
+    """Helper to return the query for a given geo query."""
+    if op == 'max_distance':
+        value = {'$maxDistance': value}
+    elif op == 'min_distance':
+        value = {'$minDistance': value}
+    elif field._geo_index == pymongo.GEO2D:
+        if op == 'within_distance':
             value = {'$within': {'$center': value}}
-        elif op == "within_spherical_distance":
+        elif op == 'within_spherical_distance':
             value = {'$within': {'$centerSphere': value}}
-        elif op == "within_polygon":
+        elif op == 'within_polygon':
             value = {'$within': {'$polygon': value}}
-        elif op == "near":
+        elif op == 'near':
             value = {'$near': value}
-        elif op == "near_sphere":
+        elif op == 'near_sphere':
             value = {'$nearSphere': value}
         elif op == 'within_box':
             value = {'$within': {'$box': value}}
-        elif op == "max_distance":
-            value = {'$maxDistance': value}
         else:
-            raise NotImplementedError("Geo method '%s' has not "
-                                      "been implemented for a GeoPointField" % op)
+            raise NotImplementedError('Geo method "%s" has not been '
+                                      'implemented for a GeoPointField' % op)
     else:
-        if op == "geo_within":
-            value = {"$geoWithin": _infer_geometry(value)}
-        elif op == "geo_within_box":
-            value = {"$geoWithin": {"$box": value}}
-        elif op == "geo_within_polygon":
-            value = {"$geoWithin": {"$polygon": value}}
-        elif op == "geo_within_center":
-            value = {"$geoWithin": {"$center": value}}
-        elif op == "geo_within_sphere":
-            value = {"$geoWithin": {"$centerSphere": value}}
-        elif op == "geo_intersects":
-            value = {"$geoIntersects": _infer_geometry(value)}
-        elif op == "near":
+        if op == 'geo_within':
+            value = {'$geoWithin': _infer_geometry(value)}
+        elif op == 'geo_within_box':
+            value = {'$geoWithin': {'$box': value}}
+        elif op == 'geo_within_polygon':
+            value = {'$geoWithin': {'$polygon': value}}
+        elif op == 'geo_within_center':
+            value = {'$geoWithin': {'$center': value}}
+        elif op == 'geo_within_sphere':
+            value = {'$geoWithin': {'$centerSphere': value}}
+        elif op == 'geo_intersects':
+            value = {'$geoIntersects': _infer_geometry(value)}
+        elif op == 'near':
             value = {'$near': _infer_geometry(value)}
-        elif op == "max_distance":
-            value = {'$maxDistance': value}
         else:
-            raise NotImplementedError("Geo method '%s' has not "
-                                      "been implemented for a %s " % (op, field._name))
+            raise NotImplementedError(
                'Geo method "%s" has not been implemented for a %s '
                % (op, field._name)
            )
     return value

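After this change max_distance and min_distance are resolved up front, for 2d and 2dsphere indexes alike. A hedged usage sketch (Place is hypothetical and assumes a running local MongoDB):

    from mongoengine import Document, PointField, connect

    connect('example_db')  # assumes a running local mongod

    class Place(Document):
        location = PointField()  # creates a 2dsphere index

    # max_distance becomes $maxDistance and is merged into the $near
    # clause per the SON re-packing shown earlier.
    Place.objects(location__near=[40.0, 5.0], location__max_distance=1000)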
 def _infer_geometry(value):
-    """Helper method that tries to infer the $geometry shape for a given value"""
+    """Helper method that tries to infer the $geometry shape for a
+    given value.
+    """
     if isinstance(value, dict):
-        if "$geometry" in value:
+        if '$geometry' in value:
             return value
         elif 'coordinates' in value and 'type' in value:
-            return {"$geometry": value}
-        raise InvalidQueryError("Invalid $geometry dictionary should have "
-                                "type and coordinates keys")
+            return {'$geometry': value}
+        raise InvalidQueryError('Invalid $geometry dictionary should have '
+                                'type and coordinates keys')
     elif isinstance(value, (list, set)):
+        # TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon?
+        # TODO: should both TypeError and IndexError be alike interpreted?

         try:
             value[0][0][0]
-            return {"$geometry": {"type": "Polygon", "coordinates": value}}
-        except:
+            return {'$geometry': {'type': 'Polygon', 'coordinates': value}}
+        except (TypeError, IndexError):
             pass
+
         try:
             value[0][0]
-            return {"$geometry": {"type": "LineString", "coordinates": value}}
-        except:
+            return {'$geometry': {'type': 'LineString', 'coordinates': value}}
+        except (TypeError, IndexError):
             pass
+
         try:
             value[0]
-            return {"$geometry": {"type": "Point", "coordinates": value}}
-        except:
+            return {'$geometry': {'type': 'Point', 'coordinates': value}}
+        except (TypeError, IndexError):
             pass

-    raise InvalidQueryError("Invalid $geometry data. Can be either a dictionary "
-                            "or (nested) lists of coordinate(s)")
+    raise InvalidQueryError('Invalid $geometry data. Can be either a '
+                            'dictionary or (nested) lists of coordinate(s)')
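The nesting-depth probes above read directly: one level of list nesting is a Point, two a LineString, three a Polygon. A quick check of the helper (a private function, so subject to change):

    from mongoengine.queryset.transform import _infer_geometry

    print(_infer_geometry([40.0, 5.0]))
    # {'$geometry': {'type': 'Point', 'coordinates': [40.0, 5.0]}}

    print(_infer_geometry([[40.0, 5.0], [41.0, 6.0]]))
    # {'$geometry': {'type': 'LineString',
    #                'coordinates': [[40.0, 5.0], [41.0, 6.0]]}}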
@@ -1,8 +1,5 @@
 import copy

-from itertools import product
-from functools import reduce
-
 from mongoengine.errors import InvalidQueryError
 from mongoengine.queryset import transform

@@ -72,9 +69,9 @@ class QueryCompilerVisitor(QNodeVisitor):
         self.document = document

     def visit_combination(self, combination):
-        operator = "$and"
+        operator = '$and'
         if combination.operation == combination.OR:
-            operator = "$or"
+            operator = '$or'
         return {operator: combination.children}

     def visit_query(self, query):
@@ -82,8 +79,7 @@ class QueryCompilerVisitor(QNodeVisitor):


 class QNode(object):
-    """Base class for nodes in query trees.
-    """
+    """Base class for nodes in query trees."""

     AND = 0
     OR = 1
@@ -97,7 +93,8 @@ class QNode(object):
         raise NotImplementedError

     def _combine(self, other, operation):
-        """Combine this node with another node into a QCombination object.
+        """Combine this node with another node into a QCombination
+        object.
         """
         if getattr(other, 'empty', True):
             return self
@@ -119,8 +116,8 @@ class QNode(object):


 class QCombination(QNode):
-    """Represents the combination of several conditions by a given logical
-    operator.
+    """Represents the combination of several conditions by a given
+    logical operator.
     """

     def __init__(self, operation, children):
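QCombination is what the Q-object operators build under the hood, and QueryCompilerVisitor turns it into $and/$or. A minimal sketch, assuming a hypothetical Person model and a running local MongoDB:

    from mongoengine import Document, IntField, Q, StringField, connect

    connect('example_db')  # assumes a running local mongod

    class Person(Document):
        name = StringField()
        age = IntField()

    # '|' builds a QCombination with operation OR, compiled to $or.
    q = Q(age__gte=18) | Q(name='Bob')
    print(q.to_query(Person))
    # {'$or': [{'age': {'$gte': 18}}, {'name': 'Bob'}]}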
@@ -1,11 +1,10 @@
 # -*- coding: utf-8 -*-

-__all__ = ['pre_init', 'post_init', 'pre_save', 'pre_save_post_validation',
-           'post_save', 'pre_delete', 'post_delete']
+__all__ = ('pre_init', 'post_init', 'pre_save', 'pre_save_post_validation',
+           'post_save', 'pre_delete', 'post_delete')

 signals_available = False
 try:
     from blinker import Namespace
+
     signals_available = True
 except ImportError:
     class Namespace(object):
@@ -27,11 +26,13 @@ except ImportError:
             raise RuntimeError('signalling support is unavailable '
                                'because the blinker library is '
                                'not installed.')
-        send = lambda *a, **kw: None
+
+        send = lambda *a, **kw: None  # noqa
         connect = disconnect = has_receivers_for = receivers_for = \
             temporarily_connected_to = _fail
         del _fail


 # the namespace for code signals. If you are not mongoengine code, do
 # not put signals in here. Create your own namespace instead.
 _signals = Namespace()
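With blinker installed the namespace above is real and handlers fire; without it, connecting raises the RuntimeError from _fail. A typical hookup, following the pattern from the mongoengine signals docs:

    from mongoengine import Document, StringField, signals

    class Author(Document):
        name = StringField()

    def pre_save_handler(sender, document, **kwargs):
        # Runs just before each Author save (requires blinker).
        print('saving %s' % document.name)

    signals.pre_save.connect(pre_save_handler, sender=Author)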
@@ -1,2 +1,5 @@
-pymongo>=2.7.1
 nose
+pymongo>=2.7.1
+six==1.10.0
+flake8
+flake8-import-order
setup.cfg
@@ -1,11 +1,11 @@
 [nosetests]
-verbosity = 3
-detailed-errors = 1
-#with-coverage = 1
-#cover-erase = 1
-#cover-html = 1
-#cover-html-dir = ../htmlcov
-#cover-package = mongoengine
-py3where = build
-where = tests
-#tests = document/__init__.py
+verbosity=2
+detailed-errors=1
+tests=tests
+cover-package=mongoengine
+
+[flake8]
+ignore=E501,F401,F403,F405,I201
+exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests
+max-complexity=45
+application-import-names=mongoengine,tests
setup.py
@@ -1,6 +1,6 @@
 import os
 import sys
-from setuptools import setup, find_packages
+from setuptools import find_packages, setup

 # Hack to silence atexit traceback in newer python versions
 try:
@@ -8,20 +8,25 @@ try:
 except ImportError:
     pass

-DESCRIPTION = 'MongoEngine is a Python Object-Document ' + \
-    'Mapper for working with MongoDB.'
-LONG_DESCRIPTION = None
+DESCRIPTION = (
+    'MongoEngine is a Python Object-Document '
+    'Mapper for working with MongoDB.'
+)
+
 try:
-    LONG_DESCRIPTION = open('README.rst').read()
-except:
-    pass
+    with open('README.rst') as fin:
+        LONG_DESCRIPTION = fin.read()
+except Exception:
+    LONG_DESCRIPTION = None


 def get_version(version_tuple):
-    if not isinstance(version_tuple[-1], int):
-        return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1]
+    """Return the version tuple as a string, e.g. for (0, 10, 7),
+    return '0.10.7'.
+    """
     return '.'.join(map(str, version_tuple))


 # Dirty hack to get version number from monogengine/__init__.py - we can't
 # import it as it depends on PyMongo and PyMongo isn't installed until this
 # file is read
@@ -29,7 +34,6 @@ init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py')
 version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0]

 VERSION = get_version(eval(version_line.split('=')[-1]))
-print(VERSION)

 CLASSIFIERS = [
     'Development Status :: 4 - Beta',
@@ -38,46 +42,46 @@ CLASSIFIERS = [
     'Operating System :: OS Independent',
     'Programming Language :: Python',
     "Programming Language :: Python :: 2",
     "Programming Language :: Python :: 2.6",
     "Programming Language :: Python :: 2.7",
     "Programming Language :: Python :: 3",
     "Programming Language :: Python :: 3.2",
     "Programming Language :: Python :: 3.3",
     "Programming Language :: Python :: 3.4",
     "Programming Language :: Python :: 3.5",
     "Programming Language :: Python :: Implementation :: CPython",
     "Programming Language :: Python :: Implementation :: PyPy",
     'Topic :: Database',
     'Topic :: Software Development :: Libraries :: Python Modules',
 ]

-extra_opts = {"packages": find_packages(exclude=["tests", "tests.*"])}
+extra_opts = {
+    'packages': find_packages(exclude=['tests', 'tests.*']),
+    'tests_require': ['nose', 'coverage==4.2', 'blinker', 'Pillow>=2.0.0']
+}
 if sys.version_info[0] == 3:
     extra_opts['use_2to3'] = True
-    extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2==2.6', 'Pillow>=2.0.0', 'django>=1.5.1']
-    if "test" in sys.argv or "nosetests" in sys.argv:
+    if 'test' in sys.argv or 'nosetests' in sys.argv:
         extra_opts['packages'] = find_packages()
-        extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]}
+        extra_opts['package_data'] = {
+            'tests': ['fields/mongoengine.png', 'fields/mongodb_leaf.png']}
 else:
-    extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'Pillow>=2.0.0', 'jinja2>=2.6', 'python-dateutil']
+    extra_opts['tests_require'] += ['python-dateutil']

 if sys.version_info[0] == 2 and sys.version_info[1] == 6:
     extra_opts['tests_require'].append('unittest2')

-setup(name='mongoengine',
-      version=VERSION,
-      author='Harry Marr',
-      author_email='harry.marr@{nospam}gmail.com',
-      maintainer="Ross Lawley",
-      maintainer_email="ross.lawley@{nospam}gmail.com",
-      url='http://mongoengine.org/',
-      download_url='https://github.com/MongoEngine/mongoengine/tarball/master',
-      license='MIT',
-      include_package_data=True,
-      description=DESCRIPTION,
-      long_description=LONG_DESCRIPTION,
-      platforms=['any'],
-      classifiers=CLASSIFIERS,
-      install_requires=['pymongo>=2.7.1'],
-      test_suite='nose.collector',
-      **extra_opts
+setup(
+    name='mongoengine',
+    version=VERSION,
+    author='Harry Marr',
+    author_email='harry.marr@{nospam}gmail.com',
+    maintainer="Ross Lawley",
+    maintainer_email="ross.lawley@{nospam}gmail.com",
+    url='http://mongoengine.org/',
+    download_url='https://github.com/MongoEngine/mongoengine/tarball/master',
+    license='MIT',
+    include_package_data=True,
+    description=DESCRIPTION,
+    long_description=LONG_DESCRIPTION,
+    platforms=['any'],
+    classifiers=CLASSIFIERS,
+    install_requires=['pymongo>=2.7.1', 'six'],
+    test_suite='nose.collector',
+    **extra_opts
 )
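The rewritten get_version only dot-joins the tuple, dropping the old special case for a trailing string element. A quick check of the behavioural difference (standalone copy of the new function):

    def get_version(version_tuple):
        # New behaviour: every element is stringified and dot-joined.
        return '.'.join(map(str, version_tuple))

    assert get_version((0, 10, 7)) == '0.10.7'
    # A suffix tuple such as (0, 10, 7, 'rc1') now yields '0.10.7.rc1',
    # where the old code produced '0.10.7rc1'.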
@@ -2,4 +2,3 @@ from all_warnings import AllWarnings
 from document import *
 from queryset import *
 from fields import *
-from migration import *
@@ -3,8 +3,6 @@ This test has been put into a module. This is because it tests warnings that
 only get triggered on first hit. This way we can ensure its imported into the
 top level and called first by the test suite.
 """
-import sys
-sys.path[0:0] = [""]
 import unittest
 import warnings

@@ -1,5 +1,3 @@
-import sys
-sys.path[0:0] = [""]
 import unittest

 from class_methods import *
@@ -1,6 +1,4 @@
 # -*- coding: utf-8 -*-
-import sys
-sys.path[0:0] = [""]
 import unittest

 from mongoengine import *
@@ -1,6 +1,4 @@
 # -*- coding: utf-8 -*-
-import sys
-sys.path[0:0] = [""]
 import unittest

 from bson import SON
@@ -93,6 +91,7 @@ class DeltaTest(unittest.TestCase):
     def delta_recursive(self, DocClass, EmbeddedClass):

         class Embedded(EmbeddedClass):
+            id = StringField()
             string_field = StringField()
             int_field = IntField()
             dict_field = DictField()
@@ -114,6 +113,7 @@ class DeltaTest(unittest.TestCase):
         self.assertEqual(doc._delta(), ({}, {}))

         embedded_1 = Embedded()
+        embedded_1.id = "010101"
         embedded_1.string_field = 'hello'
         embedded_1.int_field = 1
         embedded_1.dict_field = {'hello': 'world'}
@@ -123,6 +123,7 @@ class DeltaTest(unittest.TestCase):
         self.assertEqual(doc._get_changed_fields(), ['embedded_field'])

         embedded_delta = {
+            'id': "010101",
             'string_field': 'hello',
             'int_field': 1,
             'dict_field': {'hello': 'world'},
@@ -250,13 +251,13 @@ class DeltaTest(unittest.TestCase):
         self.assertEqual(doc.embedded_field.list_field[2].list_field,
                          [1, 2, {'hello': 'world'}])

-        del(doc.embedded_field.list_field[2].list_field[2]['hello'])
+        del doc.embedded_field.list_field[2].list_field[2]['hello']
         self.assertEqual(doc._delta(),
                          ({}, {'embedded_field.list_field.2.list_field.2.hello': 1}))
         doc.save()
         doc = doc.reload(10)

-        del(doc.embedded_field.list_field[2].list_field)
+        del doc.embedded_field.list_field[2].list_field
         self.assertEqual(doc._delta(),
                          ({}, {'embedded_field.list_field.2.list_field': 1}))
@@ -590,13 +591,13 @@ class DeltaTest(unittest.TestCase):
         self.assertEqual(doc.embedded_field.list_field[2].list_field,
                          [1, 2, {'hello': 'world'}])

-        del(doc.embedded_field.list_field[2].list_field[2]['hello'])
+        del doc.embedded_field.list_field[2].list_field[2]['hello']
         self.assertEqual(doc._delta(),
                          ({}, {'db_embedded_field.db_list_field.2.db_list_field.2.hello': 1}))
         doc.save()
         doc = doc.reload(10)

-        del(doc.embedded_field.list_field[2].list_field)
+        del doc.embedded_field.list_field[2].list_field
         self.assertEqual(doc._delta(), ({},
                          {'db_embedded_field.db_list_field.2.db_list_field': 1}))

@@ -612,7 +613,7 @@ class DeltaTest(unittest.TestCase):
             SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))

         p.doc = 123
-        del(p.doc)
+        del p.doc
         self.assertEqual(p._delta(), (
             SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))
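These tests exercise _delta(), which returns a (sets, unsets) pair that mongoengine turns into $set/$unset on save. A rough, hypothetical sketch of the shape (internal API, assumes a running local MongoDB):

    from mongoengine import Document, IntField, StringField, connect

    connect('example_db')  # assumes a running local mongod

    class Person(Document):
        name = StringField()
        age = IntField()

    p = Person(name='Ross', age=34).save()
    p.name = 'James'
    del p.age
    # First dict feeds $set, second feeds $unset.
    print(p._delta())  # ({'name': 'James'}, {'age': 1})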
@@ -732,6 +733,56 @@ class DeltaTest(unittest.TestCase):
|
||||
mydoc._clear_changed_fields()
|
||||
self.assertEqual([], mydoc._get_changed_fields())
|
||||
|
||||
def test_lower_level_mark_as_changed(self):
|
||||
class EmbeddedDoc(EmbeddedDocument):
|
||||
name = StringField()
|
||||
|
||||
class MyDoc(Document):
|
||||
subs = MapField(EmbeddedDocumentField(EmbeddedDoc))
|
||||
|
||||
MyDoc.drop_collection()
|
||||
|
||||
MyDoc().save()
|
||||
|
||||
mydoc = MyDoc.objects.first()
|
||||
mydoc.subs['a'] = EmbeddedDoc()
|
||||
self.assertEqual(["subs.a"], mydoc._get_changed_fields())
|
||||
|
||||
subdoc = mydoc.subs['a']
|
||||
subdoc.name = 'bar'
|
||||
|
||||
self.assertEqual(["name"], subdoc._get_changed_fields())
|
||||
self.assertEqual(["subs.a"], mydoc._get_changed_fields())
|
||||
mydoc.save()
|
||||
|
||||
mydoc._clear_changed_fields()
|
||||
self.assertEqual([], mydoc._get_changed_fields())
|
||||
|
||||
def test_upper_level_mark_as_changed(self):
|
||||
class EmbeddedDoc(EmbeddedDocument):
|
||||
name = StringField()
|
||||
|
||||
class MyDoc(Document):
|
||||
subs = MapField(EmbeddedDocumentField(EmbeddedDoc))
|
||||
|
||||
MyDoc.drop_collection()
|
||||
|
||||
MyDoc(subs={'a': EmbeddedDoc(name='foo')}).save()
|
||||
|
||||
mydoc = MyDoc.objects.first()
|
||||
subdoc = mydoc.subs['a']
|
||||
subdoc.name = 'bar'
|
||||
|
||||
self.assertEqual(["name"], subdoc._get_changed_fields())
|
||||
self.assertEqual(["subs.a.name"], mydoc._get_changed_fields())
|
||||
|
||||
mydoc.subs['a'] = EmbeddedDoc()
|
||||
self.assertEqual(["subs.a"], mydoc._get_changed_fields())
|
||||
mydoc.save()
|
||||
|
||||
mydoc._clear_changed_fields()
|
||||
self.assertEqual([], mydoc._get_changed_fields())
|
||||
|
||||
def test_referenced_object_changed_attributes(self):
|
||||
"""Ensures that when you save a new reference to a field, the referenced object isn't altered"""
|
||||
|
||||
@@ -774,5 +825,43 @@ class DeltaTest(unittest.TestCase):
|
||||
org2.reload()
|
||||
self.assertEqual(org2.name, 'New Org 2')
|
||||
|
||||
def test_delta_for_nested_map_fields(self):
|
||||
class UInfoDocument(Document):
|
||||
phone = StringField()
|
||||
|
||||
class EmbeddedRole(EmbeddedDocument):
|
||||
type = StringField()
|
||||
|
||||
class EmbeddedUser(EmbeddedDocument):
|
||||
name = StringField()
|
||||
roles = MapField(field=EmbeddedDocumentField(EmbeddedRole))
|
||||
rolist = ListField(field=EmbeddedDocumentField(EmbeddedRole))
|
||||
info = ReferenceField(UInfoDocument)
|
||||
|
||||
class Doc(Document):
|
||||
users = MapField(field=EmbeddedDocumentField(EmbeddedUser))
|
||||
num = IntField(default=-1)
|
||||
|
||||
Doc.drop_collection()
|
||||
|
||||
doc = Doc(num=1)
|
||||
doc.users["007"] = EmbeddedUser(name="Agent007")
|
||||
doc.save()
|
||||
|
||||
uinfo = UInfoDocument(phone="79089269066")
|
||||
uinfo.save()
|
||||
|
||||
d = Doc.objects(num=1).first()
|
||||
d.users["007"]["roles"]["666"] = EmbeddedRole(type="superadmin")
|
||||
d.users["007"]["rolist"].append(EmbeddedRole(type="oops"))
|
||||
d.users["007"]["info"] = uinfo
|
||||
delta = d._delta()
|
||||
self.assertEqual(True, "users.007.roles.666" in delta[0])
|
||||
self.assertEqual(True, "users.007.rolist" in delta[0])
|
||||
self.assertEqual(True, "users.007.info" in delta[0])
|
||||
self.assertEqual('superadmin', delta[0]["users.007.roles.666"]["type"])
|
||||
self.assertEqual('oops', delta[0]["users.007.rolist"][0]["type"])
|
||||
self.assertEqual(uinfo.id, delta[0]["users.007.info"])
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
@@ -1,6 +1,4 @@
|
||||
import unittest
|
||||
import sys
|
||||
sys.path[0:0] = [""]
|
||||
|
||||
from mongoengine import *
|
||||
from mongoengine.connection import get_db
|
||||
@@ -72,7 +70,7 @@ class DynamicTest(unittest.TestCase):
|
||||
obj = collection.find_one()
|
||||
self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name'])
|
||||
|
||||
del(p.misc)
|
||||
del p.misc
|
||||
p.save()
|
||||
|
||||
p = self.Person.objects.get()
|
||||
@@ -88,6 +86,18 @@ class DynamicTest(unittest.TestCase):
|
||||
p.update(unset__misc=1)
|
||||
p.reload()
|
||||
|
||||
def test_reload_dynamic_field(self):
|
||||
self.Person.objects.delete()
|
||||
p = self.Person.objects.create()
|
||||
p.update(age=1)
|
||||
|
||||
self.assertEqual(len(p._data), 3)
|
||||
self.assertEqual(sorted(p._data.keys()), ['_cls', 'id', 'name'])
|
||||
|
||||
p.reload()
|
||||
self.assertEqual(len(p._data), 4)
|
||||
self.assertEqual(sorted(p._data.keys()), ['_cls', 'age', 'id', 'name'])
|
||||
|
||||
def test_dynamic_document_queries(self):
|
||||
"""Ensure we can query dynamic fields"""
|
||||
p = self.Person()
|
||||
@@ -129,6 +139,13 @@ class DynamicTest(unittest.TestCase):
|
||||
|
||||
self.assertEqual(1, self.Person.objects(misc__hello='world').count())
|
||||
|
||||
def test_three_level_complex_data_lookups(self):
|
||||
"""Ensure you can query three level document dynamic fields"""
|
||||
p = self.Person.objects.create(
|
||||
misc={'hello': {'hello2': 'world'}}
|
||||
)
|
||||
self.assertEqual(1, self.Person.objects(misc__hello__hello2='world').count())
|
||||
|
||||
def test_complex_embedded_document_validation(self):
|
||||
"""Ensure embedded dynamic documents may be validated"""
|
||||
class Embedded(DynamicEmbeddedDocument):
|
||||
@@ -331,7 +348,7 @@ class DynamicTest(unittest.TestCase):
|
||||
person = Person.objects.first()
|
||||
person.attrval = "This works"
|
||||
|
||||
person["phone"] = "555-1212" # but this should too
|
||||
person["phone"] = "555-1212" # but this should too
|
||||
|
||||
# Same thing two levels deep
|
||||
person["address"]["city"] = "Lundenne"
|
||||
@@ -347,7 +364,6 @@ class DynamicTest(unittest.TestCase):
|
||||
|
||||
self.assertEqual(Person.objects.first().address.city, "Londinium")
|
||||
|
||||
|
||||
person = Person.objects.first()
|
||||
person["age"] = 35
|
||||
person.save()
|
||||
|
@@ -1,9 +1,8 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import unittest
|
||||
import sys
|
||||
sys.path[0:0] = [""]
|
||||
|
||||
import os
|
||||
|
||||
import pymongo
|
||||
|
||||
from nose.plugins.skip import SkipTest
|
||||
@@ -32,10 +31,7 @@ class IndexesTest(unittest.TestCase):
|
||||
self.Person = Person
|
||||
|
||||
def tearDown(self):
|
||||
for collection in self.db.collection_names():
|
||||
if 'system.' in collection:
|
||||
continue
|
||||
self.db.drop_collection(collection)
|
||||
self.connection.drop_database(self.db)
|
||||
|
||||
def test_indexes_document(self):
|
||||
"""Ensure that indexes are used when meta[indexes] is specified for
|
||||
@@ -143,7 +139,7 @@ class IndexesTest(unittest.TestCase):
|
||||
meta = {
|
||||
'indexes': [
|
||||
{
|
||||
'fields': ('title',),
|
||||
'fields': ('title',),
|
||||
},
|
||||
],
|
||||
'allow_inheritance': True,
|
||||
@@ -275,6 +271,60 @@ class IndexesTest(unittest.TestCase):
|
||||
info = [value['key'] for key, value in info.iteritems()]
|
||||
self.assertTrue([('current.location.point', '2d')] in info)
|
||||
|
||||
def test_explicit_geosphere_index(self):
|
||||
"""Ensure that geosphere indexes work when created via meta[indexes]
|
||||
"""
|
||||
class Place(Document):
|
||||
location = DictField()
|
||||
meta = {
|
||||
'allow_inheritance': True,
|
||||
'indexes': [
|
||||
'(location.point',
|
||||
]
|
||||
}
|
||||
|
||||
self.assertEqual([{'fields': [('location.point', '2dsphere')]}],
|
||||
Place._meta['index_specs'])
|
||||
|
||||
Place.ensure_indexes()
|
||||
info = Place._get_collection().index_information()
|
||||
info = [value['key'] for key, value in info.iteritems()]
|
||||
self.assertTrue([('location.point', '2dsphere')] in info)
|
||||
|
||||
def test_explicit_geohaystack_index(self):
|
||||
"""Ensure that geohaystack indexes work when created via meta[indexes]
|
||||
"""
|
||||
raise SkipTest('GeoHaystack index creation is not supported for now'
|
||||
'from meta, as it requires a bucketSize parameter.')
|
||||
|
||||
class Place(Document):
|
||||
location = DictField()
|
||||
name = StringField()
|
||||
meta = {
|
||||
'indexes': [
|
||||
(')location.point', 'name')
|
||||
]
|
||||
}
|
||||
self.assertEqual([{'fields': [('location.point', 'geoHaystack'), ('name', 1)]}],
|
||||
Place._meta['index_specs'])
|
||||
|
||||
Place.ensure_indexes()
|
||||
info = Place._get_collection().index_information()
|
||||
info = [value['key'] for key, value in info.iteritems()]
|
||||
self.assertTrue([('location.point', 'geoHaystack')] in info)
|
||||
|
||||
def test_create_geohaystack_index(self):
|
||||
"""Ensure that geohaystack indexes can be created
|
||||
"""
|
||||
class Place(Document):
|
||||
location = DictField()
|
||||
name = StringField()
|
||||
|
||||
Place.create_index({'fields': (')location.point', 'name')}, bucketSize=10)
|
||||
info = Place._get_collection().index_information()
|
||||
info = [value['key'] for key, value in info.iteritems()]
|
||||
self.assertTrue([('location.point', 'geoHaystack'), ('name', 1)] in info)
|
||||
|
||||
def test_dictionary_indexes(self):
|
||||
"""Ensure that indexes are used when meta[indexes] contains
|
||||
dictionaries instead of lists.
|
||||
@@ -432,6 +482,7 @@ class IndexesTest(unittest.TestCase):
|
||||
|
||||
class Test(Document):
|
||||
a = IntField()
|
||||
b = IntField()
|
||||
|
||||
meta = {
|
||||
'indexes': ['a'],
|
||||
@@ -443,16 +494,36 @@ class IndexesTest(unittest.TestCase):
|
||||
obj = Test(a=1)
|
||||
obj.save()
|
||||
|
||||
connection = get_connection()
|
||||
IS_MONGODB_3 = connection.server_info()['versionArray'][0] >= 3
|
||||
|
||||
# Need to be explicit about covered indexes as mongoDB doesn't know if
|
||||
# the documents returned might have more keys in that here.
|
||||
query_plan = Test.objects(id=obj.id).exclude('a').explain()
|
||||
self.assertFalse(query_plan['indexOnly'])
|
||||
if not IS_MONGODB_3:
|
||||
self.assertFalse(query_plan['indexOnly'])
|
||||
else:
|
||||
self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), 'IDHACK')
|
||||
|
||||
query_plan = Test.objects(id=obj.id).only('id').explain()
|
||||
self.assertTrue(query_plan['indexOnly'])
|
||||
if not IS_MONGODB_3:
|
||||
self.assertTrue(query_plan['indexOnly'])
|
||||
else:
|
||||
self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), 'IDHACK')
|
||||
|
||||
query_plan = Test.objects(a=1).only('a').exclude('id').explain()
|
||||
self.assertTrue(query_plan['indexOnly'])
|
||||
if not IS_MONGODB_3:
|
||||
self.assertTrue(query_plan['indexOnly'])
|
||||
else:
|
||||
self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), 'IXSCAN')
|
||||
self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('stage'), 'PROJECTION')
|
||||
|
||||
query_plan = Test.objects(a=1).explain()
|
||||
if not IS_MONGODB_3:
|
||||
self.assertFalse(query_plan['indexOnly'])
|
||||
else:
|
||||
self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), 'IXSCAN')
|
||||
self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('stage'), 'FETCH')
|
||||
|
||||
def test_index_on_id(self):
|
||||
|
||||
@@ -485,25 +556,28 @@ class IndexesTest(unittest.TestCase):
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
for i in xrange(0, 10):
|
||||
tags = [("tag %i" % n) for n in xrange(0, i % 2)]
|
||||
for i in range(0, 10):
|
||||
tags = [("tag %i" % n) for n in range(0, i % 2)]
|
||||
BlogPost(tags=tags).save()
|
||||
|
||||
self.assertEqual(BlogPost.objects.count(), 10)
|
||||
self.assertEqual(BlogPost.objects.hint().count(), 10)
|
||||
self.assertEqual(BlogPost.objects.hint([('tags', 1)]).count(), 10)
|
||||
|
||||
self.assertEqual(BlogPost.objects.hint([('ZZ', 1)]).count(), 10)
|
||||
# PyMongo 3.0 bug only, works correctly with 2.X and 3.0.1+ versions
|
||||
if pymongo.version != '3.0':
|
||||
self.assertEqual(BlogPost.objects.hint([('tags', 1)]).count(), 10)
|
||||
|
||||
self.assertEqual(BlogPost.objects.hint([('ZZ', 1)]).count(), 10)
|
||||
|
||||
if pymongo.version >= '2.8':
|
||||
self.assertEqual(BlogPost.objects.hint('tags').count(), 10)
|
||||
else:
|
||||
def invalid_index():
|
||||
BlogPost.objects.hint('tags')
|
||||
BlogPost.objects.hint('tags').next()
|
||||
self.assertRaises(TypeError, invalid_index)
|
||||
|
||||
def invalid_index_2():
|
||||
return BlogPost.objects.hint(('tags', 1))
|
||||
return BlogPost.objects.hint(('tags', 1)).next()
|
||||
self.assertRaises(Exception, invalid_index_2)
|
||||
|
||||
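The hint changes above reflect a PyMongo behaviour difference: with PyMongo 3 an invalid hint only fails once the cursor is evaluated, hence the added .next() calls. Typical usage of the forms the test accepts (BlogPost is the hypothetical model from the test):

    # Force the query planner onto the tags index.
    BlogPost.objects.hint([('tags', 1)]).count()

    # On PyMongo 2.8+ a bare index name also works.
    BlogPost.objects.hint('tags').count()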
def test_unique(self):
|
||||
@@ -744,33 +818,34 @@ class IndexesTest(unittest.TestCase):
|
||||
name = StringField(required=True)
|
||||
term = StringField(required=True)
|
||||
|
||||
class Report(Document):
|
||||
class ReportEmbedded(Document):
|
||||
key = EmbeddedDocumentField(CompoundKey, primary_key=True)
|
||||
text = StringField()
|
||||
|
||||
Report.drop_collection()
|
||||
|
||||
my_key = CompoundKey(name="n", term="ok")
|
||||
report = Report(text="OK", key=my_key).save()
|
||||
report = ReportEmbedded(text="OK", key=my_key).save()
|
||||
|
||||
self.assertEqual({'text': 'OK', '_id': {'term': 'ok', 'name': 'n'}},
|
||||
report.to_mongo())
|
||||
self.assertEqual(report, Report.objects.get(pk=my_key))
|
||||
self.assertEqual(report, ReportEmbedded.objects.get(pk=my_key))
|
||||
|
||||
def test_compound_key_dictfield(self):
|
||||
|
||||
class Report(Document):
|
||||
class ReportDictField(Document):
|
||||
key = DictField(primary_key=True)
|
||||
text = StringField()
|
||||
|
||||
Report.drop_collection()
|
||||
|
||||
my_key = {"name": "n", "term": "ok"}
|
||||
report = Report(text="OK", key=my_key).save()
|
||||
report = ReportDictField(text="OK", key=my_key).save()
|
||||
|
||||
self.assertEqual({'text': 'OK', '_id': {'term': 'ok', 'name': 'n'}},
|
||||
report.to_mongo())
|
||||
self.assertEqual(report, Report.objects.get(pk=my_key))
|
||||
|
||||
# We can't directly call ReportDictField.objects.get(pk=my_key),
|
||||
# because dicts are unordered, and if the order in MongoDB is
|
||||
# different than the one in `my_key`, this test will fail.
|
||||
self.assertEqual(report, ReportDictField.objects.get(pk__name=my_key['name']))
|
||||
self.assertEqual(report, ReportDictField.objects.get(pk__term=my_key['term']))
|
||||
|
||||
def test_string_indexes(self):
|
||||
|
||||
@@ -785,6 +860,20 @@ class IndexesTest(unittest.TestCase):
|
||||
self.assertTrue([('provider_ids.foo', 1)] in info)
|
||||
self.assertTrue([('provider_ids.bar', 1)] in info)
|
||||
|
||||
def test_sparse_compound_indexes(self):
|
||||
|
||||
class MyDoc(Document):
|
||||
provider_ids = DictField()
|
||||
meta = {
|
||||
"indexes": [{'fields': ("provider_ids.foo", "provider_ids.bar"),
|
||||
'sparse': True}],
|
||||
}
|
||||
|
||||
info = MyDoc.objects._collection.index_information()
|
||||
self.assertEqual([('provider_ids.foo', 1), ('provider_ids.bar', 1)],
|
||||
info['provider_ids.foo_1_provider_ids.bar_1']['key'])
|
||||
self.assertTrue(info['provider_ids.foo_1_provider_ids.bar_1']['sparse'])
|
||||
|
||||
def test_text_indexes(self):
|
||||
|
||||
class Book(Document):
|
||||
@@ -798,6 +887,18 @@ class IndexesTest(unittest.TestCase):
|
||||
key = indexes["title_text"]["key"]
|
||||
self.assertTrue(('_fts', 'text') in key)
|
||||
|
||||
def test_hashed_indexes(self):
|
||||
|
||||
class Book(Document):
|
||||
ref_id = StringField()
|
||||
meta = {
|
||||
"indexes": ["#ref_id"],
|
||||
}
|
||||
|
||||
indexes = Book.objects._collection.index_information()
|
||||
self.assertTrue("ref_id_hashed" in indexes)
|
||||
self.assertTrue(('ref_id', 'hashed') in indexes["ref_id_hashed"]["key"])
|
||||
|
||||
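The '#' prefix in meta['indexes'] is mongoengine's shorthand for a hashed index, in the same family as the '(' (2dsphere) and ')' (geoHaystack) prefixes used in the geo tests above. For example:

    from mongoengine import Document, StringField

    class Book(Document):
        ref_id = StringField()
        meta = {
            # '#' prefix requests a hashed index on ref_id.
            'indexes': ['#ref_id'],
        }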
def test_indexes_after_database_drop(self):
|
||||
"""
|
||||
Test to ensure that indexes are re-created on a collection even
|
||||
@@ -805,26 +906,122 @@ class IndexesTest(unittest.TestCase):
|
||||
|
||||
Issue #812
|
||||
"""
|
||||
# Use a new connection and database since dropping the database could
|
||||
# cause concurrent tests to fail.
|
||||
connection = connect(db='tempdatabase',
|
||||
alias='test_indexes_after_database_drop')
|
||||
|
||||
class BlogPost(Document):
|
||||
title = StringField()
|
||||
slug = StringField(unique=True)
|
||||
|
||||
BlogPost.drop_collection()
|
||||
meta = {'db_alias': 'test_indexes_after_database_drop'}
|
||||
|
||||
# Create Post #1
|
||||
post1 = BlogPost(title='test1', slug='test')
|
||||
post1.save()
|
||||
try:
|
||||
BlogPost.drop_collection()
|
||||
|
||||
# Drop the Database
|
||||
self.connection.drop_database(BlogPost._get_db().name)
|
||||
# Create Post #1
|
||||
post1 = BlogPost(title='test1', slug='test')
|
||||
post1.save()
|
||||
|
||||
# Re-create Post #1
|
||||
post1 = BlogPost(title='test1', slug='test')
|
||||
post1.save()
|
||||
# Drop the Database
|
||||
connection.drop_database('tempdatabase')
|
||||
|
||||
# Re-create Post #1
|
||||
post1 = BlogPost(title='test1', slug='test')
|
||||
post1.save()
|
||||
|
||||
# Create Post #2
|
||||
post2 = BlogPost(title='test2', slug='test')
|
||||
self.assertRaises(NotUniqueError, post2.save)
|
||||
finally:
|
||||
# Drop the temporary database at the end
|
||||
connection.drop_database('tempdatabase')
|
||||
|
||||
|
||||
def test_index_dont_send_cls_option(self):
|
||||
"""
|
||||
Ensure that 'cls' option is not sent through ensureIndex. We shouldn't
|
||||
send internal MongoEngine arguments that are not a part of the index
|
||||
spec.
|
||||
|
||||
This is directly related to the fact that MongoDB doesn't validate the
|
||||
options that are passed to ensureIndex. For more details, see:
|
||||
https://jira.mongodb.org/browse/SERVER-769
|
||||
"""
|
||||
class TestDoc(Document):
|
||||
txt = StringField()
|
||||
|
||||
meta = {
|
||||
'allow_inheritance': True,
|
||||
'indexes': [
|
||||
{'fields': ('txt',), 'cls': False}
|
||||
]
|
||||
}
|
||||
|
||||
class TestChildDoc(TestDoc):
|
||||
txt2 = StringField()
|
||||
|
||||
meta = {
|
||||
'indexes': [
|
||||
{'fields': ('txt2',), 'cls': False}
|
||||
]
|
||||
}
|
||||
|
||||
TestDoc.drop_collection()
|
||||
TestDoc.ensure_indexes()
|
||||
TestChildDoc.ensure_indexes()
|
||||
|
||||
index_info = TestDoc._get_collection().index_information()
|
||||
for key in index_info:
|
||||
del index_info[key]['v'] # drop the index version - we don't care about that here
|
||||
if 'ns' in index_info[key]:
|
||||
del index_info[key]['ns'] # drop the index namespace - we don't care about that here, MongoDB 3+
|
||||
if 'dropDups' in index_info[key]:
|
||||
del index_info[key]['dropDups'] # drop the index dropDups - it is deprecated in MongoDB 3+
|
||||
|
||||
self.assertEqual(index_info, {
|
||||
'txt_1': {
|
||||
'key': [('txt', 1)],
|
||||
'background': False
|
||||
},
|
||||
'_id_': {
|
||||
'key': [('_id', 1)],
|
||||
},
|
||||
'txt2_1': {
|
||||
'key': [('txt2', 1)],
|
||||
'background': False
|
||||
},
|
||||
'_cls_1': {
|
||||
'key': [('_cls', 1)],
|
||||
'background': False,
|
||||
}
|
||||
})
|
||||
|
||||
def test_compound_index_underscore_cls_not_overwritten(self):
|
||||
"""
|
||||
Test that the compound index doesn't get another _cls when it is specified
|
||||
"""
|
||||
class TestDoc(Document):
|
||||
shard_1 = StringField()
|
||||
txt_1 = StringField()
|
||||
|
||||
meta = {
|
||||
'collection': 'test',
|
||||
'allow_inheritance': True,
|
||||
'sparse': True,
|
||||
'shard_key': 'shard_1',
|
||||
'indexes': [
|
||||
('shard_1', '_cls', 'txt_1'),
|
||||
]
|
||||
}
|
||||
|
||||
TestDoc.drop_collection()
|
||||
TestDoc.ensure_indexes()
|
||||
|
||||
index_info = TestDoc._get_collection().index_information()
|
||||
self.assertTrue('shard_1_1__cls_1_txt_1_1' in index_info)
|
||||
|
||||
# Create Post #2
|
||||
post2 = BlogPost(title='test2', slug='test')
|
||||
self.assertRaises(NotUniqueError, post2.save)
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
@@ -1,6 +1,4 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import sys
|
||||
sys.path[0:0] = [""]
|
||||
import unittest
|
||||
import warnings
|
||||
|
||||
@@ -253,19 +251,17 @@ class InheritanceTest(unittest.TestCase):
|
||||
self.assertEqual(classes, [Human])
|
||||
|
||||
def test_allow_inheritance(self):
|
||||
"""Ensure that inheritance may be disabled on simple classes and that
|
||||
_cls and _subclasses will not be used.
|
||||
"""Ensure that inheritance is disabled by default on simple
|
||||
classes and that _cls will not be used.
|
||||
"""
|
||||
|
||||
class Animal(Document):
|
||||
name = StringField()
|
||||
|
||||
def create_dog_class():
|
||||
# can't inherit because Animal didn't explicitly allow inheritance
|
||||
with self.assertRaises(ValueError):
|
||||
class Dog(Animal):
|
||||
pass
|
||||
|
||||
self.assertRaises(ValueError, create_dog_class)
|
||||
|
||||
# Check that _cls etc aren't present on simple documents
|
||||
dog = Animal(name='dog').save()
|
||||
self.assertEqual(dog.to_mongo().keys(), ['_id', 'name'])
|
||||
@@ -275,17 +271,15 @@ class InheritanceTest(unittest.TestCase):
|
||||
self.assertFalse('_cls' in obj)
|
||||
|
||||
def test_cant_turn_off_inheritance_on_subclass(self):
|
||||
"""Ensure if inheritance is on in a subclass you cant turn it off
|
||||
"""Ensure if inheritance is on in a subclass you cant turn it off.
|
||||
"""
|
||||
|
||||
class Animal(Document):
|
||||
name = StringField()
|
||||
meta = {'allow_inheritance': True}
|
||||
|
||||
def create_mammal_class():
|
||||
with self.assertRaises(ValueError):
|
||||
class Mammal(Animal):
|
||||
meta = {'allow_inheritance': False}
|
||||
self.assertRaises(ValueError, create_mammal_class)
|
||||
|
||||
def test_allow_inheritance_abstract_document(self):
|
||||
"""Ensure that abstract documents can set inheritance rules and that
|
||||
@@ -298,28 +292,87 @@ class InheritanceTest(unittest.TestCase):
|
||||
class Animal(FinalDocument):
|
||||
name = StringField()
|
||||
|
||||
def create_mammal_class():
|
||||
with self.assertRaises(ValueError):
|
||||
class Mammal(Animal):
|
||||
pass
|
||||
self.assertRaises(ValueError, create_mammal_class)
|
||||
|
||||
# Check that _cls isn't present in simple documents
|
||||
doc = Animal(name='dog')
|
||||
self.assertFalse('_cls' in doc.to_mongo())
|
||||
|
||||
def test_allow_inheritance_embedded_document(self):
|
||||
"""Ensure embedded documents respect inheritance
|
||||
"""
|
||||
def test_abstract_handle_ids_in_metaclass_properly(self):
|
||||
|
||||
class City(Document):
|
||||
continent = StringField()
|
||||
meta = {'abstract': True,
|
||||
'allow_inheritance': False}
|
||||
|
||||
class EuropeanCity(City):
|
||||
name = StringField()
|
||||
|
||||
berlin = EuropeanCity(name='Berlin', continent='Europe')
|
||||
self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
|
||||
self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
|
||||
self.assertEqual(len(berlin._fields_ordered), 3)
|
||||
self.assertEqual(berlin._fields_ordered[0], 'id')
|
||||
|
||||
def test_auto_id_not_set_if_specific_in_parent_class(self):
|
||||
|
||||
class City(Document):
|
||||
continent = StringField()
|
||||
city_id = IntField(primary_key=True)
|
||||
meta = {'abstract': True,
|
||||
'allow_inheritance': False}
|
||||
|
||||
class EuropeanCity(City):
|
||||
name = StringField()
|
||||
|
||||
berlin = EuropeanCity(name='Berlin', continent='Europe')
|
||||
self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
|
||||
self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
|
||||
self.assertEqual(len(berlin._fields_ordered), 3)
|
||||
self.assertEqual(berlin._fields_ordered[0], 'city_id')
|
||||
|
||||
def test_auto_id_vs_non_pk_id_field(self):
|
||||
|
||||
class City(Document):
|
||||
continent = StringField()
|
||||
id = IntField()
|
||||
meta = {'abstract': True,
|
||||
'allow_inheritance': False}
|
||||
|
||||
class EuropeanCity(City):
|
||||
name = StringField()
|
||||
|
||||
berlin = EuropeanCity(name='Berlin', continent='Europe')
|
||||
self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
|
||||
self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
|
||||
self.assertEqual(len(berlin._fields_ordered), 4)
|
||||
self.assertEqual(berlin._fields_ordered[0], 'auto_id_0')
|
||||
berlin.save()
|
||||
self.assertEqual(berlin.pk, berlin.auto_id_0)
|
||||
|
||||
def test_abstract_document_creation_does_not_fail(self):
|
||||
class City(Document):
|
||||
continent = StringField()
|
||||
meta = {'abstract': True,
|
||||
'allow_inheritance': False}
|
||||
|
||||
bkk = City(continent='asia')
|
||||
self.assertEqual(None, bkk.pk)
|
||||
# TODO: expected error? Shouldn't we create a new error type?
|
||||
with self.assertRaises(KeyError):
|
||||
setattr(bkk, 'pk', 1)
|
||||
|
||||
def test_allow_inheritance_embedded_document(self):
|
||||
"""Ensure embedded documents respect inheritance."""
|
||||
class Comment(EmbeddedDocument):
|
||||
content = StringField()
|
||||
|
||||
def create_special_comment():
|
||||
with self.assertRaises(ValueError):
|
||||
class SpecialComment(Comment):
|
||||
pass
|
||||
|
||||
self.assertRaises(ValueError, create_special_comment)
|
||||
|
||||
doc = Comment(content='test')
|
||||
self.assertFalse('_cls' in doc.to_mongo())
|
||||
|
||||
@@ -348,7 +401,7 @@ class InheritanceTest(unittest.TestCase):
|
||||
try:
|
||||
class MyDocument(DateCreatedDocument, DateUpdatedDocument):
|
||||
pass
|
||||
except:
|
||||
except Exception:
|
||||
self.assertTrue(False, "Couldn't create MyDocument class")
|
||||
|
||||
def test_abstract_documents(self):
|
||||
@@ -391,11 +444,11 @@ class InheritanceTest(unittest.TestCase):
|
||||
self.assertEqual(Guppy._get_collection_name(), 'fish')
|
||||
self.assertEqual(Human._get_collection_name(), 'human')
|
||||
|
||||
def create_bad_abstract():
|
||||
# ensure that a subclass of a non-abstract class can't be abstract
|
||||
with self.assertRaises(ValueError):
|
||||
class EvilHuman(Human):
|
||||
evil = BooleanField(default=True)
|
||||
meta = {'abstract': True}
|
||||
self.assertRaises(ValueError, create_bad_abstract)
|
||||
|
||||
def test_abstract_embedded_documents(self):
|
||||
# 789: EmbeddedDocument shouldn't inherit abstract
|
||||
|
@@ -1,24 +1,24 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import sys
|
||||
sys.path[0:0] = [""]
|
||||
|
||||
import bson
|
||||
import os
|
||||
import pickle
|
||||
import unittest
|
||||
import uuid
|
||||
import weakref
|
||||
|
||||
from datetime import datetime
|
||||
from bson import DBRef, ObjectId
|
||||
from tests import fixtures
|
||||
from tests.fixtures import (PickleEmbedded, PickleTest, PickleSignalsTest,
|
||||
PickleDyanmicEmbedded, PickleDynamicTest)
|
||||
PickleDynamicEmbedded, PickleDynamicTest)
|
||||
|
||||
from mongoengine import *
|
||||
from mongoengine.errors import (NotRegistered, InvalidDocumentError,
|
||||
InvalidQueryError, NotUniqueError)
|
||||
from mongoengine.queryset import NULLIFY, Q
|
||||
from mongoengine.base import get_document, _document_registry
|
||||
from mongoengine.connection import get_db
|
||||
from mongoengine.base import get_document
|
||||
from mongoengine.errors import (NotRegistered, InvalidDocumentError,
|
||||
InvalidQueryError, NotUniqueError,
|
||||
FieldDoesNotExist, SaveConditionError)
|
||||
from mongoengine.queryset import NULLIFY, Q
|
||||
from mongoengine.context_managers import switch_db, query_counter
|
||||
from mongoengine import signals
|
||||
|
||||
@@ -28,6 +28,8 @@ TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__),
|
||||
__all__ = ("InstanceTest",)
|
||||
|
||||
|
||||
|
||||
|
||||
class InstanceTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
@@ -61,6 +63,14 @@ class InstanceTest(unittest.TestCase):
|
||||
list(self.Person._get_collection().find().sort("id")),
|
||||
sorted(docs, key=lambda doc: doc["_id"]))
|
||||
|
||||
def assertHasInstance(self, field, instance):
|
||||
self.assertTrue(hasattr(field, "_instance"))
|
||||
self.assertTrue(field._instance is not None)
|
||||
if isinstance(field._instance, weakref.ProxyType):
|
||||
self.assertTrue(field._instance.__eq__(instance))
|
||||
else:
|
||||
self.assertEqual(field._instance, instance)
|
||||
|
||||
def test_capped_collection(self):
|
||||
"""Ensure that capped collections work properly.
|
||||
"""
|
||||
@@ -86,21 +96,77 @@ class InstanceTest(unittest.TestCase):
|
||||
options = Log.objects._collection.options()
|
||||
self.assertEqual(options['capped'], True)
|
||||
self.assertEqual(options['max'], 10)
|
||||
self.assertTrue(options['size'] >= 4096)
|
||||
self.assertEqual(options['size'], 4096)
|
||||
|
||||
# Check that the document cannot be redefined with different options
|
||||
def recreate_log_document():
|
||||
class Log(Document):
|
||||
date = DateTimeField(default=datetime.now)
|
||||
meta = {
|
||||
'max_documents': 11,
|
||||
}
|
||||
# Create the collection by accessing Document.objects
|
||||
class Log(Document):
|
||||
date = DateTimeField(default=datetime.now)
|
||||
meta = {
|
||||
'max_documents': 11,
|
||||
}
|
||||
|
||||
# Accessing Document.objects creates the collection
|
||||
with self.assertRaises(InvalidCollectionError):
|
||||
Log.objects
|
||||
self.assertRaises(InvalidCollectionError, recreate_log_document)
|
||||
|
||||
def test_capped_collection_default(self):
|
||||
"""Ensure that capped collections defaults work properly."""
|
||||
class Log(Document):
|
||||
date = DateTimeField(default=datetime.now)
|
||||
meta = {
|
||||
'max_documents': 10,
|
||||
}
|
||||
|
||||
Log.drop_collection()
|
||||
|
||||
# Create a doc to create the collection
|
||||
Log().save()
|
||||
|
||||
options = Log.objects._collection.options()
|
||||
self.assertEqual(options['capped'], True)
|
||||
self.assertEqual(options['max'], 10)
|
||||
self.assertEqual(options['size'], 10 * 2**20)
|
||||
|
||||
# Check that the document with default value can be recreated
|
||||
class Log(Document):
|
||||
date = DateTimeField(default=datetime.now)
|
||||
meta = {
|
||||
'max_documents': 10,
|
||||
}
|
||||
|
||||
# Create the collection by accessing Document.objects
|
||||
Log.objects
|
||||
|
||||
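These capped-collection tests pivot on max_documents/max_size in meta; as asserted above, when only max_documents is given the size defaults to 10 * 2**20 bytes, and MongoDB rounds odd sizes up to a multiple of 256. A minimal sketch:

    from datetime import datetime
    from mongoengine import DateTimeField, Document

    class Log(Document):
        date = DateTimeField(default=datetime.now)
        meta = {
            'max_documents': 10,  # cap on document count
            'max_size': 4096,     # cap in bytes
        }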
def test_capped_collection_no_max_size_problems(self):
|
||||
"""Ensure that capped collections with odd max_size work properly.
|
||||
MongoDB rounds up max_size to next multiple of 256, recreating a doc
|
||||
with the same spec failed in mongoengine <0.10
|
||||
"""
|
||||
class Log(Document):
|
||||
date = DateTimeField(default=datetime.now)
|
||||
meta = {
|
||||
'max_size': 10000,
|
||||
}
|
||||
|
||||
Log.drop_collection()
|
||||
|
||||
# Create a doc to create the collection
|
||||
Log().save()
|
||||
|
||||
options = Log.objects._collection.options()
|
||||
self.assertEqual(options['capped'], True)
|
||||
self.assertTrue(options['size'] >= 10000)
|
||||
|
||||
# Check that the document with odd max_size value can be recreated
|
||||
class Log(Document):
|
||||
date = DateTimeField(default=datetime.now)
|
||||
meta = {
|
||||
'max_size': 10000,
|
||||
}
|
||||
|
||||
# Create the collection by accessing Document.objects
|
||||
Log.objects
|
||||
|
||||
def test_repr(self):
|
||||
"""Ensure that unicode representation works
|
||||
"""
|
||||
@@ -210,7 +276,7 @@ class InstanceTest(unittest.TestCase):
|
||||
|
||||
list_stats = []
|
||||
|
||||
for i in xrange(10):
|
||||
for i in range(10):
|
||||
s = Stats()
|
||||
s.save()
|
||||
list_stats.append(s)
|
||||
@@ -280,14 +346,14 @@ class InstanceTest(unittest.TestCase):
|
||||
self.assertEqual(User._fields['username'].db_field, '_id')
|
||||
self.assertEqual(User._meta['id_field'], 'username')
|
||||
|
||||
def create_invalid_user():
|
||||
User(name='test').save() # no primary key field
|
||||
self.assertRaises(ValidationError, create_invalid_user)
|
||||
# test no primary key field
|
||||
self.assertRaises(ValidationError, User(name='test').save)
|
||||
|
||||
def define_invalid_user():
|
||||
# define a subclass with a different primary key field than the
|
||||
# parent
|
||||
with self.assertRaises(ValueError):
|
||||
class EmailUser(User):
|
||||
email = StringField(primary_key=True)
|
||||
self.assertRaises(ValueError, define_invalid_user)
|
||||
|
||||
class EmailUser(User):
|
||||
email = StringField()
|
||||
@@ -335,12 +401,10 @@ class InstanceTest(unittest.TestCase):
|
||||
|
||||
# Mimic Place and NicePlace definitions being in a different file
|
||||
# and the NicePlace model not being imported in at query time.
|
||||
from mongoengine.base import _document_registry
|
||||
del(_document_registry['Place.NicePlace'])
|
||||
|
||||
def query_without_importing_nice_place():
|
||||
print Place.objects.all()
|
||||
self.assertRaises(NotRegistered, query_without_importing_nice_place)
|
||||
with self.assertRaises(NotRegistered):
|
||||
list(Place.objects.all())
|
||||
|
||||
def test_document_registry_regressions(self):
|
||||
|
||||
@@ -408,6 +472,20 @@ class InstanceTest(unittest.TestCase):
|
||||
doc.reload()
|
||||
Animal.drop_collection()
|
||||
|
||||
def test_reload_sharded_nested(self):
|
||||
class SuperPhylum(EmbeddedDocument):
|
||||
name = StringField()
|
||||
|
||||
class Animal(Document):
|
||||
superphylum = EmbeddedDocumentField(SuperPhylum)
|
||||
meta = {'shard_key': ('superphylum.name',)}
|
||||
|
||||
Animal.drop_collection()
|
||||
doc = Animal(superphylum=SuperPhylum(name='Deuterostomia'))
|
||||
doc.save()
|
||||
doc.reload()
|
||||
Animal.drop_collection()
|
||||
|
||||
def test_reload_referencing(self):
|
||||
"""Ensures reloading updates weakrefs correctly
|
||||
"""
|
||||
@@ -481,6 +559,28 @@ class InstanceTest(unittest.TestCase):
|
||||
except Exception:
|
||||
self.assertFalse("Threw wrong exception")
|
||||
|
||||
def test_reload_of_non_strict_with_special_field_name(self):
|
||||
"""Ensures reloading works for documents with meta strict == False
|
||||
"""
|
||||
class Post(Document):
|
||||
meta = {
|
||||
'strict': False
|
||||
}
|
||||
title = StringField()
|
||||
items = ListField()
|
||||
|
||||
Post.drop_collection()
|
||||
|
||||
Post._get_collection().insert({
|
||||
"title": "Items eclipse",
|
||||
"items": ["more lorem", "even more ipsum"]
|
||||
})
|
||||
|
||||
post = Post.objects.first()
|
||||
post.reload()
|
||||
self.assertEqual(post.title, "Items eclipse")
|
||||
self.assertEqual(post.items, ["more lorem", "even more ipsum"])
|
||||
|
||||
def test_dictionary_access(self):
|
||||
"""Ensure that dictionary-style field access works properly.
|
||||
"""
|
||||
@@ -543,10 +643,12 @@ class InstanceTest(unittest.TestCase):
|
||||
embedded_field = EmbeddedDocumentField(Embedded)
|
||||
|
||||
Doc.drop_collection()
|
||||
Doc(embedded_field=Embedded(string="Hi")).save()
|
||||
doc = Doc(embedded_field=Embedded(string="Hi"))
|
||||
self.assertHasInstance(doc.embedded_field, doc)
|
||||
|
||||
doc.save()
|
||||
doc = Doc.objects.get()
|
||||
self.assertEqual(doc, doc.embedded_field._instance)
|
||||
self.assertHasInstance(doc.embedded_field, doc)
|
||||
|
||||
def test_embedded_document_complex_instance(self):
|
||||
"""Ensure that embedded documents in complex fields can reference
|
||||
@@ -558,10 +660,25 @@ class InstanceTest(unittest.TestCase):
|
||||
embedded_field = ListField(EmbeddedDocumentField(Embedded))
|
||||
|
||||
Doc.drop_collection()
|
||||
Doc(embedded_field=[Embedded(string="Hi")]).save()
|
||||
doc = Doc(embedded_field=[Embedded(string="Hi")])
|
||||
self.assertHasInstance(doc.embedded_field[0], doc)
|
||||
|
||||
doc.save()
|
||||
doc = Doc.objects.get()
|
||||
self.assertEqual(doc, doc.embedded_field[0]._instance)
|
||||
self.assertHasInstance(doc.embedded_field[0], doc)
|
||||
|
||||
def test_embedded_document_complex_instance_no_use_db_field(self):
|
||||
"""Ensure that use_db_field is propagated to list of Emb Docs
|
||||
"""
|
||||
class Embedded(EmbeddedDocument):
|
||||
string = StringField(db_field='s')
|
||||
|
||||
class Doc(Document):
|
||||
embedded_field = ListField(EmbeddedDocumentField(Embedded))
|
||||
|
||||
d = Doc(embedded_field=[Embedded(string="Hi")]).to_mongo(
|
||||
use_db_field=False).to_dict()
|
||||
self.assertEqual(d['embedded_field'], [{'string': 'Hi'}])
|
||||
|
||||
def test_instance_is_set_on_setattr(self):
|
||||
|
||||
@@ -574,11 +691,28 @@ class InstanceTest(unittest.TestCase):
|
||||
Account.drop_collection()
|
||||
acc = Account()
|
||||
acc.email = Email(email='test@example.com')
|
||||
self.assertTrue(hasattr(acc._data["email"], "_instance"))
|
||||
self.assertHasInstance(acc._data["email"], acc)
|
||||
acc.save()
|
||||
|
||||
acc1 = Account.objects.first()
|
||||
self.assertTrue(hasattr(acc1._data["email"], "_instance"))
|
||||
self.assertHasInstance(acc1._data["email"], acc1)
|
||||
|
||||
def test_instance_is_set_on_setattr_on_embedded_document_list(self):
|
||||
|
||||
class Email(EmbeddedDocument):
|
||||
email = EmailField()
|
||||
|
||||
class Account(Document):
|
||||
emails = EmbeddedDocumentListField(Email)
|
||||
|
||||
Account.drop_collection()
|
||||
acc = Account()
|
||||
acc.emails = [Email(email='test@example.com')]
|
||||
self.assertHasInstance(acc._data["emails"][0], acc)
|
||||
acc.save()
|
||||
|
||||
acc1 = Account.objects.first()
|
||||
self.assertHasInstance(acc1._data["emails"][0], acc1)
|
||||
|
||||
def test_document_clean(self):
|
||||
class TestDocument(Document):
|
||||
@@ -599,7 +733,7 @@ class InstanceTest(unittest.TestCase):
|
||||
|
||||
try:
|
||||
t.save()
|
||||
except ValidationError, e:
|
||||
except ValidationError as e:
|
||||
expect_msg = "Draft entries may not have a publication date."
|
||||
self.assertTrue(expect_msg in e.message)
|
||||
self.assertEqual(e.to_dict(), {'__all__': expect_msg})
|
||||
@@ -638,7 +772,7 @@ class InstanceTest(unittest.TestCase):
|
||||
t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25, z=15))
|
||||
try:
|
||||
t.save()
|
||||
except ValidationError, e:
|
||||
except ValidationError as e:
|
||||
expect_msg = "Value of z != x + y"
|
||||
self.assertTrue(expect_msg in e.message)
|
||||
self.assertEqual(e.to_dict(), {'doc': {'__all__': expect_msg}})
|
||||
@@ -652,8 +786,10 @@ class InstanceTest(unittest.TestCase):
|
||||
|
||||
def test_modify_empty(self):
|
||||
doc = self.Person(name="bob", age=10).save()
|
||||
self.assertRaises(
|
||||
InvalidDocumentError, lambda: self.Person().modify(set__age=10))
|
||||
|
||||
with self.assertRaises(InvalidDocumentError):
|
||||
self.Person().modify(set__age=10)
|
||||
|
||||
self.assertDbEqual([dict(doc.to_mongo())])
|
||||
|
||||
def test_modify_invalid_query(self):
|
||||
@@ -661,9 +797,8 @@ class InstanceTest(unittest.TestCase):
|
||||
doc2 = self.Person(name="jim", age=20).save()
|
||||
docs = [dict(doc1.to_mongo()), dict(doc2.to_mongo())]
|
||||
|
||||
self.assertRaises(
|
||||
InvalidQueryError,
|
||||
lambda: doc1.modify(dict(id=doc2.id), set__value=20))
|
||||
with self.assertRaises(InvalidQueryError):
|
||||
doc1.modify({'id': doc2.id}, set__value=20)
|
||||
|
||||
self.assertDbEqual(docs)
|
||||
|
||||
@@ -672,7 +807,7 @@ class InstanceTest(unittest.TestCase):
|
||||
doc2 = self.Person(name="jim", age=20).save()
|
||||
docs = [dict(doc1.to_mongo()), dict(doc2.to_mongo())]
|
||||
|
||||
assert not doc1.modify(dict(name=doc2.name), set__age=100)
|
||||
assert not doc1.modify({'name': doc2.name}, set__age=100)
|
||||
|
||||
self.assertDbEqual(docs)
|
||||
|
||||
@@ -681,7 +816,7 @@ class InstanceTest(unittest.TestCase):
|
||||
doc2 = self.Person(id=ObjectId(), name="jim", age=20)
|
||||
docs = [dict(doc1.to_mongo())]
|
||||
|
||||
assert not doc2.modify(dict(name=doc2.name), set__age=100)
|
||||
assert not doc2.modify({'name': doc2.name}, set__age=100)
|
||||
|
||||
self.assertDbEqual(docs)
|
||||
|
||||
@@ -952,11 +1087,12 @@ class InstanceTest(unittest.TestCase):
|
||||
self.assertEqual(w1.save_id, UUID(1))
|
||||
self.assertEqual(w1.count, 0)
|
||||
|
||||
# mismatch in save_condition prevents save
|
||||
# mismatch in save_condition prevents save and raise exception
|
||||
flip(w1)
|
||||
self.assertTrue(w1.toggle)
|
||||
self.assertEqual(w1.count, 1)
|
||||
w1.save(save_condition={'save_id': UUID(42)})
|
||||
self.assertRaises(SaveConditionError,
|
||||
w1.save, save_condition={'save_id': UUID(42)})
|
||||
w1.reload()
|
||||
self.assertFalse(w1.toggle)
|
||||
self.assertEqual(w1.count, 0)
|
||||
@@ -984,7 +1120,8 @@ class InstanceTest(unittest.TestCase):
|
||||
self.assertEqual(w1.count, 2)
|
||||
flip(w2)
|
||||
flip(w2)
|
||||
w2.save(save_condition={'save_id': old_id})
|
||||
self.assertRaises(SaveConditionError,
|
||||
w2.save, save_condition={'save_id': old_id})
|
||||
w2.reload()
|
||||
self.assertFalse(w2.toggle)
|
||||
self.assertEqual(w2.count, 2)
|
||||
@@ -996,7 +1133,8 @@ class InstanceTest(unittest.TestCase):
|
||||
self.assertTrue(w1.toggle)
|
||||
self.assertEqual(w1.count, 3)
|
||||
flip(w1)
|
||||
w1.save(save_condition={'count__gte': w1.count})
|
||||
self.assertRaises(SaveConditionError,
|
||||
w1.save, save_condition={'count__gte': w1.count})
|
||||
w1.reload()
|
||||
self.assertTrue(w1.toggle)
|
||||
self.assertEqual(w1.count, 3)
|
||||
@@ -1144,12 +1282,11 @@ class InstanceTest(unittest.TestCase):
|
||||
|
||||
def test_document_update(self):
|
||||
|
||||
def update_not_saved_raises():
|
||||
# try updating a non-saved document
|
||||
with self.assertRaises(OperationError):
|
||||
person = self.Person(name='dcrosta')
|
||||
person.update(set__name='Dan Crosta')
|
||||
|
||||
self.assertRaises(OperationError, update_not_saved_raises)
|
||||
|
||||
author = self.Person(name='dcrosta')
|
||||
author.save()
|
||||
|
||||
@@ -1159,19 +1296,17 @@ class InstanceTest(unittest.TestCase):
|
||||
p1 = self.Person.objects.first()
|
||||
self.assertEqual(p1.name, author.name)
|
||||
|
||||
def update_no_value_raises():
|
||||
# try sending an empty update
|
||||
with self.assertRaises(OperationError):
|
||||
person = self.Person.objects.first()
|
||||
person.update()
|
||||
|
||||
self.assertRaises(OperationError, update_no_value_raises)
|
||||
|
||||
def update_no_op_should_default_to_set():
|
||||
person = self.Person.objects.first()
|
||||
person.update(name="Dan")
|
||||
person.reload()
|
||||
return person.name
|
||||
|
||||
self.assertEqual("Dan", update_no_op_should_default_to_set())
|
||||
# update that doesn't explicitly specify an operator should default
|
||||
# to 'set__'
|
||||
person = self.Person.objects.first()
|
||||
person.update(name="Dan")
|
||||
person.reload()
|
||||
self.assertEqual("Dan", person.name)
|
||||
|
||||
def test_update_unique_field(self):
|
||||
class Doc(Document):
|
||||
@@ -1180,8 +1315,8 @@ class InstanceTest(unittest.TestCase):
|
||||
doc1 = Doc(name="first").save()
|
||||
doc2 = Doc(name="second").save()
|
||||
|
||||
self.assertRaises(NotUniqueError, lambda:
|
||||
doc2.update(set__name=doc1.name))
|
||||
with self.assertRaises(NotUniqueError):
|
||||
doc2.update(set__name=doc1.name)
|
||||
|
||||
def test_embedded_update(self):
|
||||
"""
|
||||
@@ -1699,15 +1834,13 @@ class InstanceTest(unittest.TestCase):
|
||||
|
||||
def test_duplicate_db_fields_raise_invalid_document_error(self):
|
||||
"""Ensure a InvalidDocumentError is thrown if duplicate fields
|
||||
declare the same db_field"""
|
||||
|
||||
def throw_invalid_document_error():
|
||||
declare the same db_field.
|
||||
"""
|
||||
with self.assertRaises(InvalidDocumentError):
|
||||
class Foo(Document):
|
||||
name = StringField()
|
||||
name2 = StringField(db_field='name')
|
||||
|
||||
self.assertRaises(InvalidDocumentError, throw_invalid_document_error)
|
||||
|
||||
def test_invalid_son(self):
|
||||
"""Raise an error if loading invalid data"""
|
||||
class Occurrence(EmbeddedDocument):
|
||||
@@ -1719,11 +1852,13 @@ class InstanceTest(unittest.TestCase):
|
||||
forms = ListField(StringField(), default=list)
|
||||
occurs = ListField(EmbeddedDocumentField(Occurrence), default=list)
|
||||
|
||||
def raise_invalid_document():
|
||||
Word._from_son({'stem': [1, 2, 3], 'forms': 1, 'count': 'one',
|
||||
'occurs': {"hello": None}})
|
||||
|
||||
self.assertRaises(InvalidDocumentError, raise_invalid_document)
|
||||
with self.assertRaises(InvalidDocumentError):
|
||||
Word._from_son({
|
||||
'stem': [1, 2, 3],
|
||||
'forms': 1,
|
||||
'count': 'one',
|
||||
'occurs': {"hello": None}
|
||||
})
|
||||
|
||||
def test_reverse_delete_rule_cascade_and_nullify(self):
|
||||
"""Ensure that a referenced document is also deleted upon deletion.
|
||||
@@ -1757,6 +1892,62 @@ class InstanceTest(unittest.TestCase):
|
||||
author.delete()
|
||||
self.assertEqual(BlogPost.objects.count(), 0)
|
||||
|
||||
def test_reverse_delete_rule_with_custom_id_field(self):
|
||||
"""Ensure that a referenced document with custom primary key
|
||||
is also deleted upon deletion.
|
||||
"""
|
||||
class User(Document):
|
||||
name = StringField(primary_key=True)
|
||||
|
||||
class Book(Document):
|
||||
author = ReferenceField(User, reverse_delete_rule=CASCADE)
|
||||
reviewer = ReferenceField(User, reverse_delete_rule=NULLIFY)
|
||||
|
||||
User.drop_collection()
|
||||
Book.drop_collection()
|
||||
|
||||
user = User(name='Mike').save()
|
||||
reviewer = User(name='John').save()
|
||||
book = Book(author=user, reviewer=reviewer).save()
|
||||
|
||||
reviewer.delete()
|
||||
self.assertEqual(Book.objects.count(), 1)
|
||||
self.assertEqual(Book.objects.get().reviewer, None)
|
||||
|
||||
user.delete()
|
||||
self.assertEqual(Book.objects.count(), 0)
|
||||
|
||||
def test_reverse_delete_rule_with_shared_id_among_collections(self):
|
||||
"""Ensure that cascade delete rule doesn't mix id among collections.
|
||||
"""
|
||||
class User(Document):
|
||||
id = IntField(primary_key=True)
|
||||
|
||||
class Book(Document):
|
||||
id = IntField(primary_key=True)
|
||||
author = ReferenceField(User, reverse_delete_rule=CASCADE)
|
||||
|
||||
User.drop_collection()
|
||||
Book.drop_collection()
|
||||
|
||||
user_1 = User(id=1).save()
|
||||
user_2 = User(id=2).save()
|
||||
book_1 = Book(id=1, author=user_2).save()
|
||||
book_2 = Book(id=2, author=user_1).save()
|
||||
|
||||
user_2.delete()
|
||||
# Deleting user_2 should also delete book_1 but not book_2
|
||||
self.assertEqual(Book.objects.count(), 1)
|
||||
self.assertEqual(Book.objects.get(), book_2)
|
||||
|
||||
user_3 = User(id=3).save()
|
||||
book_3 = Book(id=3, author=user_3).save()
|
||||
|
||||
user_3.delete()
|
||||
# Deleting user_3 should also delete book_3
|
||||
self.assertEqual(Book.objects.count(), 1)
|
||||
self.assertEqual(Book.objects.get(), book_2)
|
||||
|
||||
def test_reverse_delete_rule_with_document_inheritance(self):
|
||||
"""Ensure that a referenced document is also deleted upon deletion
|
||||
of a child document.
|
||||
@@ -1829,11 +2020,11 @@ class InstanceTest(unittest.TestCase):
|
||||
self.assertEqual(BlogPost.objects.count(), 0)
|
||||
|
||||
def test_reverse_delete_rule_cascade_triggers_pre_delete_signal(self):
|
||||
''' ensure the pre_delete signal is triggered upon a cascading deletion
|
||||
""" ensure the pre_delete signal is triggered upon a cascading deletion
|
||||
setup a blog post with content, an author and editor
|
||||
delete the author which triggers deletion of blogpost via cascade
|
||||
blog post's pre_delete signal alters an editor attribute
|
||||
'''
|
||||
"""
|
||||
class Editor(self.Person):
|
||||
review_queue = IntField(default=0)
|
||||
|
||||
@@ -1898,8 +2089,7 @@ class InstanceTest(unittest.TestCase):
|
||||
self.assertEqual(Bar.objects.get().foo, None)
|
||||
|
||||
def test_invalid_reverse_delete_rule_raise_errors(self):
|
||||
|
||||
def throw_invalid_document_error():
|
||||
with self.assertRaises(InvalidDocumentError):
|
||||
class Blog(Document):
|
||||
content = StringField()
|
||||
authors = MapField(ReferenceField(
|
||||
@@ -1909,21 +2099,15 @@ class InstanceTest(unittest.TestCase):
|
||||
self.Person,
|
||||
reverse_delete_rule=NULLIFY))
|
||||
|
||||
self.assertRaises(InvalidDocumentError, throw_invalid_document_error)
|
||||
|
||||
def throw_invalid_document_error_embedded():
|
||||
with self.assertRaises(InvalidDocumentError):
|
||||
class Parents(EmbeddedDocument):
|
||||
father = ReferenceField('Person', reverse_delete_rule=DENY)
|
||||
mother = ReferenceField('Person', reverse_delete_rule=DENY)
|
||||
|
||||
self.assertRaises(
|
||||
InvalidDocumentError, throw_invalid_document_error_embedded)
|
||||
|
||||
def test_reverse_delete_rule_cascade_recurs(self):
|
||||
"""Ensure that a chain of documents is also deleted upon cascaded
|
||||
deletion.
|
||||
"""
|
||||
|
||||
class BlogPost(Document):
|
||||
content = StringField()
|
||||
author = ReferenceField(self.Person, reverse_delete_rule=CASCADE)
|
||||
@@ -2085,11 +2269,34 @@ class InstanceTest(unittest.TestCase):
|
||||
self.assertEqual(pickle_doc.string, "Two")
|
||||
self.assertEqual(pickle_doc.lists, ["1", "2", "3"])
|
||||
|
||||
def test_regular_document_pickle(self):
|
||||
|
||||
pickle_doc = PickleTest(number=1, string="One", lists=['1', '2'])
|
||||
pickled_doc = pickle.dumps(pickle_doc) # make sure pickling works even before the doc is saved
|
||||
pickle_doc.save()
|
||||
|
||||
pickled_doc = pickle.dumps(pickle_doc)
|
||||
|
||||
# Test that when a document's definition changes the new
|
||||
# definition is used
|
||||
fixtures.PickleTest = fixtures.NewDocumentPickleTest
|
||||
|
||||
resurrected = pickle.loads(pickled_doc)
|
||||
self.assertEqual(resurrected.__class__,
|
||||
fixtures.NewDocumentPickleTest)
|
||||
self.assertEqual(resurrected._fields_ordered,
|
||||
fixtures.NewDocumentPickleTest._fields_ordered)
|
||||
self.assertNotEqual(resurrected._fields_ordered,
|
||||
pickle_doc._fields_ordered)
|
||||
|
||||
# The local PickleTest is still a ref to the original
|
||||
fixtures.PickleTest = PickleTest
|
||||
|
||||
def test_dynamic_document_pickle(self):
|
||||
|
||||
pickle_doc = PickleDynamicTest(
|
||||
name="test", number=1, string="One", lists=['1', '2'])
|
||||
pickle_doc.embedded = PickleDyanmicEmbedded(foo="Bar")
|
||||
pickle_doc.embedded = PickleDynamicEmbedded(foo="Bar")
|
||||
pickled_doc = pickle.dumps(pickle_doc) # make sure pickling works even before the doc is saved
|
||||
|
||||
pickle_doc.save()
|
||||
@@ -2116,15 +2323,14 @@ class InstanceTest(unittest.TestCase):
|
||||
pickle_doc.save()
|
||||
pickle_doc.delete()
|
||||
|
||||
def test_throw_invalid_document_error(self):
|
||||
|
||||
# test handles people trying to upsert
|
||||
def throw_invalid_document_error():
|
||||
def test_override_method_with_field(self):
|
||||
"""Test creating a field with a field name that would override
|
||||
the "validate" method.
|
||||
"""
|
||||
with self.assertRaises(InvalidDocumentError):
|
||||
class Blog(Document):
|
||||
validate = DictField()
|
||||
|
||||
self.assertRaises(InvalidDocumentError, throw_invalid_document_error)
|
||||
|
||||
def test_mutating_documents(self):
|
||||
|
||||
class B(EmbeddedDocument):
|
||||
@@ -2443,6 +2649,114 @@ class InstanceTest(unittest.TestCase):
|
||||
group = Group.objects.first()
|
||||
self.assertEqual("hello - default", group.name)
|
||||
|
||||
def test_load_undefined_fields(self):
|
||||
class User(Document):
|
||||
name = StringField()
|
||||
|
||||
User.drop_collection()
|
||||
|
||||
User._get_collection().save({
|
||||
'name': 'John',
|
||||
'foo': 'Bar',
|
||||
'data': [1, 2, 3]
|
||||
})
|
||||
|
||||
self.assertRaises(FieldDoesNotExist, User.objects.first)
|
||||
|
||||
def test_load_undefined_fields_with_strict_false(self):
|
||||
class User(Document):
|
||||
name = StringField()
|
||||
|
||||
meta = {'strict': False}
|
||||
|
||||
User.drop_collection()
|
||||
|
||||
User._get_collection().save({
|
||||
'name': 'John',
|
||||
'foo': 'Bar',
|
||||
'data': [1, 2, 3]
|
||||
})
|
||||
|
||||
user = User.objects.first()
|
||||
self.assertEqual(user.name, 'John')
|
||||
self.assertFalse(hasattr(user, 'foo'))
|
||||
self.assertEqual(user._data['foo'], 'Bar')
|
||||
self.assertFalse(hasattr(user, 'data'))
|
||||
self.assertEqual(user._data['data'], [1, 2, 3])
|
||||
|
||||
def test_load_undefined_fields_on_embedded_document(self):
|
||||
class Thing(EmbeddedDocument):
|
||||
name = StringField()
|
||||
|
||||
class User(Document):
|
||||
name = StringField()
|
||||
thing = EmbeddedDocumentField(Thing)
|
||||
|
||||
User.drop_collection()
|
||||
|
||||
User._get_collection().save({
|
||||
'name': 'John',
|
||||
'thing': {
|
||||
'name': 'My thing',
|
||||
'foo': 'Bar',
|
||||
'data': [1, 2, 3]
|
||||
}
|
||||
})
|
||||
|
||||
self.assertRaises(FieldDoesNotExist, User.objects.first)
|
||||
|
||||
def test_load_undefined_fields_on_embedded_document_with_strict_false_on_doc(self):
|
||||
class Thing(EmbeddedDocument):
|
||||
name = StringField()
|
||||
|
||||
class User(Document):
|
||||
name = StringField()
|
||||
thing = EmbeddedDocumentField(Thing)
|
||||
|
||||
meta = {'strict': False}
|
||||
|
||||
User.drop_collection()
|
||||
|
||||
User._get_collection().save({
|
||||
'name': 'John',
|
||||
'thing': {
|
||||
'name': 'My thing',
|
||||
'foo': 'Bar',
|
||||
'data': [1, 2, 3]
|
||||
}
|
||||
})
|
||||
|
||||
self.assertRaises(FieldDoesNotExist, User.objects.first)
|
||||
|
||||
def test_load_undefined_fields_on_embedded_document_with_strict_false(self):
|
||||
class Thing(EmbeddedDocument):
|
||||
name = StringField()
|
||||
|
||||
meta = {'strict': False}
|
||||
|
||||
class User(Document):
|
||||
name = StringField()
|
||||
thing = EmbeddedDocumentField(Thing)
|
||||
|
||||
User.drop_collection()
|
||||
|
||||
User._get_collection().save({
|
||||
'name': 'John',
|
||||
'thing': {
|
||||
'name': 'My thing',
|
||||
'foo': 'Bar',
|
||||
'data': [1, 2, 3]
|
||||
}
|
||||
})
|
||||
|
||||
user = User.objects.first()
|
||||
self.assertEqual(user.name, 'John')
|
||||
self.assertEqual(user.thing.name, 'My thing')
|
||||
self.assertFalse(hasattr(user.thing, 'foo'))
|
||||
self.assertEqual(user.thing._data['foo'], 'Bar')
|
||||
self.assertFalse(hasattr(user.thing, 'data'))
|
||||
self.assertEqual(user.thing._data['data'], [1, 2, 3])
|
||||
|
||||
def test_spaces_in_keys(self):
|
||||
|
||||
class Embedded(DynamicEmbeddedDocument):
|
||||
@@ -2479,10 +2793,34 @@ class InstanceTest(unittest.TestCase):
|
||||
log.log = "Saving"
|
||||
log.save()
|
||||
|
||||
def change_shard_key():
|
||||
# try to change the shard key
|
||||
with self.assertRaises(OperationError):
|
||||
log.machine = "127.0.0.1"
|
||||
|
||||
self.assertRaises(OperationError, change_shard_key)
|
||||
def test_shard_key_in_embedded_document(self):
|
||||
class Foo(EmbeddedDocument):
|
||||
foo = StringField()
|
||||
|
||||
class Bar(Document):
|
||||
meta = {
|
||||
'shard_key': ('foo.foo',)
|
||||
}
|
||||
foo = EmbeddedDocumentField(Foo)
|
||||
bar = StringField()
|
||||
|
||||
foo_doc = Foo(foo='hello')
|
||||
bar_doc = Bar(foo=foo_doc, bar='world')
|
||||
bar_doc.save()
|
||||
|
||||
self.assertTrue(bar_doc.id is not None)
|
||||
|
||||
bar_doc.bar = 'baz'
|
||||
bar_doc.save()
|
||||
|
||||
# try to change the shard key
|
||||
with self.assertRaises(OperationError):
|
||||
bar_doc.foo.foo = 'something'
|
||||
bar_doc.save()
|
||||
|
||||
def test_shard_key_primary(self):
|
||||
class LogEntry(Document):
|
||||
@@ -2504,11 +2842,10 @@ class InstanceTest(unittest.TestCase):
|
||||
log.log = "Saving"
|
||||
log.save()
|
||||
|
||||
def change_shard_key():
|
||||
# try to change the shard key
|
||||
with self.assertRaises(OperationError):
|
||||
log.machine = "127.0.0.1"
|
||||
|
||||
self.assertRaises(OperationError, change_shard_key)
|
||||
|
||||
def test_kwargs_simple(self):
|
||||
|
||||
class Embedded(EmbeddedDocument):
|
||||
@@ -2566,6 +2903,20 @@ class InstanceTest(unittest.TestCase):
|
||||
self.assertEqual(person.name, "Test User")
|
||||
self.assertEqual(person.age, 42)
|
||||
|
||||
def test_positional_creation_embedded(self):
|
||||
"""Ensure that embedded document may be created using positional arguments.
|
||||
"""
|
||||
job = self.Job("Test Job", 4)
|
||||
self.assertEqual(job.name, "Test Job")
|
||||
self.assertEqual(job.years, 4)
|
||||
|
||||
def test_mixed_creation_embedded(self):
|
||||
"""Ensure that embedded document may be created using mixed arguments.
|
||||
"""
|
||||
job = self.Job("Test Job", years=4)
|
||||
self.assertEqual(job.name, "Test Job")
|
||||
self.assertEqual(job.years, 4)
|
||||
|
||||
def test_mixed_creation_dynamic(self):
|
||||
"""Ensure that document may be created using mixed arguments.
|
||||
"""
|
||||
@@ -2579,11 +2930,9 @@ class InstanceTest(unittest.TestCase):
|
||||
def test_bad_mixed_creation(self):
|
||||
"""Ensure that document gives correct error when duplicating arguments
|
||||
"""
|
||||
def construct_bad_instance():
|
||||
with self.assertRaises(TypeError):
|
||||
return self.Person("Test User", 42, name="Bad User")
|
||||
|
||||
self.assertRaises(TypeError, construct_bad_instance)
|
||||
|
||||
def test_data_contains_id_field(self):
|
||||
"""Ensure that asking for _data returns 'id'
|
||||
"""
|
||||
@@ -2742,6 +3091,17 @@ class InstanceTest(unittest.TestCase):
|
||||
p4 = Person.objects()[0]
|
||||
p4.save()
|
||||
self.assertEquals(p4.height, 189)
|
||||
|
||||
# However the default will not be fixed in DB
|
||||
self.assertEquals(Person.objects(height=189).count(), 0)
|
||||
|
||||
# alter DB for the new default
|
||||
coll = Person._get_collection()
|
||||
for person in Person.objects.as_pymongo():
|
||||
if 'height' not in person:
|
||||
person['height'] = 189
|
||||
coll.save(person)
|
||||
|
||||
self.assertEquals(Person.objects(height=189).count(), 1)
|
||||
|
||||
def test_from_son(self):
|
||||
@@ -2799,5 +3159,36 @@ class InstanceTest(unittest.TestCase):
|
||||
self.assertNotEqual(p, p1)
|
||||
self.assertEqual(p, p)
|
||||
|
||||
def test_list_iter(self):
|
||||
# 914
|
||||
class B(EmbeddedDocument):
|
||||
v = StringField()
|
||||
|
||||
class A(Document):
|
||||
l = ListField(EmbeddedDocumentField(B))
|
||||
|
||||
A.objects.delete()
|
||||
A(l=[B(v='1'), B(v='2'), B(v='3')]).save()
|
||||
a = A.objects.get()
|
||||
self.assertEqual(a.l._instance, a)
|
||||
for idx, b in enumerate(a.l):
|
||||
self.assertEqual(b._instance, a)
|
||||
self.assertEqual(idx, 2)
|
||||
|
||||
def test_falsey_pk(self):
|
||||
"""Ensure that we can create and update a document with Falsey PK.
|
||||
"""
|
||||
class Person(Document):
|
||||
age = IntField(primary_key=True)
|
||||
height = FloatField()
|
||||
|
||||
person = Person()
|
||||
person.age = 0
|
||||
person.height = 1.89
|
||||
person.save()
|
||||
|
||||
person.update(set__height=2.0)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
@@ -1,6 +1,3 @@
import sys
sys.path[0:0] = [""]

import unittest
import uuid
@@ -1,7 +1,4 @@
# -*- coding: utf-8 -*-
import sys
sys.path[0:0] = [""]

import unittest
from datetime import datetime

@@ -60,7 +57,7 @@ class ValidatorErrorTest(unittest.TestCase):

        try:
            User().validate()
        except ValidationError, e:
        except ValidationError as e:
            self.assertTrue("User:None" in e.message)
            self.assertEqual(e.to_dict(), {
                'username': 'Field is required',
@@ -70,7 +67,7 @@ class ValidatorErrorTest(unittest.TestCase):
        user.name = None
        try:
            user.save()
        except ValidationError, e:
        except ValidationError as e:
            self.assertTrue("User:RossC0" in e.message)
            self.assertEqual(e.to_dict(), {
                'name': 'Field is required'})
@@ -118,7 +115,7 @@ class ValidatorErrorTest(unittest.TestCase):

        try:
            Doc(id="bad").validate()
        except ValidationError, e:
        except ValidationError as e:
            self.assertTrue("SubDoc:None" in e.message)
            self.assertEqual(e.to_dict(), {
                "e": {'val': 'OK could not be converted to int'}})
@@ -136,7 +133,7 @@ class ValidatorErrorTest(unittest.TestCase):
        doc.e.val = "OK"
        try:
            doc.save()
        except ValidationError, e:
        except ValidationError as e:
            self.assertTrue("Doc:test" in e.message)
            self.assertEqual(e.to_dict(), {
                "e": {'val': 'OK could not be converted to int'}})
@@ -156,14 +153,61 @@ class ValidatorErrorTest(unittest.TestCase):

        s = SubDoc()

        self.assertRaises(ValidationError, lambda: s.validate())
        self.assertRaises(ValidationError, s.validate)

        d1.e = s
        d2.e = s

        del d1

        self.assertRaises(ValidationError, lambda: d2.validate())
        self.assertRaises(ValidationError, d2.validate)

    def test_parent_reference_in_child_document(self):
        """
        Test to ensure a ReferenceField can store a reference to a parent
        class when inherited. Issue #954.
        """
        class Parent(Document):
            meta = {'allow_inheritance': True}
            reference = ReferenceField('self')

        class Child(Parent):
            pass

        parent = Parent()
        parent.save()

        child = Child(reference=parent)

        # Saving child should not raise a ValidationError
        try:
            child.save()
        except ValidationError as e:
            self.fail("ValidationError raised: %s" % e.message)

    def test_parent_reference_set_as_attribute_in_child_document(self):
        """
        Test to ensure a ReferenceField can store a reference to a parent
        class when inherited and when set via attribute. Issue #954.
        """
        class Parent(Document):
            meta = {'allow_inheritance': True}
            reference = ReferenceField('self')

        class Child(Parent):
            pass

        parent = Parent()
        parent.save()

        child = Child()
        child.reference = parent

        # Saving the child should not raise a ValidationError
        try:
            child.save()
        except ValidationError as e:
            self.fail("ValidationError raised: %s" % e.message)


if __name__ == '__main__':
File diff suppressed because it is too large
@@ -1,18 +1,16 @@
# -*- coding: utf-8 -*-
import sys
sys.path[0:0] = [""]

import copy
import os
import unittest
import tempfile

import gridfs
import six

from nose.plugins.skip import SkipTest
from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.python_support import PY3, b, StringIO
from mongoengine.python_support import StringIO

try:
    from PIL import Image
@@ -49,7 +47,7 @@ class FileTest(unittest.TestCase):

        PutFile.drop_collection()

        text = b('Hello, World!')
        text = six.b('Hello, World!')
        content_type = 'text/plain'

        putfile = PutFile()
@@ -88,8 +86,8 @@ class FileTest(unittest.TestCase):

        StreamFile.drop_collection()

        text = b('Hello, World!')
        more_text = b('Foo Bar')
        text = six.b('Hello, World!')
        more_text = six.b('Foo Bar')
        content_type = 'text/plain'

        streamfile = StreamFile()
@@ -112,7 +110,7 @@ class FileTest(unittest.TestCase):
        result.the_file.delete()

        # Ensure deleted file returns None
        self.assertTrue(result.the_file.read() == None)
        self.assertTrue(result.the_file.read() is None)

    def test_file_fields_stream_after_none(self):
        """Ensure that a file field can be written to after it has been saved as
@@ -123,8 +121,8 @@ class FileTest(unittest.TestCase):

        StreamFile.drop_collection()

        text = b('Hello, World!')
        more_text = b('Foo Bar')
        text = six.b('Hello, World!')
        more_text = six.b('Foo Bar')
        content_type = 'text/plain'

        streamfile = StreamFile()
@@ -138,7 +136,7 @@ class FileTest(unittest.TestCase):
        result = StreamFile.objects.first()
        self.assertTrue(streamfile == result)
        self.assertEqual(result.the_file.read(), text + more_text)
        #self.assertEqual(result.the_file.content_type, content_type)
        # self.assertEqual(result.the_file.content_type, content_type)
        result.the_file.seek(0)
        self.assertEqual(result.the_file.tell(), 0)
        self.assertEqual(result.the_file.read(len(text)), text)
@@ -148,15 +146,15 @@ class FileTest(unittest.TestCase):
        result.the_file.delete()

        # Ensure deleted file returns None
        self.assertTrue(result.the_file.read() == None)
        self.assertTrue(result.the_file.read() is None)

    def test_file_fields_set(self):

        class SetFile(Document):
            the_file = FileField()

        text = b('Hello, World!')
        more_text = b('Foo Bar')
        text = six.b('Hello, World!')
        more_text = six.b('Foo Bar')

        SetFile.drop_collection()

@@ -185,7 +183,7 @@ class FileTest(unittest.TestCase):
        GridDocument.drop_collection()

        with tempfile.TemporaryFile() as f:
            f.write(b("Hello World!"))
            f.write(six.b("Hello World!"))
            f.flush()

            # Test without default
@@ -202,7 +200,7 @@ class FileTest(unittest.TestCase):
            self.assertEqual(doc_b.the_file.grid_id, doc_c.the_file.grid_id)

            # Test with default
            doc_d = GridDocument(the_file=b(''))
            doc_d = GridDocument(the_file=six.b(''))
            doc_d.save()

            doc_e = GridDocument.objects.with_id(doc_d.id)
@@ -228,7 +226,7 @@ class FileTest(unittest.TestCase):
        # First instance
        test_file = TestFile()
        test_file.name = "Hello, World!"
        test_file.the_file.put(b('Hello, World!'))
        test_file.the_file.put(six.b('Hello, World!'))
        test_file.save()

        # Second instance
@@ -282,7 +280,7 @@ class FileTest(unittest.TestCase):

        test_file = TestFile()
        self.assertFalse(bool(test_file.the_file))
        test_file.the_file.put(b('Hello, World!'), content_type='text/plain')
        test_file.the_file.put(six.b('Hello, World!'), content_type='text/plain')
        test_file.save()
        self.assertTrue(bool(test_file.the_file))

@@ -297,6 +295,71 @@ class FileTest(unittest.TestCase):
        test_file = TestFile()
        self.assertFalse(test_file.the_file in [{"test": 1}])

    def test_file_disk_space(self):
        """ Test disk space usage when we delete/replace a file """
        class TestFile(Document):
            the_file = FileField()

        text = six.b('Hello, World!')
        content_type = 'text/plain'

        testfile = TestFile()
        testfile.the_file.put(text, content_type=content_type, filename="hello")
        testfile.save()

        # Now check fs.files and fs.chunks
        db = TestFile._get_db()

        files = db.fs.files.find()
        chunks = db.fs.chunks.find()
        self.assertEquals(len(list(files)), 1)
        self.assertEquals(len(list(chunks)), 1)

        # Deleting the document should delete the files
        testfile.delete()

        files = db.fs.files.find()
        chunks = db.fs.chunks.find()
        self.assertEquals(len(list(files)), 0)
        self.assertEquals(len(list(chunks)), 0)

        # Test case where we don't store a file in the first place
        testfile = TestFile()
        testfile.save()

        files = db.fs.files.find()
        chunks = db.fs.chunks.find()
        self.assertEquals(len(list(files)), 0)
        self.assertEquals(len(list(chunks)), 0)

        testfile.delete()

        files = db.fs.files.find()
        chunks = db.fs.chunks.find()
        self.assertEquals(len(list(files)), 0)
        self.assertEquals(len(list(chunks)), 0)

        # Test case where we overwrite the file
        testfile = TestFile()
        testfile.the_file.put(text, content_type=content_type, filename="hello")
        testfile.save()

        text = six.b('Bonjour, World!')
        testfile.the_file.replace(text, content_type=content_type, filename="hello")
        testfile.save()

        files = db.fs.files.find()
        chunks = db.fs.chunks.find()
        self.assertEquals(len(list(files)), 1)
        self.assertEquals(len(list(chunks)), 1)

        testfile.delete()

        files = db.fs.files.find()
        chunks = db.fs.chunks.find()
        self.assertEquals(len(list(files)), 0)
        self.assertEquals(len(list(chunks)), 0)

    def test_image_field(self):
        if not HAS_PIL:
            raise SkipTest('PIL not installed')
@@ -307,14 +370,14 @@ class FileTest(unittest.TestCase):
        TestImage.drop_collection()

        with tempfile.TemporaryFile() as f:
            f.write(b("Hello World!"))
            f.write(six.b("Hello World!"))
            f.flush()

            t = TestImage()
            try:
                t.image.put(f)
                self.fail("Should have raised an invalidation error")
            except ValidationError, e:
            except ValidationError as e:
                self.assertEqual("%s" % e, "Invalid image: cannot identify image file %s" % f)

            t = TestImage()
@@ -431,7 +494,7 @@ class FileTest(unittest.TestCase):
        # First instance
        test_file = TestFile()
        test_file.name = "Hello, World!"
        test_file.the_file.put(b('Hello, World!'),
        test_file.the_file.put(six.b('Hello, World!'),
                               name="hello.txt")
        test_file.save()

@@ -439,16 +502,15 @@ class FileTest(unittest.TestCase):
        self.assertEqual(data.get('name'), 'hello.txt')

        test_file = TestFile.objects.first()
        self.assertEqual(test_file.the_file.read(),
                         b('Hello, World!'))
        self.assertEqual(test_file.the_file.read(), six.b('Hello, World!'))

        test_file = TestFile.objects.first()
        test_file.the_file = b('HELLO, WORLD!')
        test_file.the_file = six.b('HELLO, WORLD!')
        test_file.save()

        test_file = TestFile.objects.first()
        self.assertEqual(test_file.the_file.read(),
                         b('HELLO, WORLD!'))
                         six.b('HELLO, WORLD!'))

    def test_copyable(self):
        class PutFile(Document):
@@ -456,7 +518,7 @@ class FileTest(unittest.TestCase):

        PutFile.drop_collection()

        text = b('Hello, World!')
        text = six.b('Hello, World!')
        content_type = 'text/plain'

        putfile = PutFile()
@@ -1,7 +1,4 @@
# -*- coding: utf-8 -*-
import sys
sys.path[0:0] = [""]

import unittest

from mongoengine import *
@@ -115,7 +112,7 @@ class GeoFieldTest(unittest.TestCase):
            expected = "Invalid LineString:\nBoth values (%s) in point must be float or int" % repr(coord[0])
            self._test_for_expected_error(Location, coord, expected)

        Location(loc=[[1, 2], [3, 4], [5, 6], [1,2]]).validate()
        Location(loc=[[1, 2], [3, 4], [5, 6], [1, 2]]).validate()

    def test_polygon_validation(self):
        class Location(Document):
@@ -226,7 +223,7 @@ class GeoFieldTest(unittest.TestCase):
            expected = "Invalid MultiLineString:\nBoth values (%s) in point must be float or int" % repr(coord[0][0])
            self._test_for_expected_error(Location, coord, expected)

        Location(loc=[[[1, 2], [3, 4], [5, 6], [1,2]]]).validate()
        Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate()

    def test_multipolygon_validation(self):
        class Location(Document):
@@ -336,12 +333,11 @@ class GeoFieldTest(unittest.TestCase):
        Location.drop_collection()
        Parent.drop_collection()

        list(Parent.objects)

        collection = Parent._get_collection()
        info = collection.index_information()

        Parent(name='Berlin').save()
        info = Parent._get_collection().index_information()
        self.assertFalse('location_2d' in info)
        info = Location._get_collection().index_information()
        self.assertTrue('location_2d' in info)

        self.assertEqual(len(Parent._geo_indices()), 0)
        self.assertEqual(len(Location._geo_indices()), 1)
@@ -17,7 +17,16 @@ class PickleTest(Document):
    photo = FileField()


class PickleDyanmicEmbedded(DynamicEmbeddedDocument):
class NewDocumentPickleTest(Document):
    number = IntField()
    string = StringField(choices=(('One', '1'), ('Two', '2')))
    embedded = EmbeddedDocumentField(PickleEmbedded)
    lists = ListField(StringField())
    photo = FileField()
    new_field = StringField()


class PickleDynamicEmbedded(DynamicEmbeddedDocument):
    date = DateTimeField(default=datetime.now)
@@ -1,8 +0,0 @@
from convert_to_new_inheritance_model import *
from decimalfield_as_float import *
from refrencefield_dbref_to_object_id import *
from turn_off_inheritance import *
from uuidfield_to_binary import *

if __name__ == '__main__':
    unittest.main()
@@ -1,51 +0,0 @@
# -*- coding: utf-8 -*-
import unittest

from mongoengine import Document, connect
from mongoengine.connection import get_db
from mongoengine.fields import StringField

__all__ = ('ConvertToNewInheritanceModel', )


class ConvertToNewInheritanceModel(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

    def tearDown(self):
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
            self.db.drop_collection(collection)

    def test_how_to_convert_to_the_new_inheritance_model(self):
        """Demonstrates migrating from 0.7 to 0.8
        """

        # 1. Declaration of the class
        class Animal(Document):
            name = StringField()
            meta = {
                'allow_inheritance': True,
                'indexes': ['name']
            }

        # 2. Remove _types
        collection = Animal._get_collection()
        collection.update({}, {"$unset": {"_types": 1}}, multi=True)

        # 3. Confirm extra data is removed
        count = collection.find({'_types': {"$exists": True}}).count()
        self.assertEqual(0, count)

        # 4. Remove indexes
        info = collection.index_information()
        indexes_to_drop = [key for key, value in info.iteritems()
                           if '_types' in dict(value['key'])]
        for index in indexes_to_drop:
            collection.drop_index(index)

        # 5. Recreate indexes
        Animal.ensure_indexes()
@@ -1,50 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
import decimal
from decimal import Decimal

from mongoengine import Document, connect
from mongoengine.connection import get_db
from mongoengine.fields import StringField, DecimalField, ListField

__all__ = ('ConvertDecimalField', )


class ConvertDecimalField(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

    def test_how_to_convert_decimal_fields(self):
        """Demonstrates migrating from 0.7 to 0.8
        """

        # 1. Old definition - using dbrefs
        class Person(Document):
            name = StringField()
            money = DecimalField(force_string=True)
            monies = ListField(DecimalField(force_string=True))

        Person.drop_collection()
        Person(name="Wilson Jr", money=Decimal("2.50"),
               monies=[Decimal("2.10"), Decimal("5.00")]).save()

        # 2. Start the migration by changing the schema
        # Change DecimalField - add precision and rounding settings
        class Person(Document):
            name = StringField()
            money = DecimalField(precision=2, rounding=decimal.ROUND_HALF_UP)
            monies = ListField(DecimalField(precision=2,
                                            rounding=decimal.ROUND_HALF_UP))

        # 3. Loop all the objects and mark parent as changed
        for p in Person.objects:
            p._mark_as_changed('money')
            p._mark_as_changed('monies')
            p.save()

        # 4. Confirmation of the fix!
        wilson = Person.objects(name="Wilson Jr").as_pymongo()[0]
        self.assertTrue(isinstance(wilson['money'], float))
        self.assertTrue(all([isinstance(m, float) for m in wilson['monies']]))
@@ -1,52 +0,0 @@
# -*- coding: utf-8 -*-
import unittest

from mongoengine import Document, connect
from mongoengine.connection import get_db
from mongoengine.fields import StringField, ReferenceField, ListField

__all__ = ('ConvertToObjectIdsModel', )


class ConvertToObjectIdsModel(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

    def test_how_to_convert_to_object_id_reference_fields(self):
        """Demonstrates migrating from 0.7 to 0.8
        """

        # 1. Old definition - using dbrefs
        class Person(Document):
            name = StringField()
            parent = ReferenceField('self', dbref=True)
            friends = ListField(ReferenceField('self', dbref=True))

        Person.drop_collection()

        p1 = Person(name="Wilson", parent=None).save()
        f1 = Person(name="John", parent=None).save()
        f2 = Person(name="Paul", parent=None).save()
        f3 = Person(name="George", parent=None).save()
        f4 = Person(name="Ringo", parent=None).save()
        Person(name="Wilson Jr", parent=p1, friends=[f1, f2, f3, f4]).save()

        # 2. Start the migration by changing the schema
        # Change ReferenceField as now dbref defaults to False
        class Person(Document):
            name = StringField()
            parent = ReferenceField('self')
            friends = ListField(ReferenceField('self'))

        # 3. Loop all the objects and mark parent as changed
        for p in Person.objects:
            p._mark_as_changed('parent')
            p._mark_as_changed('friends')
            p.save()

        # 4. Confirmation of the fix!
        wilson = Person.objects(name="Wilson Jr").as_pymongo()[0]
        self.assertEqual(p1.id, wilson['parent'])
        self.assertEqual([f1.id, f2.id, f3.id, f4.id], wilson['friends'])
@@ -1,62 +0,0 @@
# -*- coding: utf-8 -*-
import unittest

from mongoengine import Document, connect
from mongoengine.connection import get_db
from mongoengine.fields import StringField

__all__ = ('TurnOffInheritanceTest', )


class TurnOffInheritanceTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

    def tearDown(self):
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
            self.db.drop_collection(collection)

    def test_how_to_turn_off_inheritance(self):
        """Demonstrates migrating from allow_inheritance = True to False.
        """

        # 1. Old declaration of the class

        class Animal(Document):
            name = StringField()
            meta = {
                'allow_inheritance': True,
                'indexes': ['name']
            }

        # 2. Turn off inheritance
        class Animal(Document):
            name = StringField()
            meta = {
                'allow_inheritance': False,
                'indexes': ['name']
            }

        # 3. Remove _types and _cls
        collection = Animal._get_collection()
        collection.update({}, {"$unset": {"_types": 1, "_cls": 1}}, multi=True)

        # 3. Confirm extra data is removed
        count = collection.find({"$or": [{'_types': {"$exists": True}},
                                         {'_cls': {"$exists": True}}]}).count()
        assert count == 0

        # 4. Remove indexes
        info = collection.index_information()
        indexes_to_drop = [key for key, value in info.iteritems()
                           if '_types' in dict(value['key'])
                           or '_cls' in dict(value['key'])]
        for index in indexes_to_drop:
            collection.drop_index(index)

        # 5. Recreate indexes
        Animal.ensure_indexes()
@@ -1,48 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
import uuid

from mongoengine import Document, connect
from mongoengine.connection import get_db
from mongoengine.fields import StringField, UUIDField, ListField

__all__ = ('ConvertToBinaryUUID', )


class ConvertToBinaryUUID(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

    def test_how_to_convert_to_binary_uuid_fields(self):
        """Demonstrates migrating from 0.7 to 0.8
        """

        # 1. Old definition - using dbrefs
        class Person(Document):
            name = StringField()
            uuid = UUIDField(binary=False)
            uuids = ListField(UUIDField(binary=False))

        Person.drop_collection()
        Person(name="Wilson Jr", uuid=uuid.uuid4(),
               uuids=[uuid.uuid4(), uuid.uuid4()]).save()

        # 2. Start the migration by changing the schema
        # Change UUIDField as now binary defaults to True
        class Person(Document):
            name = StringField()
            uuid = UUIDField()
            uuids = ListField(UUIDField())

        # 3. Loop all the objects and mark parent as changed
        for p in Person.objects:
            p._mark_as_changed('uuid')
            p._mark_as_changed('uuids')
            p.save()

        # 4. Confirmation of the fix!
        wilson = Person.objects(name="Wilson Jr").as_pymongo()[0]
        self.assertTrue(isinstance(wilson['uuid'], uuid.UUID))
        self.assertTrue(all([isinstance(u, uuid.UUID) for u in wilson['uuids']]))
@@ -1,6 +1,3 @@
import sys
sys.path[0:0] = [""]

import unittest

from mongoengine import *
@@ -95,7 +92,7 @@ class OnlyExcludeAllTest(unittest.TestCase):
        exclude = ['d', 'e']
        only = ['b', 'c']

        qs = MyDoc.objects.fields(**dict(((i, 1) for i in include)))
        qs = MyDoc.objects.fields(**{i: 1 for i in include})
        self.assertEqual(qs._loaded_fields.as_dict(),
                         {'a': 1, 'b': 1, 'c': 1, 'd': 1, 'e': 1})
        qs = qs.only(*only)
@@ -103,14 +100,14 @@ class OnlyExcludeAllTest(unittest.TestCase):
        qs = qs.exclude(*exclude)
        self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})

        qs = MyDoc.objects.fields(**dict(((i, 1) for i in include)))
        qs = MyDoc.objects.fields(**{i: 1 for i in include})
        qs = qs.exclude(*exclude)
        self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1})
        qs = qs.only(*only)
        self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})

        qs = MyDoc.objects.exclude(*exclude)
        qs = qs.fields(**dict(((i, 1) for i in include)))
        qs = qs.fields(**{i: 1 for i in include})
        self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1})
        qs = qs.only(*only)
        self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})
@@ -129,7 +126,7 @@ class OnlyExcludeAllTest(unittest.TestCase):
        exclude = ['d', 'e']
        only = ['b', 'c']

        qs = MyDoc.objects.fields(**dict(((i, 1) for i in include)))
        qs = MyDoc.objects.fields(**{i: 1 for i in include})
        qs = qs.exclude(*exclude)
        qs = qs.only(*only)
        qs = qs.fields(slice__b=5)
@@ -1,12 +1,12 @@
|
||||
import sys
|
||||
sys.path[0:0] = [""]
|
||||
|
||||
import unittest
|
||||
from datetime import datetime, timedelta
|
||||
from mongoengine import *
|
||||
import unittest
|
||||
|
||||
from pymongo.errors import OperationFailure
|
||||
from mongoengine import *
|
from mongoengine.connection import get_connection
from nose.plugins.skip import SkipTest


__all__ = ("GeoQueriesTest",)


@@ -66,6 +66,16 @@ class GeoQueriesTest(unittest.TestCase):
        self.assertEqual(events.count(), 1)
        self.assertEqual(events[0], event2)

        # find events at least 10 degrees away from san francisco
        point = [-122.415579, 37.7566023]
        events = Event.objects(location__near=point, location__min_distance=10)
        # The following real test passes on MongoDB 3 but minDistance seems
        # buggy on older MongoDB versions
        if get_connection().server_info()['versionArray'][0] > 2:
            self.assertEqual(events.count(), 2)
        else:
            self.assertTrue(events.count() >= 2)

        # find events within 10 degrees of san francisco
        point_and_distance = [[-122.415579, 37.7566023], 10]
        events = Event.objects(location__within_distance=point_and_distance)

@@ -141,7 +151,13 @@ class GeoQueriesTest(unittest.TestCase):
    def test_spherical_geospatial_operators(self):
        """Ensure that spherical geospatial queries are working
        """
        raise SkipTest("https://jira.mongodb.org/browse/SERVER-14039")
        # Needs MongoDB > 2.6.4 https://jira.mongodb.org/browse/SERVER-14039
        connection = get_connection()
        info = connection.test.command('buildInfo')
        mongodb_version = tuple([int(i) for i in info['version'].split('.')])
        if mongodb_version < (2, 6, 4):
            raise SkipTest("Need MongoDB version 2.6.4+")

        class Point(Document):
            location = GeoPointField()

@@ -161,7 +177,7 @@ class GeoQueriesTest(unittest.TestCase):

        # Same behavior for _within_spherical_distance
        points = Point.objects(
            location__within_spherical_distance=[[-122, 37.5], 60/earth_radius]
            location__within_spherical_distance=[[-122, 37.5], 60 / earth_radius]
        )
        self.assertEqual(points.count(), 2)

@@ -169,6 +185,24 @@ class GeoQueriesTest(unittest.TestCase):
                                location__max_distance=60 / earth_radius)
        self.assertEqual(points.count(), 2)

        # Test query works with max_distance, being farther from one point
        points = Point.objects(location__near_sphere=[-122, 37.8],
                               location__max_distance=60 / earth_radius)
        close_point = points.first()
        self.assertEqual(points.count(), 1)

        # Test query works with min_distance, being farther from one point
        points = Point.objects(location__near_sphere=[-122, 37.8],
                               location__min_distance=60 / earth_radius)
        # The following real test passes on MongoDB 3 but minDistance seems
        # buggy on older MongoDB versions
        if get_connection().server_info()['versionArray'][0] > 2:
            self.assertEqual(points.count(), 1)
            far_point = points.first()
            self.assertNotEqual(close_point, far_point)
        else:
            self.assertTrue(points.count() >= 1)

        # Finds both points, but orders the north point first because it's
        # closer to the reference point to the north.
        points = Point.objects(location__near_sphere=[-122, 38.5])

@@ -251,6 +285,20 @@ class GeoQueriesTest(unittest.TestCase):
        self.assertEqual(events.count(), 2)
        self.assertEqual(events[0], event3)

        # ensure min_distance and max_distance combine well
        events = Event.objects(location__near=[-87.67892, 41.9120459],
                               location__min_distance=1000,
                               location__max_distance=10000).order_by("-date")
        self.assertEqual(events.count(), 1)
        self.assertEqual(events[0], event3)

        # ensure ordering is respected by "near"
        events = Event.objects(location__near=[-87.67892, 41.9120459],
                               # location__min_distance=10000
                               location__min_distance=10000).order_by("-date")
        self.assertEqual(events.count(), 1)
        self.assertEqual(events[0], event2)

        # check that within_box works
        box = [(-125.0, 35.0), (-100.0, 40.0)]
        events = Event.objects(location__geo_within_box=box)
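As a quick reference, the `__near`/`__min_distance` operators exercised in the hunks above read as follows outside the test suite. A minimal sketch, assuming a reachable local MongoDB; the database name and documents here are illustrative, not the suite's exact fixtures:

from mongoengine import Document, GeoPointField, StringField, connect

connect('geo_sketch')  # assumes mongod on localhost

class Event(Document):
    title = StringField()
    location = GeoPointField()  # 2d index; coordinates are [longitude, latitude]

Event.drop_collection()
Event(title='near', location=[-122.41, 37.75]).save()
Event(title='far', location=[-87.67, 41.91]).save()

# __near orders results by proximity; distances are in degrees for 2d indexes.
nearby = Event.objects(location__near=[-122.415579, 37.7566023])

# __min_distance excludes the closest matches; per the comment in the diff,
# minDistance is only reliable on MongoDB 3+, hence the version gate above.
far_only = Event.objects(location__near=[-122.415579, 37.7566023],
                         location__min_distance=10)
print(far_only.count())
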
@@ -1,6 +1,3 @@
import sys
sys.path[0:0] = [""]

import unittest

from mongoengine import connect, Document, IntField
tests/queryset/pickable.py (new file, 78 lines)
@@ -0,0 +1,78 @@
|
||||
import pickle
|
||||
import unittest
|
||||
from pymongo.mongo_client import MongoClient
|
||||
from mongoengine import Document, StringField, IntField
|
||||
from mongoengine.connection import connect
|
||||
|
||||
__author__ = 'stas'
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
|
||||
class TestQuerysetPickable(unittest.TestCase):
|
||||
"""
|
||||
Test for adding pickling support for QuerySet instances
|
||||
See issue https://github.com/MongoEngine/mongoengine/issues/442
|
||||
"""
|
||||
def setUp(self):
|
||||
super(TestQuerysetPickable, self).setUp()
|
||||
|
||||
connection = connect(db="test") #type: pymongo.mongo_client.MongoClient
|
||||
|
||||
connection.drop_database("test")
|
||||
|
||||
self.john = Person.objects.create(
|
||||
name="John",
|
||||
age=21
|
||||
)
|
||||
|
||||
|
||||
def test_picke_simple_qs(self):
|
||||
|
||||
qs = Person.objects.all()
|
||||
|
||||
pickle.dumps(qs)
|
||||
|
||||
def _get_loaded(self, qs):
|
||||
s = pickle.dumps(qs)
|
||||
|
||||
return pickle.loads(s)
|
||||
|
||||
def test_unpickle(self):
|
||||
qs = Person.objects.all()
|
||||
|
||||
loadedQs = self._get_loaded(qs)
|
||||
|
||||
self.assertEqual(qs.count(), loadedQs.count())
|
||||
|
||||
#can update loadedQs
|
||||
loadedQs.update(age=23)
|
||||
|
||||
#check
|
||||
self.assertEqual(Person.objects.first().age, 23)
|
||||
|
||||
def test_pickle_support_filtration(self):
|
||||
Person.objects.create(
|
||||
name="Alice",
|
||||
age=22
|
||||
)
|
||||
|
||||
Person.objects.create(
|
||||
name="Bob",
|
||||
age=23
|
||||
)
|
||||
|
||||
qs = Person.objects.filter(age__gte=22)
|
||||
self.assertEqual(qs.count(), 2)
|
||||
|
||||
loaded = self._get_loaded(qs)
|
||||
|
||||
self.assertEqual(loaded.count(), 2)
|
||||
self.assertEqual(loaded.filter(name="Bob").first().age, 23)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
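The behaviour the new test file pins down can be reproduced directly. A minimal sketch, assuming a reachable MongoDB and a `Person` document like the one defined above:

import pickle

from mongoengine import Document, IntField, StringField, connect

connect('pickle_sketch')

class Person(Document):
    name = StringField()
    age = IntField()

Person.drop_collection()
Person(name='John', age=21).save()

qs = Person.objects.filter(age__gte=21)
restored = pickle.loads(pickle.dumps(qs))  # round-trip the QuerySet

# The restored QuerySet is fully functional: it re-runs the same query
# and can issue updates, exactly as test_unpickle asserts.
assert restored.count() == qs.count()
restored.update(age=22)
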
File diff suppressed because it is too large
@@ -1,11 +1,7 @@
import sys
sys.path[0:0] = [""]

import unittest

from mongoengine import *
from mongoengine.queryset import Q
from mongoengine.queryset import transform
from mongoengine.queryset import Q, transform

__all__ = ("TransformTest",)

@@ -41,8 +37,8 @@ class TransformTest(unittest.TestCase):
        DicDoc.drop_collection()
        Doc.drop_collection()

        DicDoc().save()
        doc = Doc().save()
        dic_doc = DicDoc().save()

        for k, v in (("set", "$set"), ("set_on_insert", "$setOnInsert"), ("push", "$push")):
            update = transform.update(DicDoc, **{"%s__dictField__test" % k: doc})

@@ -55,7 +51,6 @@ class TransformTest(unittest.TestCase):
        update = transform.update(DicDoc, pull__dictField__test=doc)
        self.assertTrue(isinstance(update["$pull"]["dictField"]["test"], dict))


    def test_query_field_name(self):
        """Ensure that the correct field name is used when querying.
        """

@@ -156,26 +151,33 @@ class TransformTest(unittest.TestCase):
        class Doc(Document):
            meta = {'allow_inheritance': False}

        raw_query = Doc.objects(__raw__={'deleted': False,
                                         'scraped': 'yes',
                                         '$nor': [{'views.extracted': 'no'},
                                                  {'attachments.views.extracted': 'no'}]
                                         })._query
        raw_query = Doc.objects(__raw__={
            'deleted': False,
            'scraped': 'yes',
            '$nor': [
                {'views.extracted': 'no'},
                {'attachments.views.extracted': 'no'}
            ]
        })._query

        expected = {'deleted': False, 'scraped': 'yes',
                    '$nor': [{'views.extracted': 'no'},
                             {'attachments.views.extracted': 'no'}]}
        self.assertEqual(expected, raw_query)
        self.assertEqual(raw_query, {
            'deleted': False,
            'scraped': 'yes',
            '$nor': [
                {'views.extracted': 'no'},
                {'attachments.views.extracted': 'no'}
            ]
        })

    def test_geojson_PointField(self):
        class Location(Document):
            loc = PointField()

        update = transform.update(Location, set__loc=[1, 2])
        self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1,2]}}})
        self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1, 2]}}})

        update = transform.update(Location, set__loc={"type": "Point", "coordinates": [1,2]})
        self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1,2]}}})
        update = transform.update(Location, set__loc={"type": "Point", "coordinates": [1, 2]})
        self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1, 2]}}})

    def test_geojson_LineStringField(self):
        class Location(Document):

@@ -208,6 +210,37 @@ class TransformTest(unittest.TestCase):
        self.assertEqual(Doc.objects(df__type=2).count(), 1)   # str
        self.assertEqual(Doc.objects(df__type=16).count(), 1)  # int

    def test_last_field_name_like_operator(self):
        class EmbeddedItem(EmbeddedDocument):
            type = StringField()
            name = StringField()

        class Doc(Document):
            item = EmbeddedDocumentField(EmbeddedItem)

        Doc.drop_collection()

        doc = Doc(item=EmbeddedItem(type="axe", name="Heroic axe"))
        doc.save()

        self.assertEqual(1, Doc.objects(item__type__="axe").count())
        self.assertEqual(1, Doc.objects(item__name__="Heroic axe").count())

        Doc.objects(id=doc.id).update(set__item__type__='sword')
        self.assertEqual(1, Doc.objects(item__type__="sword").count())
        self.assertEqual(0, Doc.objects(item__type__="axe").count())

    def test_understandable_error_raised(self):
        class Event(Document):
            title = StringField()
            location = GeoPointField()

        box = [(35.0, -125.0), (40.0, -100.0)]
        # I *meant* to execute location__within_box=box
        events = Event.objects(location__within=box)
        with self.assertRaises(InvalidQueryError):
            events.count()


if __name__ == '__main__':
    unittest.main()
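For context, `transform.update` is the internal helper these hunks probe: it turns MongoEngine's keyword-style modifiers into raw MongoDB update documents. A small sketch mirroring the assertion above (note this is an internal API, so its module path and signature may vary between versions):

from mongoengine import Document, PointField, connect
from mongoengine.queryset import transform

connect('transform_sketch')

class Location(Document):
    loc = PointField()

# Keyword syntax on the left, raw update document on the right.
update = transform.update(Location, set__loc=[1, 2])
assert update == {'$set': {'loc': {'type': 'Point', 'coordinates': [1, 2]}}}
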
@@ -1,14 +1,12 @@
import sys
sys.path[0:0] = [""]

import datetime
import re
import unittest

from bson import ObjectId
from datetime import datetime

from mongoengine import *
from mongoengine.queryset import Q
from mongoengine.errors import InvalidQueryError
from mongoengine.queryset import Q

__all__ = ("QTest",)

@@ -132,12 +130,12 @@ class QTest(unittest.TestCase):
        TestDoc(x=10).save()
        TestDoc(y=True).save()

        self.assertEqual(query,
                         {'$and': [
                             {'$or': [{'x': {'$gt': 0}}, {'x': {'$exists': False}}]},
                             {'$or': [{'x': {'$lt': 100}}, {'y': True}]}
                         ]})

        self.assertEqual(query, {
            '$and': [
                {'$or': [{'x': {'$gt': 0}}, {'x': {'$exists': False}}]},
                {'$or': [{'x': {'$lt': 100}}, {'y': True}]}
            ]
        })
        self.assertEqual(2, TestDoc.objects(q1 & q2).count())

    def test_or_and_or_combination(self):

@@ -157,15 +155,14 @@ class QTest(unittest.TestCase):
        q2 = (Q(x__lt=100) & (Q(y=False) | Q(y__exists=False)))
        query = (q1 | q2).to_query(TestDoc)

        self.assertEqual(query,
                         {'$or': [
        self.assertEqual(query, {
            '$or': [
                {'$and': [{'x': {'$gt': 0}},
                          {'$or': [{'y': True}, {'y': {'$exists': False}}]}]},
                {'$and': [{'x': {'$lt': 100}},
                          {'$or': [{'y': False}, {'y': {'$exists': False}}]}]}
            ]}
        )

            ]
        })
        self.assertEqual(2, TestDoc.objects(q1 | q2).count())

    def test_multiple_occurence_in_field(self):

@@ -188,7 +185,7 @@ class QTest(unittest.TestCase):
        x = IntField()

        TestDoc.drop_collection()
        for i in xrange(1, 101):
        for i in range(1, 101):
            t = TestDoc(x=i)
            t.save()

@@ -215,19 +212,19 @@ class QTest(unittest.TestCase):

        BlogPost.drop_collection()

        post1 = BlogPost(title='Test 1', publish_date=datetime(2010, 1, 8), published=False)
        post1 = BlogPost(title='Test 1', publish_date=datetime.datetime(2010, 1, 8), published=False)
        post1.save()

        post2 = BlogPost(title='Test 2', publish_date=datetime(2010, 1, 15), published=True)
        post2 = BlogPost(title='Test 2', publish_date=datetime.datetime(2010, 1, 15), published=True)
        post2.save()

        post3 = BlogPost(title='Test 3', published=True)
        post3.save()

        post4 = BlogPost(title='Test 4', publish_date=datetime(2010, 1, 8))
        post4 = BlogPost(title='Test 4', publish_date=datetime.datetime(2010, 1, 8))
        post4.save()

        post5 = BlogPost(title='Test 1', publish_date=datetime(2010, 1, 15))
        post5 = BlogPost(title='Test 1', publish_date=datetime.datetime(2010, 1, 15))
        post5.save()

        post6 = BlogPost(title='Test 1', published=False)

@@ -250,7 +247,7 @@ class QTest(unittest.TestCase):
        self.assertTrue(all(obj.id in posts for obj in published_posts))

        # Check Q object combination
        date = datetime(2010, 1, 10)
        date = datetime.datetime(2010, 1, 10)
        q = BlogPost.objects(Q(publish_date__lte=date) | Q(published=True))
        posts = [post.id for post in q]

@@ -271,12 +268,13 @@ class QTest(unittest.TestCase):
        self.assertEqual(self.Person.objects(Q(age__in=[20, 30])).count(), 3)

        # Test invalid query objs
        def wrong_query_objs():
        with self.assertRaises(InvalidQueryError):
            self.Person.objects('user1')
        def wrong_query_objs_filter():
            self.Person.objects('user1')
        self.assertRaises(InvalidQueryError, wrong_query_objs)
        self.assertRaises(InvalidQueryError, wrong_query_objs_filter)

        # filter should fail, too
        with self.assertRaises(InvalidQueryError):
            self.Person.objects.filter('user1')


    def test_q_regex(self):
        """Ensure that Q objects can be queried using regexes.

@@ -284,7 +282,6 @@ class QTest(unittest.TestCase):
        person = self.Person(name='Guido van Rossum')
        person.save()

        import re
        obj = self.Person.objects(Q(name=re.compile('^Gui'))).first()
        self.assertEqual(obj, person)
        obj = self.Person.objects(Q(name=re.compile('^gui'))).first()
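As context for the rewritten assertions: `Q` objects compose with `&` and `|` into `$and`/`$or` trees, which `to_query` renders as a raw query document. A minimal sketch mirroring the first reformatted assertion above, assuming a `TestDoc` like the one in the tests:

from mongoengine import BooleanField, Document, IntField, connect
from mongoengine.queryset import Q

connect('q_sketch')

class TestDoc(Document):
    x = IntField()
    y = BooleanField()

# & joins the two OR branches under a single $and.
q1 = Q(x__gt=0) | Q(x__exists=False)
q2 = Q(x__lt=100) | Q(y=True)
print((q1 & q2).to_query(TestDoc))
# {'$and': [{'$or': [{'x': {'$gt': 0}}, {'x': {'$exists': False}}]},
#           {'$or': [{'x': {'$lt': 100}}, {'y': True}]}]}
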
@@ -1,19 +1,30 @@
import sys
sys.path[0:0] = [""]
import datetime
from pymongo.errors import OperationFailure

try:
    import unittest2 as unittest
except ImportError:
    import unittest

import datetime
from nose.plugins.skip import SkipTest

import pymongo
from bson.tz_util import utc

from mongoengine import *
from mongoengine import (
    connect, register_connection,
    Document, DateTimeField
)
from mongoengine.python_support import IS_PYMONGO_3
import mongoengine.connection
from mongoengine.connection import get_db, get_connection, ConnectionError
from mongoengine.connection import (MongoEngineConnectionError, get_db,
                                    get_connection)


def get_tz_awareness(connection):
    if not IS_PYMONGO_3:
        return connection.tz_aware
    else:
        return connection.codec_options.tz_aware


class ConnectionTest(unittest.TestCase):

@@ -39,15 +50,99 @@ class ConnectionTest(unittest.TestCase):
        conn = get_connection('testdb')
        self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))

    def test_connect_in_mocking(self):
        """Ensure that the connect() method works properly in mocking.
        """
        try:
            import mongomock
        except ImportError:
            raise SkipTest('you need mongomock installed to run this testcase')

        connect('mongoenginetest', host='mongomock://localhost')
        conn = get_connection()
        self.assertTrue(isinstance(conn, mongomock.MongoClient))

        connect('mongoenginetest2', host='mongomock://localhost', alias='testdb2')
        conn = get_connection('testdb2')
        self.assertTrue(isinstance(conn, mongomock.MongoClient))

        connect('mongoenginetest3', host='mongodb://localhost', is_mock=True, alias='testdb3')
        conn = get_connection('testdb3')
        self.assertTrue(isinstance(conn, mongomock.MongoClient))

        connect('mongoenginetest4', is_mock=True, alias='testdb4')
        conn = get_connection('testdb4')
        self.assertTrue(isinstance(conn, mongomock.MongoClient))

        connect(host='mongodb://localhost:27017/mongoenginetest5', is_mock=True, alias='testdb5')
        conn = get_connection('testdb5')
        self.assertTrue(isinstance(conn, mongomock.MongoClient))

        connect(host='mongomock://localhost:27017/mongoenginetest6', alias='testdb6')
        conn = get_connection('testdb6')
        self.assertTrue(isinstance(conn, mongomock.MongoClient))

        connect(host='mongomock://localhost:27017/mongoenginetest7', is_mock=True, alias='testdb7')
        conn = get_connection('testdb7')
        self.assertTrue(isinstance(conn, mongomock.MongoClient))

    def test_connect_with_host_list(self):
        """Ensure that the connect() method works when host is a list

        Uses mongomock to test w/o needing multiple mongod/mongos processes
        """
        try:
            import mongomock
        except ImportError:
            raise SkipTest('you need mongomock installed to run this testcase')

        connect(host=['mongomock://localhost'])
        conn = get_connection()
        self.assertTrue(isinstance(conn, mongomock.MongoClient))

        connect(host=['mongodb://localhost'], is_mock=True, alias='testdb2')
        conn = get_connection('testdb2')
        self.assertTrue(isinstance(conn, mongomock.MongoClient))

        connect(host=['localhost'], is_mock=True, alias='testdb3')
        conn = get_connection('testdb3')
        self.assertTrue(isinstance(conn, mongomock.MongoClient))

        connect(host=['mongomock://localhost:27017', 'mongomock://localhost:27018'], alias='testdb4')
        conn = get_connection('testdb4')
        self.assertTrue(isinstance(conn, mongomock.MongoClient))

        connect(host=['mongodb://localhost:27017', 'mongodb://localhost:27018'], is_mock=True, alias='testdb5')
        conn = get_connection('testdb5')
        self.assertTrue(isinstance(conn, mongomock.MongoClient))

        connect(host=['localhost:27017', 'localhost:27018'], is_mock=True, alias='testdb6')
        conn = get_connection('testdb6')
        self.assertTrue(isinstance(conn, mongomock.MongoClient))
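The new mocking tests rely on the `mongomock://` host scheme (or the `is_mock=True` flag) added alongside them. A minimal sketch of what that enables, assuming `mongomock` is installed:

import mongomock
from mongoengine import Document, StringField, connect
from mongoengine.connection import get_connection

# No running mongod needed: the mongomock:// scheme routes the alias
# to an in-memory mongomock.MongoClient instead of a real server.
connect('unit_tests', host='mongomock://localhost')
assert isinstance(get_connection(), mongomock.MongoClient)

class Widget(Document):
    name = StringField()

Widget(name='w1').save()           # stored in the in-memory client
assert Widget.objects.count() == 1
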
    def test_disconnect(self):
        """Ensure that the disconnect() method works properly
        """
        conn1 = connect('mongoenginetest')
        mongoengine.connection.disconnect()
        conn2 = connect('mongoenginetest')
        self.assertTrue(conn1 is not conn2)

    def test_sharing_connections(self):
        """Ensure that connections are shared when the connection settings are exactly the same
        """
        connect('mongoenginetest', alias='testdb1')
        connect('mongoenginetests', alias='testdb1')
        expected_connection = get_connection('testdb1')

        connect('mongoenginetest', alias='testdb2')
        connect('mongoenginetests', alias='testdb2')
        actual_connection = get_connection('testdb2')

        # Handle PyMongo 3+ Async Connection
        if IS_PYMONGO_3:
            # Ensure we are connected, throws ServerSelectionTimeoutError otherwise.
            # Purposely not catching exception to fail test if thrown.
            expected_connection.server_info()

        self.assertEqual(expected_connection, actual_connection)

    def test_connect_uri(self):

@@ -61,7 +156,11 @@ class ConnectionTest(unittest.TestCase):
        c.admin.authenticate("admin", "password")
        c.mongoenginetest.add_user("username", "password")

        self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost')
        if not IS_PYMONGO_3:
            self.assertRaises(
                MongoEngineConnectionError, connect, 'testdb_uri_bad',
                host='mongodb://test:password@localhost'
            )

        connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest')

@@ -76,19 +175,9 @@ class ConnectionTest(unittest.TestCase):
        c.mongoenginetest.system.users.remove({})

    def test_connect_uri_without_db(self):
        """Ensure that the connect() method works properly with uri's
        without database_name
        """Ensure connect() works properly if the URI doesn't
        include a database name.
        """
        c = connect(db='mongoenginetest', alias='admin')
        c.admin.system.users.remove({})
        c.mongoenginetest.system.users.remove({})

        c.admin.add_user("admin", "password")
        c.admin.authenticate("admin", "password")
        c.mongoenginetest.add_user("username", "password")

        self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost')

        connect("mongoenginetest", host='mongodb://localhost/')

        conn = get_connection()

@@ -98,15 +187,75 @@ class ConnectionTest(unittest.TestCase):
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'mongoenginetest')

    def test_connect_uri_default_db(self):
        """Ensure connect() defaults to the right database name if
        the URI and the database_name don't explicitly specify it.
        """
        connect(host='mongodb://localhost/')

        conn = get_connection()
        self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))

        db = get_db()
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'test')

    def test_uri_without_credentials_doesnt_override_conn_settings(self):
        """Ensure connect() uses the username & password params if the URI
        doesn't explicitly specify them.
        """
        c = connect(host='mongodb://localhost/mongoenginetest',
                    username='user',
                    password='pass')

        # OperationFailure means that mongoengine attempted authentication
        # w/ the provided username/password and failed - that's the desired
        # behavior. If the MongoDB URI would override the credentials,
        # get_db would succeed and this assertion would fail.
        self.assertRaises(OperationFailure, get_db)

    def test_connect_uri_with_authsource(self):
        """Ensure that the connect() method works well with
        the option `authSource` in URI.
        This feature was introduced in MongoDB 2.4 and removed in 2.6
        """
        # Create users
        c = connect('mongoenginetest')
        c.admin.system.users.remove({})
        c.admin.add_user('username2', 'password')

        # Authentication fails without "authSource"
        if IS_PYMONGO_3:
            test_conn = connect('mongoenginetest', alias='test1',
                                host='mongodb://username2:password@localhost/mongoenginetest')
            self.assertRaises(OperationFailure, test_conn.server_info)
        else:
            self.assertRaises(
                MongoEngineConnectionError, connect, 'mongoenginetest',
                alias='test1',
                host='mongodb://username2:password@localhost/mongoenginetest'
            )
            self.assertRaises(MongoEngineConnectionError, get_db, 'test1')

        # Authentication succeeds with "authSource"
        connect(
            'mongoenginetest', alias='test2',
            host=('mongodb://username2:password@localhost/'
                  'mongoenginetest?authSource=admin')
        )
        # This will fail starting from MongoDB 2.6+
        db = get_db('test2')
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'mongoenginetest')

        # Clear all users
        c.admin.system.users.remove({})
        c.mongoenginetest.system.users.remove({})

    def test_register_connection(self):
        """Ensure that connections with different aliases may be registered.
        """
        register_connection('testdb', 'mongoenginetest2')

        self.assertRaises(ConnectionError, get_connection)
        self.assertRaises(MongoEngineConnectionError, get_connection)
        conn = get_connection('testdb')
        self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))

@@ -128,11 +277,11 @@ class ConnectionTest(unittest.TestCase):
        connect('mongoenginetest', alias='t1', tz_aware=True)
        conn = get_connection('t1')

        self.assertTrue(conn.tz_aware)
        self.assertTrue(get_tz_awareness(conn))

        connect('mongoenginetest2', alias='t2')
        conn = get_connection('t2')
        self.assertFalse(conn.tz_aware)
        self.assertFalse(get_tz_awareness(conn))

    def test_datetime(self):
        connect('mongoenginetest', tz_aware=True)

@@ -156,8 +305,17 @@ class ConnectionTest(unittest.TestCase):
        self.assertEqual(len(mongo_connections.items()), 2)
        self.assertTrue('t1' in mongo_connections.keys())
        self.assertTrue('t2' in mongo_connections.keys())
        self.assertEqual(mongo_connections['t1'].host, 'localhost')
        self.assertEqual(mongo_connections['t2'].host, '127.0.0.1')
        if not IS_PYMONGO_3:
            self.assertEqual(mongo_connections['t1'].host, 'localhost')
            self.assertEqual(mongo_connections['t2'].host, '127.0.0.1')
        else:
            # Handle PyMongo 3+ Async Connection
            # Ensure we are connected, throws ServerSelectionTimeoutError otherwise.
            # Purposely not catching exception to fail test if thrown.
            mongo_connections['t1'].server_info()
            mongo_connections['t2'].server_info()
            self.assertEqual(mongo_connections['t1'].address[0], 'localhost')
            self.assertEqual(mongo_connections['t2'].address[0], '127.0.0.1')


if __name__ == '__main__':
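In ordinary application code, the `?authSource=admin` flow tested above looks like this. A sketch assuming a user `username2` created in the `admin` database, as in the test:

from mongoengine import connect
from mongoengine.connection import get_db

# Credentials live in the admin database, not in mongoenginetest,
# so the URI must point authentication at admin via authSource.
connect(
    'mongoenginetest',
    host='mongodb://username2:password@localhost/mongoenginetest?authSource=admin'
)
db = get_db()
print(db.name)  # -> 'mongoenginetest'
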
@@ -1,5 +1,3 @@
import sys
sys.path[0:0] = [""]
import unittest

from mongoengine import *

@@ -79,7 +77,7 @@ class ContextManagersTest(unittest.TestCase):
        User.drop_collection()
        Group.drop_collection()

        for i in xrange(1, 51):
        for i in range(1, 51):
            User(name='user %s' % i).save()

        user = User.objects.first()

@@ -117,7 +115,7 @@ class ContextManagersTest(unittest.TestCase):
        User.drop_collection()
        Group.drop_collection()

        for i in xrange(1, 51):
        for i in range(1, 51):
            User(name='user %s' % i).save()

        user = User.objects.first()

@@ -195,7 +193,7 @@ class ContextManagersTest(unittest.TestCase):
        with query_counter() as q:
            self.assertEqual(0, q)

            for i in xrange(1, 51):
            for i in range(1, 51):
                db.test.find({}).count()

            self.assertEqual(50, q)
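The `query_counter` context manager touched in the last hunk counts server round-trips issued inside its block. A minimal sketch of the same pattern, assuming a reachable MongoDB (the pymongo `find().count()` call matches the era of this codebase and is deprecated in later drivers):

from mongoengine import connect
from mongoengine.connection import get_db
from mongoengine.context_managers import query_counter

connect('counter_sketch')
db = get_db()

with query_counter() as q:
    assert q == 0
    for _ in range(10):
        db.test.find({}).count()  # each round-trip increments the counter
    assert q == 10
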
@@ -1,17 +1,30 @@
import unittest

from mongoengine.base.datastructures import StrictDict, SemiStrictDict


class TestStrictDict(unittest.TestCase):
    def strict_dict_class(self, *args, **kwargs):
        return StrictDict.create(*args, **kwargs)

    def setUp(self):
        self.dtype = self.strict_dict_class(("a", "b", "c"))

    def test_init(self):
        d = self.dtype(a=1, b=1, c=1)
        self.assertEqual((d.a, d.b, d.c), (1, 1, 1))

    def test_repr(self):
        d = self.dtype(a=1, b=2, c=3)
        self.assertEqual(repr(d), '{"a": 1, "b": 2, "c": 3}')

        # make sure quotes are escaped properly
        d = self.dtype(a='"', b="'", c="")
        self.assertEqual(repr(d), '{"a": \'"\', "b": "\'", "c": \'\'}')

    def test_init_fails_on_nonexisting_attrs(self):
        self.assertRaises(AttributeError, lambda: self.dtype(a=1, b=2, d=3))
        with self.assertRaises(AttributeError):
            self.dtype(a=1, b=2, d=3)

    def test_eq(self):
        d = self.dtype(a=1, b=1, c=1)

@@ -34,13 +47,12 @@ class TestStrictDict(unittest.TestCase):
        d = self.dtype()
        d.a = 1
        self.assertEqual(d.a, 1)
        self.assertRaises(AttributeError, lambda: d.b)
        self.assertRaises(AttributeError, getattr, d, 'b')

    def test_setattr_raises_on_nonexisting_attr(self):
        d = self.dtype()
        def _f():
            d.x=1
        self.assertRaises(AttributeError, _f)
        with self.assertRaises(AttributeError):
            d.x = 1

    def test_setattr_getattr_special(self):
        d = self.strict_dict_class(["items"])
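For reference, `StrictDict.create` (the factory these tests drive) builds a dict-like class restricted to a fixed set of keys. A minimal sketch:

from mongoengine.base.datastructures import StrictDict

# A dict-like class that only ever accepts the keys a, b, c.
D = StrictDict.create(("a", "b", "c"))

d = D(a=1, b=2, c=3)
print(d.a)  # -> 1

try:
    d.x = 1  # not in the allowed key set
except AttributeError:
    print("unknown attributes are rejected")
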
@@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
import sys
sys.path[0:0] = [""]
import unittest

from bson import DBRef, ObjectId

@@ -12,9 +10,13 @@ from mongoengine.context_managers import query_counter

class FieldTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()
    @classmethod
    def setUpClass(cls):
        cls.db = connect(db='mongoenginetest')

    @classmethod
    def tearDownClass(cls):
        cls.db.drop_database('mongoenginetest')

    def test_list_item_dereference(self):
        """Ensure that DBRef items in ListFields are dereferenced.

@@ -28,7 +30,7 @@ class FieldTest(unittest.TestCase):
        User.drop_collection()
        Group.drop_collection()

        for i in xrange(1, 51):
        for i in range(1, 51):
            user = User(name='user %s' % i)
            user.save()

@@ -86,7 +88,7 @@ class FieldTest(unittest.TestCase):
        User.drop_collection()
        Group.drop_collection()

        for i in xrange(1, 51):
        for i in range(1, 51):
            user = User(name='user %s' % i)
            user.save()

@@ -158,7 +160,7 @@ class FieldTest(unittest.TestCase):
        User.drop_collection()
        Group.drop_collection()

        for i in xrange(1, 26):
        for i in range(1, 26):
            user = User(name='user %s' % i)
            user.save()

@@ -304,6 +306,7 @@ class FieldTest(unittest.TestCase):

        User.drop_collection()
        Post.drop_collection()
        SimpleList.drop_collection()

        u1 = User.objects.create(name='u1')
        u2 = User.objects.create(name='u2')

@@ -435,7 +438,7 @@ class FieldTest(unittest.TestCase):
        Group.drop_collection()

        members = []
        for i in xrange(1, 51):
        for i in range(1, 51):
            a = UserA(name='User A %s' % i)
            a.save()

@@ -526,7 +529,7 @@ class FieldTest(unittest.TestCase):
        Group.drop_collection()

        members = []
        for i in xrange(1, 51):
        for i in range(1, 51):
            a = UserA(name='User A %s' % i)
            a.save()

@@ -609,15 +612,15 @@ class FieldTest(unittest.TestCase):
        Group.drop_collection()

        members = []
        for i in xrange(1, 51):
        for i in range(1, 51):
            user = User(name='user %s' % i)
            user.save()
            members.append(user)

        group = Group(members=dict([(str(u.id), u) for u in members]))
        group = Group(members={str(u.id): u for u in members})
        group.save()

        group = Group(members=dict([(str(u.id), u) for u in members]))
        group = Group(members={str(u.id): u for u in members})
        group.save()

        with query_counter() as q:

@@ -682,7 +685,7 @@ class FieldTest(unittest.TestCase):
        Group.drop_collection()

        members = []
        for i in xrange(1, 51):
        for i in range(1, 51):
            a = UserA(name='User A %s' % i)
            a.save()

@@ -694,9 +697,9 @@ class FieldTest(unittest.TestCase):

        members += [a, b, c]

        group = Group(members=dict([(str(u.id), u) for u in members]))
        group = Group(members={str(u.id): u for u in members})
        group.save()
        group = Group(members=dict([(str(u.id), u) for u in members]))
        group = Group(members={str(u.id): u for u in members})
        group.save()

        with query_counter() as q:

@@ -778,16 +781,16 @@ class FieldTest(unittest.TestCase):
        Group.drop_collection()

        members = []
        for i in xrange(1, 51):
        for i in range(1, 51):
            a = UserA(name='User A %s' % i)
            a.save()

            members += [a]

        group = Group(members=dict([(str(u.id), u) for u in members]))
        group = Group(members={str(u.id): u for u in members})
        group.save()

        group = Group(members=dict([(str(u.id), u) for u in members]))
        group = Group(members={str(u.id): u for u in members})
        group.save()

        with query_counter() as q:

@@ -861,7 +864,7 @@ class FieldTest(unittest.TestCase):
        Group.drop_collection()

        members = []
        for i in xrange(1, 51):
        for i in range(1, 51):
            a = UserA(name='User A %s' % i)
            a.save()

@@ -873,9 +876,9 @@ class FieldTest(unittest.TestCase):

        members += [a, b, c]

        group = Group(members=dict([(str(u.id), u) for u in members]))
        group = Group(members={str(u.id): u for u in members})
        group.save()
        group = Group(members=dict([(str(u.id), u) for u in members]))
        group = Group(members={str(u.id): u for u in members})
        group.save()

        with query_counter() as q:

@@ -1026,6 +1029,43 @@ class FieldTest(unittest.TestCase):
        self.assertEqual(type(foo.bar), Bar)
        self.assertEqual(type(foo.baz), Baz)


    def test_document_reload_reference_integrity(self):
        """
        Ensure reloading a document whose references share the same id
        across different collections doesn't mix them up.
        """
        class Topic(Document):
            id = IntField(primary_key=True)

        class User(Document):
            id = IntField(primary_key=True)
            name = StringField()

        class Message(Document):
            id = IntField(primary_key=True)
            topic = ReferenceField(Topic)
            author = ReferenceField(User)

        Topic.drop_collection()
        User.drop_collection()
        Message.drop_collection()

        # All objects share the same id, but each is in a different collection
        topic = Topic(id=1).save()
        user = User(id=1, name='user-name').save()
        Message(id=1, topic=topic, author=user).save()

        concurrent_change_user = User.objects.get(id=1)
        concurrent_change_user.name = 'new-name'
        concurrent_change_user.save()
        self.assertNotEqual(user.name, 'new-name')

        msg = Message.objects.get(id=1)
        msg.reload()
        self.assertEqual(msg.topic, topic)
        self.assertEqual(msg.author, user)
        self.assertEqual(msg.author.name, 'new-name')


    def test_list_lookup_not_checked_in_map(self):
        """Ensure we dereference list data correctly
        """

@@ -1061,7 +1101,7 @@ class FieldTest(unittest.TestCase):
        User.drop_collection()
        Group.drop_collection()

        for i in xrange(1, 51):
        for i in range(1, 51):
            User(name='user %s' % i).save()

        Group(name="Test", members=User.objects).save()

@@ -1090,7 +1130,7 @@ class FieldTest(unittest.TestCase):
        User.drop_collection()
        Group.drop_collection()

        for i in xrange(1, 51):
        for i in range(1, 51):
            User(name='user %s' % i).save()

        Group(name="Test", members=User.objects).save()

@@ -1127,7 +1167,7 @@ class FieldTest(unittest.TestCase):
        Group.drop_collection()

        members = []
        for i in xrange(1, 51):
        for i in range(1, 51):
            a = UserA(name='User A %s' % i).save()
            b = UserB(name='User B %s' % i).save()
            c = UserC(name='User C %s' % i).save()
@@ -1,330 +0,0 @@
import sys
sys.path[0:0] = [""]
import unittest
from nose.plugins.skip import SkipTest

from mongoengine import *
from mongoengine.django.shortcuts import get_document_or_404

import django
from django.http import Http404
from django.template import Context, Template
from django.conf import settings
from django.core.paginator import Paginator

settings.configure(
    USE_TZ=True,
    INSTALLED_APPS=('django.contrib.auth', 'mongoengine.django.mongo_auth'),
    AUTH_USER_MODEL=('mongo_auth.MongoUser'),
    AUTHENTICATION_BACKENDS=('mongoengine.django.auth.MongoEngineBackend',)
)

try:
    # For Django >= 1.7
    if hasattr(django, 'setup'):
        django.setup()
except RuntimeError:
    pass

try:
    from django.contrib.auth import authenticate, get_user_model
    from mongoengine.django.auth import User
    from mongoengine.django.mongo_auth.models import (
        MongoUser,
        MongoUserManager,
        get_user_document,
    )
    DJ15 = True
except Exception:
    DJ15 = False
from mongoengine.django.sessions import SessionStore, MongoSession
from mongoengine.django.tests import MongoTestCase
from datetime import tzinfo, timedelta
ZERO = timedelta(0)


class FixedOffset(tzinfo):
    """Fixed offset in minutes east from UTC."""

    def __init__(self, offset, name):
        self.__offset = timedelta(minutes=offset)
        self.__name = name

    def utcoffset(self, dt):
        return self.__offset

    def tzname(self, dt):
        return self.__name

    def dst(self, dt):
        return ZERO


def activate_timezone(tz):
    """Activate Django timezone support if it is available.
    """
    try:
        from django.utils import timezone
        timezone.deactivate()
        timezone.activate(tz)
    except ImportError:
        pass


class QuerySetTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')

        class Person(Document):
            name = StringField()
            age = IntField()
        self.Person = Person

    def test_order_by_in_django_template(self):
        """Ensure that QuerySets are properly ordered in Django template.
        """
        self.Person.drop_collection()

        self.Person(name="A", age=20).save()
        self.Person(name="D", age=10).save()
        self.Person(name="B", age=40).save()
        self.Person(name="C", age=30).save()

        t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}")

        d = {"ol": self.Person.objects.order_by('-name')}
        self.assertEqual(t.render(Context(d)), u'D-10:C-30:B-40:A-20:')
        d = {"ol": self.Person.objects.order_by('+name')}
        self.assertEqual(t.render(Context(d)), u'A-20:B-40:C-30:D-10:')
        d = {"ol": self.Person.objects.order_by('-age')}
        self.assertEqual(t.render(Context(d)), u'B-40:C-30:A-20:D-10:')
        d = {"ol": self.Person.objects.order_by('+age')}
        self.assertEqual(t.render(Context(d)), u'D-10:A-20:C-30:B-40:')

        self.Person.drop_collection()

    def test_q_object_filter_in_template(self):

        self.Person.drop_collection()

        self.Person(name="A", age=20).save()
        self.Person(name="D", age=10).save()
        self.Person(name="B", age=40).save()
        self.Person(name="C", age=30).save()

        t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}")

        d = {"ol": self.Person.objects.filter(Q(age=10) | Q(name="C"))}
        self.assertEqual(t.render(Context(d)), 'D-10:C-30:')

        # Check double rendering doesn't throw an error
        self.assertEqual(t.render(Context(d)), 'D-10:C-30:')

    def test_get_document_or_404(self):
        p = self.Person(name="G404")
        p.save()

        self.assertRaises(Http404, get_document_or_404, self.Person, pk='1234')
        self.assertEqual(p, get_document_or_404(self.Person, pk=p.pk))

    def test_pagination(self):
        """Ensure that Pagination works as expected
        """
        class Page(Document):
            name = StringField()

        Page.drop_collection()

        for i in xrange(1, 11):
            Page(name=str(i)).save()

        paginator = Paginator(Page.objects.all(), 2)

        t = Template("{% for i in page.object_list %}{{ i.name }}:{% endfor %}")
        for p in paginator.page_range:
            d = {"page": paginator.page(p)}
            end = p * 2
            start = end - 1
            self.assertEqual(t.render(Context(d)), u'%d:%d:' % (start, end))

    def test_nested_queryset_template_iterator(self):
        # Try iterating the same queryset twice, nested, in a Django template.
        names = ['A', 'B', 'C', 'D']

        class CustomUser(Document):
            name = StringField()

            def __unicode__(self):
                return self.name

        CustomUser.drop_collection()

        for name in names:
            CustomUser(name=name).save()

        users = CustomUser.objects.all().order_by('name')
        template = Template("{% for user in users %}{{ user.name }}{% ifequal forloop.counter 2 %} {% for inner_user in users %}{{ inner_user.name }}{% endfor %} {% endifequal %}{% endfor %}")
        rendered = template.render(Context({'users': users}))
        self.assertEqual(rendered, 'AB ABCD CD')

    def test_filter(self):
        """Ensure that a queryset and filters work as expected
        """

        class LimitCountQuerySet(QuerySet):
            def count(self, with_limit_and_skip=True):
                return super(LimitCountQuerySet, self).count(with_limit_and_skip)

        class Note(Document):
            meta = dict(queryset_class=LimitCountQuerySet)
            name = StringField()

        Note.drop_collection()

        for i in xrange(1, 101):
            Note(name="Note: %s" % i).save()

        # Check the count
        self.assertEqual(Note.objects.count(), 100)

        # Get the first 10 and confirm
        notes = Note.objects[:10]
        self.assertEqual(notes.count(), 10)

        # Test djangos template filters
        # self.assertEqual(length(notes), 10)
        t = Template("{{ notes.count }}")
        c = Context({"notes": notes})
        self.assertEqual(t.render(c), "10")

        # Test with skip
        notes = Note.objects.skip(90)
        self.assertEqual(notes.count(), 10)

        # Test djangos template filters
        self.assertEqual(notes.count(), 10)
        t = Template("{{ notes.count }}")
        c = Context({"notes": notes})
        self.assertEqual(t.render(c), "10")

        # Test with limit
        notes = Note.objects.skip(90)
        self.assertEqual(notes.count(), 10)

        # Test djangos template filters
        self.assertEqual(notes.count(), 10)
        t = Template("{{ notes.count }}")
        c = Context({"notes": notes})
        self.assertEqual(t.render(c), "10")

        # Test with skip and limit
        notes = Note.objects.skip(10).limit(10)

        # Test djangos template filters
        self.assertEqual(notes.count(), 10)
        t = Template("{{ notes.count }}")
        c = Context({"notes": notes})
        self.assertEqual(t.render(c), "10")


class _BaseMongoDBSessionTest(unittest.TestCase):
    backend = SessionStore

    def setUp(self):
        connect(db='mongoenginetest')
        MongoSession.drop_collection()
        super(_BaseMongoDBSessionTest, self).setUp()

    def assertIn(self, first, second, msg=None):
        self.assertTrue(first in second, msg)

    def assertNotIn(self, first, second, msg=None):
        self.assertFalse(first in second, msg)

    def test_first_save(self):
        session = SessionStore()
        session['test'] = True
        session.save()
        self.assertTrue('test' in session)

    def test_session_expiration_tz(self):
        activate_timezone(FixedOffset(60, 'UTC+1'))
        # create and save new session
        session = SessionStore()
        session.set_expiry(600)  # expire in 600 seconds
        session['test_expire'] = True
        session.save()
        # reload session with key
        key = session.session_key
        session = SessionStore(key)
        self.assertTrue('test_expire' in session, 'Session has expired before it is expected')


try:
    # SessionTestsMixin isn't available for import on django > 1.8a1
    from django.contrib.sessions.tests import SessionTestsMixin

    class _MongoDBSessionTest(SessionTestsMixin):
        pass

    class MongoDBSessionTest(_BaseMongoDBSessionTest):
        pass

except ImportError:
    class MongoDBSessionTest(_BaseMongoDBSessionTest):
        pass


class MongoAuthTest(unittest.TestCase):
    user_data = {
        'username': 'user',
        'email': 'user@example.com',
        'password': 'test',
    }

    def setUp(self):
        if not DJ15:
            raise SkipTest('mongo_auth requires Django 1.5')
        connect(db='mongoenginetest')
        User.drop_collection()
        super(MongoAuthTest, self).setUp()

    def test_get_user_model(self):
        self.assertEqual(get_user_model(), MongoUser)

    def test_get_user_document(self):
        self.assertEqual(get_user_document(), User)

    def test_user_manager(self):
        manager = get_user_model()._default_manager
        self.assertTrue(isinstance(manager, MongoUserManager))

    def test_user_manager_exception(self):
        manager = get_user_model()._default_manager
        self.assertRaises(MongoUser.DoesNotExist, manager.get,
                          username='not found')

    def test_create_user(self):
        manager = get_user_model()._default_manager
        user = manager.create_user(**self.user_data)
        self.assertTrue(isinstance(user, User))
        db_user = User.objects.get(username='user')
        self.assertEqual(user.id, db_user.id)

    def test_authenticate(self):
        get_user_model()._default_manager.create_user(**self.user_data)
        user = authenticate(username='user', password='fail')
        self.assertEqual(None, user)
        user = authenticate(username='user', password='test')
        db_user = User.objects.get(username='user')
        self.assertEqual(user.id, db_user.id)


class MongoTestCaseTest(MongoTestCase):
    def test_mongo_test_case(self):
        self.db.dummy_collection.insert({'collection': 'will be dropped'})


if __name__ == '__main__':
    unittest.main()
@@ -1,47 +0,0 @@
import sys
sys.path[0:0] = [""]

import unittest

from mongoengine import *

import jinja2


class TemplateFilterTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')

    def test_jinja2(self):
        env = jinja2.Environment()

        class TestData(Document):
            title = StringField()
            description = StringField()

        TestData.drop_collection()

        examples = [('A', '1'),
                    ('B', '2'),
                    ('C', '3')]

        for title, description in examples:
            TestData(title=title, description=description).save()

        tmpl = """
{%- for record in content -%}
    {%- if loop.first -%}{ {%- endif -%}
    "{{ record.title }}": "{{ record.description }}"
    {%- if loop.last -%} }{%- else -%},{% endif -%}
{%- endfor -%}
"""
        ctx = {'content': TestData.objects}
        template = env.from_string(tmpl)
        rendered = template.render(**ctx)

        self.assertEqual('{"A": "1","B": "2","C": "3"}', rendered)


if __name__ == '__main__':
    unittest.main()
@@ -1,17 +1,30 @@
import sys
sys.path[0:0] = [""]
import unittest

import pymongo
from pymongo import ReadPreference, ReplicaSetConnection
from pymongo import ReadPreference

from mongoengine.python_support import IS_PYMONGO_3

if IS_PYMONGO_3:
    from pymongo import MongoClient
    CONN_CLASS = MongoClient
    READ_PREF = ReadPreference.SECONDARY
else:
    from pymongo import ReplicaSetConnection
    CONN_CLASS = ReplicaSetConnection
    READ_PREF = ReadPreference.SECONDARY_ONLY

import mongoengine
from mongoengine import *
from mongoengine.connection import get_db, get_connection, ConnectionError
from mongoengine.connection import MongoEngineConnectionError


class ConnectionTest(unittest.TestCase):

    def setUp(self):
        mongoengine.connection._connection_settings = {}
        mongoengine.connection._connections = {}
        mongoengine.connection._dbs = {}

    def tearDown(self):
        mongoengine.connection._connection_settings = {}
        mongoengine.connection._connections = {}

@@ -22,14 +35,17 @@ class ConnectionTest(unittest.TestCase):
        """

        try:
            conn = connect(db='mongoenginetest', host="mongodb://localhost/mongoenginetest?replicaSet=rs", read_preference=ReadPreference.SECONDARY_ONLY)
        except ConnectionError, e:
            conn = connect(db='mongoenginetest',
                           host="mongodb://localhost/mongoenginetest?replicaSet=rs",
                           read_preference=READ_PREF)
        except MongoEngineConnectionError as e:
            return

        if not isinstance(conn, ReplicaSetConnection):
        if not isinstance(conn, CONN_CLASS):
            # really???
            return

        self.assertEqual(conn.read_preference, ReadPreference.SECONDARY_ONLY)
        self.assertEqual(conn.read_preference, READ_PREF)


if __name__ == '__main__':
    unittest.main()
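The version dispatch above exists because `connect()` returns a `MongoClient` on PyMongo 3 but a `ReplicaSetConnection` on PyMongo 2, and the two libraries name the secondary read preference differently. A sketch of the application-side call, assuming a replica set named `rs` is reachable on localhost:

from pymongo import ReadPreference
from mongoengine import connect

# On PyMongO 2 use ReadPreference.SECONDARY_ONLY instead; the test's
# READ_PREF constant papers over exactly this difference.
conn = connect(
    db='mongoenginetest',
    host='mongodb://localhost/mongoenginetest?replicaSet=rs',
    read_preference=ReadPreference.SECONDARY,
)
print(conn.read_preference)
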
@@ -1,6 +1,4 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import sys
|
||||
sys.path[0:0] = [""]
|
||||
import unittest
|
||||
|
||||
from mongoengine import *
|
||||
@@ -25,6 +23,8 @@ class SignalTests(unittest.TestCase):
|
||||
connect(db='mongoenginetest')
|
||||
|
||||
class Author(Document):
|
||||
# Make the id deterministic for easier testing
|
||||
id = SequenceField(primary_key=True)
|
||||
name = StringField()
|
||||
|
||||
def __unicode__(self):
|
||||
@@ -33,7 +33,7 @@ class SignalTests(unittest.TestCase):
|
||||
@classmethod
|
||||
def pre_init(cls, sender, document, *args, **kwargs):
|
||||
signal_output.append('pre_init signal, %s' % cls.__name__)
|
||||
signal_output.append(str(kwargs['values']))
|
||||
signal_output.append(kwargs['values'])
|
||||
|
||||
@classmethod
|
||||
def post_init(cls, sender, document, **kwargs):
|
||||
@@ -43,48 +43,55 @@ class SignalTests(unittest.TestCase):
|
||||
@classmethod
|
||||
def pre_save(cls, sender, document, **kwargs):
|
||||
signal_output.append('pre_save signal, %s' % document)
|
||||
signal_output.append(kwargs)
|
||||
|
||||
@classmethod
|
||||
def pre_save_post_validation(cls, sender, document, **kwargs):
|
||||
signal_output.append('pre_save_post_validation signal, %s' % document)
|
||||
if 'created' in kwargs:
|
||||
if kwargs['created']:
|
||||
signal_output.append('Is created')
|
||||
else:
|
||||
signal_output.append('Is updated')
|
||||
if kwargs.pop('created', False):
|
||||
signal_output.append('Is created')
|
||||
else:
|
||||
signal_output.append('Is updated')
|
||||
signal_output.append(kwargs)
|
||||
|
||||
@classmethod
|
||||
def post_save(cls, sender, document, **kwargs):
|
||||
dirty_keys = document._delta()[0].keys() + document._delta()[1].keys()
|
||||
signal_output.append('post_save signal, %s' % document)
|
||||
signal_output.append('post_save dirty keys, %s' % dirty_keys)
|
||||
if 'created' in kwargs:
|
||||
if kwargs['created']:
|
||||
signal_output.append('Is created')
|
||||
else:
|
||||
signal_output.append('Is updated')
|
||||
if kwargs.pop('created', False):
|
||||
signal_output.append('Is created')
|
||||
else:
|
||||
signal_output.append('Is updated')
|
||||
signal_output.append(kwargs)
|
||||
|
||||
@classmethod
|
||||
def pre_delete(cls, sender, document, **kwargs):
|
||||
signal_output.append('pre_delete signal, %s' % document)
|
||||
signal_output.append(kwargs)
|
||||
|
||||
@classmethod
|
||||
def post_delete(cls, sender, document, **kwargs):
|
||||
signal_output.append('post_delete signal, %s' % document)
|
||||
signal_output.append(kwargs)
|
||||
|
||||
@classmethod
|
||||
def pre_bulk_insert(cls, sender, documents, **kwargs):
|
||||
signal_output.append('pre_bulk_insert signal, %s' % documents)
|
||||
signal_output.append(kwargs)
|
||||
|
||||
@classmethod
|
||||
def post_bulk_insert(cls, sender, documents, **kwargs):
|
||||
signal_output.append('post_bulk_insert signal, %s' % documents)
|
||||
if kwargs.get('loaded', False):
|
||||
if kwargs.pop('loaded', False):
|
||||
signal_output.append('Is loaded')
|
||||
else:
|
||||
signal_output.append('Not loaded')
|
||||
signal_output.append(kwargs)
|
||||
|
||||
self.Author = Author
|
||||
Author.drop_collection()
|
||||
Author.id.set_next_value(0)
|
||||
|
||||
class Another(Document):
|
||||
|
||||
@@ -96,10 +103,12 @@ class SignalTests(unittest.TestCase):
|
||||
@classmethod
|
||||
def pre_delete(cls, sender, document, **kwargs):
|
||||
signal_output.append('pre_delete signal, %s' % document)
|
||||
signal_output.append(kwargs)
|
||||
|
||||
@classmethod
|
||||
def post_delete(cls, sender, document, **kwargs):
|
||||
signal_output.append('post_delete signal, %s' % document)
|
||||
signal_output.append(kwargs)
|
||||
|
||||
self.Another = Another
|
||||
Another.drop_collection()
|
||||
@@ -118,6 +127,41 @@ class SignalTests(unittest.TestCase):
|
||||
self.ExplicitId = ExplicitId
|
||||
ExplicitId.drop_collection()
|
||||
|
||||
class Post(Document):
|
||||
title = StringField()
|
||||
content = StringField()
|
||||
active = BooleanField(default=False)
|
||||
|
||||
def __unicode__(self):
|
||||
return self.title
|
||||
|
||||
@classmethod
|
||||
def pre_bulk_insert(cls, sender, documents, **kwargs):
|
||||
signal_output.append('pre_bulk_insert signal, %s' %
|
||||
[(doc, {'active': documents[n].active})
|
||||
for n, doc in enumerate(documents)])
|
||||
|
||||
# make changes here, this is just an example -
|
||||
# it could be anything that needs pre-validation or looks-ups before bulk bulk inserting
|
||||
for document in documents:
|
||||
if not document.active:
|
||||
document.active = True
|
||||
signal_output.append(kwargs)
|
||||
|
||||
@classmethod
|
||||
def post_bulk_insert(cls, sender, documents, **kwargs):
|
||||
signal_output.append('post_bulk_insert signal, %s' %
|
||||
[(doc, {'active': documents[n].active})
|
||||
for n, doc in enumerate(documents)])
|
||||
if kwargs.pop('loaded', False):
|
||||
signal_output.append('Is loaded')
|
||||
else:
|
||||
signal_output.append('Not loaded')
|
||||
signal_output.append(kwargs)
|
||||
|
||||
self.Post = Post
|
||||
Post.drop_collection()
|
||||
|
||||
# Save up the number of connected signals so that we can check at the
|
||||
# end that all the signals we register get properly unregistered
|
||||
self.pre_signals = (
|
||||
@@ -147,6 +191,9 @@ class SignalTests(unittest.TestCase):
|
||||
|
||||
signals.post_save.connect(ExplicitId.post_save, sender=ExplicitId)
|
||||
|
||||
signals.pre_bulk_insert.connect(Post.pre_bulk_insert, sender=Post)
|
||||
signals.post_bulk_insert.connect(Post.post_bulk_insert, sender=Post)
|
||||
|
||||
def tearDown(self):
|
||||
signals.pre_init.disconnect(self.Author.pre_init)
|
||||
signals.post_init.disconnect(self.Author.post_init)
|
||||
@@ -163,6 +210,9 @@ class SignalTests(unittest.TestCase):
|
||||
|
||||
signals.post_save.disconnect(self.ExplicitId.post_save)
|
||||
|
||||
signals.pre_bulk_insert.disconnect(self.Post.pre_bulk_insert)
|
||||
signals.post_bulk_insert.disconnect(self.Post.post_bulk_insert)
|
||||
|
||||
# Check that all our signals got disconnected properly.
|
||||
post_signals = (
|
||||
len(signals.pre_init.receivers),
|
||||
@@ -202,63 +252,118 @@ class SignalTests(unittest.TestCase):
 
         self.assertEqual(self.get_signal_output(create_author), [
             "pre_init signal, Author",
-            "{'name': 'Bill Shakespeare'}",
+            {'name': 'Bill Shakespeare'},
             "post_init signal, Bill Shakespeare, document._created = True",
         ])
 
         a1 = self.Author(name='Bill Shakespeare')
         self.assertEqual(self.get_signal_output(a1.save), [
             "pre_save signal, Bill Shakespeare",
+            {},
             "pre_save_post_validation signal, Bill Shakespeare",
             "Is created",
+            {},
             "post_save signal, Bill Shakespeare",
             "post_save dirty keys, ['name']",
-            "Is created"
+            "Is created",
+            {}
         ])
 
         a1.reload()
         a1.name = 'William Shakespeare'
         self.assertEqual(self.get_signal_output(a1.save), [
             "pre_save signal, William Shakespeare",
+            {},
             "pre_save_post_validation signal, William Shakespeare",
             "Is updated",
+            {},
             "post_save signal, William Shakespeare",
             "post_save dirty keys, ['name']",
-            "Is updated"
+            "Is updated",
+            {}
         ])
 
         self.assertEqual(self.get_signal_output(a1.delete), [
             'pre_delete signal, William Shakespeare',
+            {},
             'post_delete signal, William Shakespeare',
+            {}
         ])
 
-        signal_output = self.get_signal_output(load_existing_author)
-        # test signal_output lines separately, because of random ObjectID after object load
-        self.assertEqual(signal_output[0],
+        self.assertEqual(self.get_signal_output(load_existing_author), [
             "pre_init signal, Author",
-        )
-        self.assertEqual(signal_output[2],
-            "post_init signal, Bill Shakespeare, document._created = False",
-        )
+            {'id': 2, 'name': 'Bill Shakespeare'},
+            "post_init signal, Bill Shakespeare, document._created = False"
+        ])
 
-        signal_output = self.get_signal_output(bulk_create_author_with_load)
-
-        # The output of this signal is not entirely deterministic. The reloaded
-        # object will have an object ID. Hence, we only check part of the output
-        self.assertEqual(signal_output[3], "pre_bulk_insert signal, [<Author: Bill Shakespeare>]"
-        )
-        self.assertEqual(signal_output[-2:],
-            ["post_bulk_insert signal, [<Author: Bill Shakespeare>]",
-             "Is loaded",])
+        self.assertEqual(self.get_signal_output(bulk_create_author_with_load), [
+            'pre_init signal, Author',
+            {'name': 'Bill Shakespeare'},
+            'post_init signal, Bill Shakespeare, document._created = True',
+            'pre_bulk_insert signal, [<Author: Bill Shakespeare>]',
+            {},
+            'pre_init signal, Author',
+            {'id': 3, 'name': 'Bill Shakespeare'},
+            'post_init signal, Bill Shakespeare, document._created = False',
+            'post_bulk_insert signal, [<Author: Bill Shakespeare>]',
+            'Is loaded',
+            {}
+        ])
 
         self.assertEqual(self.get_signal_output(bulk_create_author_without_load), [
             "pre_init signal, Author",
-            "{'name': 'Bill Shakespeare'}",
+            {'name': 'Bill Shakespeare'},
             "post_init signal, Bill Shakespeare, document._created = True",
             "pre_bulk_insert signal, [<Author: Bill Shakespeare>]",
+            {},
             "post_bulk_insert signal, [<Author: Bill Shakespeare>]",
             "Not loaded",
+            {}
         ])
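
The reworked assertions above interleave bare dicts ({}, or the passed keyword arguments) with the message strings. A minimal sketch of the helper shape this implies — every name here is illustrative, not the suite's actual code: each receiver records its message and then the raw kwargs dict it was invoked with.

    signal_output = []

    def post_save_handler(sender, document, **kwargs):
        signal_output.append('post_save signal, %s' % document)
        signal_output.append(kwargs)   # {} unless signal_kwargs were passed

    def get_signal_output(fn, *args, **kwargs):
        del signal_output[:]           # reset between calls; Python 2 and 3
        fn(*args, **kwargs)
        return signal_output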
 
+    def test_signal_kwargs(self):
+        """ Make sure signal_kwargs is passed to signals calls. """
+
+        def live_and_let_die():
+            a = self.Author(name='Bill Shakespeare')
+            a.save(signal_kwargs={'live': True, 'die': False})
+            a.delete(signal_kwargs={'live': False, 'die': True})
+
+        self.assertEqual(self.get_signal_output(live_and_let_die), [
+            "pre_init signal, Author",
+            {'name': 'Bill Shakespeare'},
+            "post_init signal, Bill Shakespeare, document._created = True",
+            "pre_save signal, Bill Shakespeare",
+            {'die': False, 'live': True},
+            "pre_save_post_validation signal, Bill Shakespeare",
+            "Is created",
+            {'die': False, 'live': True},
+            "post_save signal, Bill Shakespeare",
+            "post_save dirty keys, ['name']",
+            "Is created",
+            {'die': False, 'live': True},
+            'pre_delete signal, Bill Shakespeare',
+            {'die': True, 'live': False},
+            'post_delete signal, Bill Shakespeare',
+            {'die': True, 'live': False}
+        ])
+
+        def bulk_create_author():
+            a1 = self.Author(name='Bill Shakespeare')
+            self.Author.objects.insert([a1], signal_kwargs={'key': True})
+
+        self.assertEqual(self.get_signal_output(bulk_create_author), [
+            'pre_init signal, Author',
+            {'name': 'Bill Shakespeare'},
+            'post_init signal, Bill Shakespeare, document._created = True',
+            'pre_bulk_insert signal, [<Author: Bill Shakespeare>]',
+            {'key': True},
+            'pre_init signal, Author',
+            {'id': 2, 'name': 'Bill Shakespeare'},
+            'post_init signal, Bill Shakespeare, document._created = False',
+            'post_bulk_insert signal, [<Author: Bill Shakespeare>]',
+            'Is loaded',
+            {'key': True}
+        ])
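
For context, a usage sketch of the signal_kwargs argument this test exercises. The Author model and the audit receiver are assumptions for illustration; signals.post_save.connect and the (sender, document, **kwargs) receiver signature are mongoengine's documented API.

    from mongoengine import Document, StringField, connect, signals

    class Author(Document):
        name = StringField()

    def audit(sender, document, **kwargs):
        # Whatever dict is passed as signal_kwargs to save()/delete()
        # is forwarded to every connected receiver as **kwargs.
        if kwargs.get('live'):
            print('live save of %s' % document.name)

    signals.post_save.connect(audit, sender=Author)

    connect('signals-demo')  # hypothetical database name
    Author(name='Bill Shakespeare').save(signal_kwargs={'live': True})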
 
     def test_queryset_delete_signals(self):
@@ -267,7 +372,9 @@ class SignalTests(unittest.TestCase):
         self.Another(name='Bill Shakespeare').save()
         self.assertEqual(self.get_signal_output(self.Another.objects.delete), [
             'pre_delete signal, Bill Shakespeare',
+            {},
             'post_delete signal, Bill Shakespeare',
+            {}
         ])
 
     def test_signals_with_explicit_doc_ids(self):
@@ -279,5 +386,50 @@ class SignalTests(unittest.TestCase):
         # second time, it must be an update
         self.assertEqual(self.get_signal_output(ei.save), ['Is updated'])
 
+    def test_signals_with_switch_collection(self):
+        ei = self.ExplicitId(id=123)
+        ei.switch_collection("explicit__1")
+        self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
+        ei.switch_collection("explicit__1")
+        self.assertEqual(self.get_signal_output(ei.save), ['Is updated'])
+
+        ei.switch_collection("explicit__1", keep_created=False)
+        self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
+        ei.switch_collection("explicit__1", keep_created=False)
+        self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
+
+    def test_signals_with_switch_db(self):
+        connect('mongoenginetest')
+        register_connection('testdb-1', 'mongoenginetest2')
+
+        ei = self.ExplicitId(id=123)
+        ei.switch_db("testdb-1")
+        self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
+        ei.switch_db("testdb-1")
+        self.assertEqual(self.get_signal_output(ei.save), ['Is updated'])
+
+        ei.switch_db("testdb-1", keep_created=False)
+        self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
+        ei.switch_db("testdb-1", keep_created=False)
+        self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
+
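
The switch_db test above routes writes through Document.switch_db plus register_connection. Outside of tests, the same routing is commonly done with the context manager from mongoengine.context_managers; a sketch, assuming that helper is available in the installed version (the Group model is illustrative):

    from mongoengine import Document, StringField, connect, register_connection
    from mongoengine.context_managers import switch_db

    connect('mongoenginetest')
    register_connection('testdb-1', 'mongoenginetest2')

    class Group(Document):
        name = StringField()

    with switch_db(Group, 'testdb-1') as TestGroup:
        # Saved into 'mongoenginetest2' instead of the default database.
        TestGroup(name='hello testdb!').save()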
+    def test_signals_bulk_insert(self):
+        def bulk_set_active_post():
+            posts = [
+                self.Post(title='Post 1'),
+                self.Post(title='Post 2'),
+                self.Post(title='Post 3')
+            ]
+            self.Post.objects.insert(posts)
+
+        results = self.get_signal_output(bulk_set_active_post)
+        self.assertEqual(results, [
+            "pre_bulk_insert signal, [(<Post: Post 1>, {'active': False}), (<Post: Post 2>, {'active': False}), (<Post: Post 3>, {'active': False})]",
+            {},
+            "post_bulk_insert signal, [(<Post: Post 1>, {'active': True}), (<Post: Post 2>, {'active': True}), (<Post: Post 3>, {'active': True})]",
+            'Is loaded',
+            {}
+        ])
+
 if __name__ == '__main__':
     unittest.main()
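
The expected strings above imply bulk-insert receivers on Post that report each document's 'active' flag and flip it between the pre and post signals. A sketch of how such wiring could look — the Post model and receiver below are assumptions reconstructed from the expected output, not the suite's code:

    from mongoengine import BooleanField, Document, StringField, signals

    class Post(Document):
        title = StringField()
        active = BooleanField(default=False)

        def __unicode__(self):         # yields the '<Post: Post 1>' repr
            return self.title

    def activate_on_insert(sender, documents, **kwargs):
        # pre_bulk_insert receives the full list of documents and may
        # mutate them before anything is written to the collection.
        for document in documents:
            document.active = True

    signals.pre_bulk_insert.connect(activate_on_insert, sender=Post)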
22  tox.ini  Normal file
@@ -0,0 +1,22 @@
+[tox]
+envlist = {py26,py27,py33,py34,py35,pypy,pypy3}-{mg27,mg28},flake8
+
+[testenv]
+commands =
+    python setup.py nosetests {posargs}
+deps =
+    nose
+    mg27: PyMongo<2.8
+    mg28: PyMongo>=2.8,<3.0
+    mg30: PyMongo>=3.0
+    mgdev: https://github.com/mongodb/mongo-python-driver/tarball/master
+setenv =
+    PYTHON_EGG_CACHE = {envdir}/python-eggs
+passenv = windir
+
+[testenv:flake8]
+deps =
+    flake8
+    flake8-import-order
+commands =
+    flake8