Compare commits: topic/land ... v0.10.0 (362 commits)

[Per-commit rows omitted: the captured listing held only abbreviated SHA1 hashes; the author, date, and message columns were empty in this extract.]
.travis.yml (75 changed lines)
@@ -1,50 +1,45 @@
|
||||
# http://travis-ci.org/#!/MongoEngine/mongoengine
|
||||
language: python
|
||||
python:
|
||||
- "2.6"
|
||||
- "2.7"
|
||||
- "3.2"
|
||||
- "3.3"
|
||||
- "3.4"
|
||||
- "pypy"
|
||||
- "pypy3"
|
||||
- '2.6'
|
||||
- '2.7'
|
||||
- '3.2'
|
||||
- '3.3'
|
||||
- '3.4'
|
||||
- pypy
|
||||
- pypy3
|
||||
env:
|
||||
- PYMONGO=dev DJANGO=dev
|
||||
- PYMONGO=dev DJANGO=1.6.5
|
||||
- PYMONGO=dev DJANGO=1.5.8
|
||||
- PYMONGO=2.7.1 DJANGO=dev
|
||||
- PYMONGO=2.7.1 DJANGO=1.6.5
|
||||
- PYMONGO=2.7.1 DJANGO=1.5.8
|
||||
|
||||
- PYMONGO=2.7
|
||||
- PYMONGO=2.8
|
||||
- PYMONGO=3.0
|
||||
- PYMONGO=dev
|
||||
matrix:
|
||||
exclude:
|
||||
- python: "2.6"
|
||||
env: PYMONGO=dev DJANGO=dev
|
||||
- python: "2.6"
|
||||
env: PYMONGO=2.7.1 DJANGO=dev
|
||||
fast_finish: true
|
||||
|
||||
fast_finish: true
|
||||
before_install:
|
||||
- "travis_retry sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10"
|
||||
- "echo 'deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen' | sudo tee /etc/apt/sources.list.d/mongodb.list"
|
||||
- "travis_retry sudo apt-get update"
|
||||
- "travis_retry sudo apt-get install mongodb-org-server"
|
||||
|
||||
- travis_retry sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10
|
||||
- echo 'deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen' |
|
||||
sudo tee /etc/apt/sources.list.d/mongodb.list
|
||||
- travis_retry sudo apt-get update
|
||||
- travis_retry sudo apt-get install mongodb-org-server
|
||||
install:
|
||||
- sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev libtiff4-dev libjpeg8-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.5-dev tk8.5-dev python-tk
|
||||
- if [[ $PYMONGO == 'dev' ]]; then travis_retry pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi
|
||||
- if [[ $PYMONGO != 'dev' ]]; then travis_retry pip install pymongo==$PYMONGO; true; fi
|
||||
- if [[ $DJANGO == 'dev' ]]; then travis_retry pip install https://www.djangoproject.com/download/1.7c2/tarball/; fi
|
||||
- if [[ $DJANGO != 'dev' ]]; then travis_retry pip install Django==$DJANGO; fi
|
||||
- travis_retry pip install https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.1.tar.gz#md5=1534bb15cf311f07afaa3aacba1c028b
|
||||
- travis_retry python setup.py install
|
||||
|
||||
- sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev
|
||||
libtiff4-dev libjpeg8-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.5-dev tk8.5-dev
|
||||
python-tk
|
||||
- travis_retry pip install tox>=1.9 coveralls
|
||||
- travis_retry tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test
|
||||
script:
|
||||
- travis_retry python setup.py test
|
||||
- if [[ $TRAVIS_PYTHON_VERSION == '3.'* ]]; then 2to3 . -w; fi;
|
||||
- python benchmark.py
|
||||
- tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage
|
||||
after_script: coveralls --verbose
|
||||
notifications:
|
||||
irc: "irc.freenode.org#mongoengine"
|
||||
irc: irc.freenode.org#mongoengine
|
||||
branches:
|
||||
only:
|
||||
- master
|
||||
- master
|
||||
- /^v.*$/
|
||||
deploy:
|
||||
provider: pypi
|
||||
user: the_drow
|
||||
password:
|
||||
secure: QMyatmWBnC6ZN3XLW2+fTBDU4LQcp1m/LjR2/0uamyeUzWKdlOoh/Wx5elOgLwt/8N9ppdPeG83ose1jOz69l5G0MUMjv8n/RIcMFSpCT59tGYqn3kh55b0cIZXFT9ar+5cxlif6a5rS72IHm5li7QQyxexJIII6Uxp0kpvUmek=
|
||||
on:
|
||||
tags: true
|
||||
repo: MongoEngine/mongoengine
|
||||
|
AUTHORS (27 changed lines)
@@ -12,7 +12,7 @@ Laine Herron https://github.com/LaineHerron
|
||||
|
||||
CONTRIBUTORS
|
||||
|
||||
Dervived from the git logs, inevitably incomplete but all of whom and others
|
||||
Derived from the git logs, inevitably incomplete but all of whom and others
|
||||
have submitted patches, reported bugs and generally helped make MongoEngine
|
||||
that much better:
|
||||
|
||||
@@ -119,7 +119,7 @@ that much better:
|
||||
* Anton Kolechkin
|
||||
* Sergey Nikitin
|
||||
* psychogenic
|
||||
* Stefan Wójcik
|
||||
* Stefan Wójcik (https://github.com/wojcikstefan)
|
||||
* dimonb
|
||||
* Garry Polley
|
||||
* James Slagle
|
||||
@@ -138,7 +138,6 @@ that much better:
|
||||
* hellysmile
|
||||
* Jaepil Jeong
|
||||
* Daniil Sharou
|
||||
* Stefan Wójcik
|
||||
* Pete Campton
|
||||
* Martyn Smith
|
||||
* Marcelo Anton
|
||||
@@ -206,3 +205,25 @@ that much better:
|
||||
* Clay McClure (https://github.com/claymation)
|
||||
* Bruno Rocha (https://github.com/rochacbruno)
|
||||
* Norberto Leite (https://github.com/nleite)
|
||||
* Bob Cribbs (https://github.com/bocribbz)
|
||||
* Jay Shirley (https://github.com/jshirley)
|
||||
* David Bordeynik (https://github.com/DavidBord)
|
||||
* Axel Haustant (https://github.com/noirbizarre)
|
||||
* David Czarnecki (https://github.com/czarneckid)
|
||||
* Vyacheslav Murashkin (https://github.com/a4tunado)
|
||||
* André Ericson https://github.com/aericson)
|
||||
* Mikhail Moshnogorsky (https://github.com/mikhailmoshnogorsky)
|
||||
* Diego Berrocal (https://github.com/cestdiego)
|
||||
* Matthew Ellison (https://github.com/seglberg)
|
||||
* Jimmy Shen (https://github.com/jimmyshen)
|
||||
* J. Fernando Sánchez (https://github.com/balkian)
|
||||
* Michael Chase (https://github.com/rxsegrxup)
|
||||
* Eremeev Danil (https://github.com/elephanter)
|
||||
* Catstyle Lee (https://github.com/Catstyle)
|
||||
* Kiryl Yermakou (https://github.com/rma4ok)
|
||||
* Matthieu Rigal (https://github.com/MRigal)
|
||||
* Charanpal Dhanjal (https://github.com/charanpald)
|
||||
* Emmanuel Leblond (https://github.com/touilleMan)
|
||||
* Breeze.Kay (https://github.com/9nix00)
|
||||
* Vicki Donchenko (https://github.com/kivistein)
|
||||
|
||||
|
@@ -29,7 +29,10 @@ Style Guide
|
||||
-----------
|
||||
|
||||
MongoEngine aims to follow `PEP8 <http://www.python.org/dev/peps/pep-0008/>`_
|
||||
including 4 space indents and 79 character line limits.
|
||||
including 4 space indents. When possible we try to stick to 79 character line limits.
|
||||
However, screens got bigger and an ORM has a strong focus on readability and
|
||||
if it can help, we accept 119 as maximum line length, in a similar way as
|
||||
`django does <https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/coding-style/#python-style>`_
|
||||
|
||||
Testing
|
||||
-------
|
||||
@@ -38,14 +41,21 @@ All tests are run on `Travis <http://travis-ci.org/MongoEngine/mongoengine>`_
|
||||
and any pull requests are automatically tested by Travis. Any pull requests
|
||||
without tests will take longer to be integrated and might be refused.
|
||||
|
||||
You may also submit a simple failing test as a PullRequest if you don't know
|
||||
how to fix it, it will be easier for other people to work on it and it may get
|
||||
fixed faster.
|
||||
|
||||
General Guidelines
|
||||
------------------
|
||||
|
||||
- Avoid backward breaking changes if at all possible.
|
||||
- Write inline documentation for new classes and methods.
|
||||
- Write tests and make sure they pass (make sure you have a mongod
|
||||
running on the default port, then execute ``python setup.py test``
|
||||
running on the default port, then execute ``python setup.py nosetests``
|
||||
from the cmd line to run the test suite).
|
||||
- Ensure tests pass on every Python and PyMongo versions.
|
||||
You can test on these versions locally by executing ``tox``
|
||||
- Add enhancements or problematic bug fixes to docs/changelog.rst
|
||||
- Add yourself to AUTHORS :)
|
||||
|
||||
Documentation
|
||||
|
README.rst (38 changed lines)
@@ -8,10 +8,10 @@ MongoEngine
|
||||
|
||||
.. image:: https://secure.travis-ci.org/MongoEngine/mongoengine.png?branch=master
|
||||
:target: http://travis-ci.org/MongoEngine/mongoengine
|
||||
|
||||
.. image:: https://coveralls.io/repos/MongoEngine/mongoengine/badge.png?branch=master
|
||||
|
||||
.. image:: https://coveralls.io/repos/MongoEngine/mongoengine/badge.png?branch=master
|
||||
:target: https://coveralls.io/r/MongoEngine/mongoengine?branch=master
|
||||
|
||||
|
||||
.. image:: https://landscape.io/github/MongoEngine/mongoengine/master/landscape.png
|
||||
:target: https://landscape.io/github/MongoEngine/mongoengine/master
|
||||
:alt: Code Health
|
||||
@@ -26,7 +26,9 @@ a `tutorial <http://readthedocs.org/docs/mongoengine-odm/en/latest/tutorial.html
|
||||
|
||||
Installation
|
||||
============
|
||||
If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
|
||||
We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of
|
||||
`pip <https://pip.pypa.io/>`_. You can then use ``pip install -U mongoengine``.
|
||||
You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ and thus
|
||||
you can use ``easy_install -U mongoengine``. Otherwise, you can download the
|
||||
source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python
|
||||
setup.py install``.
|
||||
@@ -38,12 +40,11 @@ Dependencies
|
||||
|
||||
Optional Dependencies
|
||||
---------------------
|
||||
- **Django Integration:** Django>=1.4.0 for Python 2.x or PyPy and Django>=1.5.0 for Python 3.x
|
||||
- **Image Fields**: Pillow>=2.0.0 or PIL (not recommended since MongoEngine is tested with Pillow)
|
||||
- **Image Fields**: Pillow>=2.0.0
|
||||
- dateutil>=2.1.0
|
||||
|
||||
.. note
|
||||
MongoEngine always runs it's test suite against the latest patch version of each dependecy. e.g.: Django 1.6.5
|
||||
MongoEngine always runs it's test suite against the latest patch version of each dependecy. e.g.: PyMongo 3.0.1
|
||||
|
||||
Examples
|
||||
========
|
||||
@@ -82,7 +83,7 @@ Some simple examples of what MongoEngine code looks like::
|
||||
|
||||
>>> len(BlogPost.objects)
|
||||
2
|
||||
>>> len(HtmlPost.objects)
|
||||
>>> len(TextPost.objects)
|
||||
1
|
||||
>>> len(LinkPost.objects)
|
||||
1
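The class definitions behind this snippet sit outside the hunks shown here. A minimal
sketch of the kind of models the snippet assumes (names and fields are illustrative,
not the verbatim README models)::

    from mongoengine import Document, StringField, connect

    connect('blog')  # assumes a local mongod on the default port

    class BlogPost(Document):
        title = StringField(required=True, max_length=200)
        meta = {'allow_inheritance': True}

    class TextPost(BlogPost):
        content = StringField()

    class LinkPost(BlogPost):
        link_url = StringField()

    # With one TextPost and one LinkPost saved, len(BlogPost.objects) == 2,
    # while each subclass queryset contains a single document, matching the
    # counts shown above.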
|
||||
@@ -96,7 +97,26 @@ Some simple examples of what MongoEngine code looks like::
|
||||
Tests
|
||||
=====
|
||||
To run the test suite, ensure you are running a local instance of MongoDB on
|
||||
the standard port, and run: ``python setup.py test``.
|
||||
the standard port, and run: ``python setup.py nosetests``.
|
||||
|
||||
To run the test suite on every supported Python version and every supported PyMongo version,
|
||||
you can use ``tox``.
|
||||
tox and each supported Python version should be installed in your environment:
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
# Install tox
|
||||
$ pip install tox
|
||||
# Run the test suites
|
||||
$ tox
|
||||
|
||||
If you wish to run one single or selected tests, use the nosetest convention. It will find the folder,
|
||||
eventually the file, go to the TestClass specified after the colon and eventually right to the single test.
|
||||
Also use the -s argument if you want to print out whatever or access pdb while testing.
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
$ python setup.py nosetests --tests tests/fields/fields.py:FieldTest.test_cls_field -s
|
||||
|
||||
Community
|
||||
=========
|
||||
|
docs/_themes/sphinx_rtd_theme/footer.html (vendored, 2 changed lines)
@@ -2,7 +2,7 @@
 {% if next or prev %}
 <div class="rst-footer-buttons">
 {% if next %}
-<a href="{{ next.link|e }}" class="btn btn-neutral float-right" title="{{ next.title|striptags|e }}"/>Next <span class="icon icon-circle-arrow-right"></span></a>
+<a href="{{ next.link|e }}" class="btn btn-neutral float-right" title="{{ next.title|striptags|e }}">Next <span class="icon icon-circle-arrow-right"></span></a>
 {% endif %}
 {% if prev %}
 <a href="{{ prev.link|e }}" class="btn btn-neutral" title="{{ prev.title|striptags|e }}"><span class="icon icon-circle-arrow-left"></span> Previous</a>
|
||||
|
@@ -34,6 +34,9 @@ Documents
|
||||
.. autoclass:: mongoengine.ValidationError
|
||||
:members:
|
||||
|
||||
.. autoclass:: mongoengine.FieldDoesNotExist
|
||||
|
||||
|
||||
Context Managers
|
||||
================
|
||||
|
||||
@@ -79,6 +82,7 @@ Fields
|
||||
.. autoclass:: mongoengine.fields.GenericEmbeddedDocumentField
|
||||
.. autoclass:: mongoengine.fields.DynamicField
|
||||
.. autoclass:: mongoengine.fields.ListField
|
||||
.. autoclass:: mongoengine.fields.EmbeddedDocumentListField
|
||||
.. autoclass:: mongoengine.fields.SortedListField
|
||||
.. autoclass:: mongoengine.fields.DictField
|
||||
.. autoclass:: mongoengine.fields.MapField
|
||||
@@ -95,11 +99,29 @@ Fields
|
||||
.. autoclass:: mongoengine.fields.PointField
|
||||
.. autoclass:: mongoengine.fields.LineStringField
|
||||
.. autoclass:: mongoengine.fields.PolygonField
|
||||
.. autoclass:: mongoengine.fields.MultiPointField
|
||||
.. autoclass:: mongoengine.fields.MultiLineStringField
|
||||
.. autoclass:: mongoengine.fields.MultiPolygonField
|
||||
.. autoclass:: mongoengine.fields.GridFSError
|
||||
.. autoclass:: mongoengine.fields.GridFSProxy
|
||||
.. autoclass:: mongoengine.fields.ImageGridFsProxy
|
||||
.. autoclass:: mongoengine.fields.ImproperlyConfigured
|
||||
|
||||
Embedded Document Querying
|
||||
==========================
|
||||
|
||||
.. versionadded:: 0.9
|
||||
|
||||
Additional queries for Embedded Documents are available when using the
|
||||
:class:`~mongoengine.EmbeddedDocumentListField` to store a list of embedded
|
||||
documents.
|
||||
|
||||
A list of embedded documents is returned as a special list with the
|
||||
following methods:
|
||||
|
||||
.. autoclass:: mongoengine.base.datastructures.EmbeddedDocumentList
|
||||
:members:
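As a rough illustration of these helpers (a sketch under assumed model names, not the
upstream docs example)::

    from mongoengine import (Document, EmbeddedDocument, StringField,
                             EmbeddedDocumentListField, connect)

    connect('example_db')  # assumes a local mongod on the default port

    class Comment(EmbeddedDocument):
        author = StringField()
        message = StringField()

    class Page(Document):
        comments = EmbeddedDocumentListField(Comment)

    page = Page(comments=[Comment(author='Ross', message='hi'),
                          Comment(author='Bob', message='hello')]).save()

    bobs = page.comments.filter(author='Bob')    # a filtered EmbeddedDocumentList
    bobs.count()                                  # 1
    page.comments.exclude(author='Bob').first()   # the 'Ross' comment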
|
||||
|
||||
Misc
|
||||
====
|
||||
|
||||
|
@@ -2,9 +2,64 @@
|
||||
Changelog
|
||||
=========
|
||||
|
||||
Changes in 0.10.1 - DEV
|
||||
=======================
|
||||
|
||||
Changes in 0.9.X - DEV
|
||||
======================
|
||||
Changes in 0.10.0
|
||||
=================
|
||||
- Django support was removed and will be available as a separate extension. #958
|
||||
- Allow to load undeclared field with meta attribute 'strict': False #957
|
||||
- Support for PyMongo 3+ #946
|
||||
- Removed get_or_create() deprecated since 0.8.0. #300
|
||||
- Improve Document._created status when switch collection and db #1020
|
||||
- Queryset update doesn't go through field validation #453
|
||||
- Added support for specifying authentication source as option `authSource` in URI. #967
|
||||
- Fixed mark_as_changed to handle higher/lower level fields changed. #927
|
||||
- ListField of embedded docs doesn't set the _instance attribute when iterating over it #914
|
||||
- Support += and *= for ListField #595
|
||||
- Use sets for populating dbrefs to dereference
|
||||
- Fixed unpickled documents replacing the global field's list. #888
|
||||
- Fixed storage of microseconds in ComplexDateTimeField and unused separator option. #910
|
||||
- Don't send a "cls" option to ensureIndex (related to https://jira.mongodb.org/browse/SERVER-769)
|
||||
- Fix for updating sorting in SortedListField. #978
|
||||
- Added __ support to escape field name in fields lookup keywords that match operators names #949
|
||||
- Fix for issue where FileField deletion did not free space in GridFS.
|
||||
- No_dereference() not respected on embedded docs containing reference. #517
|
||||
- Document save raise an exception if save_condition fails #1005
|
||||
- Fixes some internal _id handling issue. #961
|
||||
- Updated URL and Email Field regex validators, added schemes argument to URLField validation. #652
|
||||
- Capped collection multiple of 256. #1011
|
||||
- Added `BaseQuerySet.aggregate_sum` and `BaseQuerySet.aggregate_average` methods.
|
||||
- Fix for delete with write_concern {'w': 0}. #1008
|
||||
- Allow dynamic lookup for more than two parts. #882
|
||||
- Added support for min_distance on geo queries. #831
|
||||
- Allow to add custom metadata to fields #705
|
||||
|
||||
Changes in 0.9.0
|
||||
================
|
||||
- Update FileField when creating a new file #714
|
||||
- Added `EmbeddedDocumentListField` for Lists of Embedded Documents. #826
|
||||
- ComplexDateTimeField should fall back to None when null=True #864
|
||||
- Request Support for $min, $max Field update operators #863
|
||||
- `BaseDict` does not follow `setdefault` #866
|
||||
- Add support for $type operator # 766
|
||||
- Fix tests for pymongo 2.8+ #877
|
||||
- No module named 'django.utils.importlib' (Django dev) #872
|
||||
- Field Choices Now Accept Subclasses of Documents
|
||||
- Ensure Indexes before Each Save #812
|
||||
- Generate Unique Indices for Lists of EmbeddedDocuments #358
|
||||
- Sparse fields #515
|
||||
- write_concern not in params of Collection#remove #801
|
||||
- Better BaseDocument equality check when not saved #798
|
||||
- OperationError: Shard Keys are immutable. Tried to update id even though the document is not yet saved #771
|
||||
- with_limit_and_skip for count should default like in pymongo #759
|
||||
- Fix storing value of precision attribute in DecimalField #787
|
||||
- Set attribute to None does not work (at least for fields with default values) #734
|
||||
- Querying by a field defined in a subclass raises InvalidQueryError #744
|
||||
- Add Support For MongoDB 2.6.X's maxTimeMS #778
|
||||
- abstract shouldn't be inherited in EmbeddedDocument # 789
|
||||
- Allow specifying the '_cls' as a field for indexes #397
|
||||
- Stop ensure_indexes running on a secondaries unless connection is through mongos #746
|
||||
- Not overriding default values when loading a subset of fields #399
|
||||
- Saving document doesn't create new fields in existing collection #620
|
||||
- Added `Queryset.aggregate` wrapper to aggregation framework #703
|
||||
@@ -33,7 +88,7 @@ Changes in 0.9.X - DEV
|
||||
- Removing support for Django 1.4.x, pymongo 2.5.x, pymongo 2.6.x.
|
||||
- Removing support for Python < 2.6.6
|
||||
- Fixed $maxDistance location for geoJSON $near queries with MongoDB 2.6+ #664
|
||||
- QuerySet.modify() method to provide find_and_modify() like behaviour #677
|
||||
- QuerySet.modify() and Document.modify() methods to provide find_and_modify() like behaviour #677 #773
|
||||
- Added support for the using() method on a queryset #676
|
||||
- PYPY support #673
|
||||
- Connection pooling #674
|
||||
@@ -46,10 +101,20 @@ Changes in 0.9.X - DEV
|
||||
- Workaround a dateutil bug #608
|
||||
- Conditional save for atomic-style operations #511
|
||||
- Allow dynamic dictionary-style field access #559
|
||||
- Increase email field length to accommodate new TLDs #726
|
||||
- index_cls is ignored when deciding to set _cls as index prefix #733
|
||||
- Make 'db' argument to connection optional #737
|
||||
- Allow atomic update for the entire `DictField` #742
|
||||
- Added MultiPointField, MultiLineField, MultiPolygonField
|
||||
- Fix multiple connections aliases being rewritten #748
|
||||
- Fixed a few instances where reverse_delete_rule was written as reverse_delete_rules. #791
|
||||
- Make `in_bulk()` respect `no_dereference()` #775
|
||||
- Handle None from model __str__; Fixes #753 #754
|
||||
- _get_changed_fields fix for embedded documents with id field. #925
|
||||
|
||||
Changes in 0.8.7
|
||||
================
|
||||
- Calling reload on deleted / nonexistant documents raises DoesNotExist (#538)
|
||||
- Calling reload on deleted / nonexistent documents raises DoesNotExist (#538)
|
||||
- Stop ensure_indexes running on a secondaries (#555)
|
||||
- Fix circular import issue with django auth (#531) (#545)
|
||||
|
||||
@@ -62,7 +127,7 @@ Changes in 0.8.5
|
||||
- Fix multi level nested fields getting marked as changed (#523)
|
||||
- Django 1.6 login fix (#522) (#527)
|
||||
- Django 1.6 session fix (#509)
|
||||
- EmbeddedDocument._instance is now set when settng the attribute (#506)
|
||||
- EmbeddedDocument._instance is now set when setting the attribute (#506)
|
||||
- Fixed EmbeddedDocument with ReferenceField equality issue (#502)
|
||||
- Fixed GenericReferenceField serialization order (#499)
|
||||
- Fixed count and none bug (#498)
|
||||
@@ -152,7 +217,7 @@ Changes in 0.8.0
|
||||
- Added `get_next_value` preview for SequenceFields (#319)
|
||||
- Added no_sub_classes context manager and queryset helper (#312)
|
||||
- Querysets now utilises a local cache
|
||||
- Changed __len__ behavour in the queryset (#247, #311)
|
||||
- Changed __len__ behaviour in the queryset (#247, #311)
|
||||
- Fixed querying string versions of ObjectIds issue with ReferenceField (#307)
|
||||
- Added $setOnInsert support for upserts (#308)
|
||||
- Upserts now possible with just query parameters (#309)
|
||||
@@ -203,7 +268,7 @@ Changes in 0.8.0
|
||||
- Uses getlasterror to test created on updated saves (#163)
|
||||
- Fixed inheritance and unique index creation (#140)
|
||||
- Fixed reverse delete rule with inheritance (#197)
|
||||
- Fixed validation for GenericReferences which havent been dereferenced
|
||||
- Fixed validation for GenericReferences which haven't been dereferenced
|
||||
- Added switch_db context manager (#106)
|
||||
- Added switch_db method to document instances (#106)
|
||||
- Added no_dereference context manager (#82) (#61)
|
||||
@@ -285,11 +350,11 @@ Changes in 0.7.2
|
||||
- Update index spec generation so its not destructive (#113)
|
||||
|
||||
Changes in 0.7.1
|
||||
=================
|
||||
================
|
||||
- Fixed index spec inheritance (#111)
|
||||
|
||||
Changes in 0.7.0
|
||||
=================
|
||||
================
|
||||
- Updated queryset.delete so you can use with skip / limit (#107)
|
||||
- Updated index creation allows kwargs to be passed through refs (#104)
|
||||
- Fixed Q object merge edge case (#109)
|
||||
@@ -370,7 +435,7 @@ Changes in 0.6.12
|
||||
- Fixes error with _delta handling DBRefs
|
||||
|
||||
Changes in 0.6.11
|
||||
==================
|
||||
=================
|
||||
- Fixed inconsistency handling None values field attrs
|
||||
- Fixed map_field embedded db_field issue
|
||||
- Fixed .save() _delta issue with DbRefs
|
||||
@@ -450,7 +515,7 @@ Changes in 0.6.1
|
||||
- Fix for replicaSet connections
|
||||
|
||||
Changes in 0.6
|
||||
================
|
||||
==============
|
||||
|
||||
- Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7
|
||||
- Added support for covered indexes when inheritance is off
|
||||
@@ -538,8 +603,8 @@ Changes in v0.5
|
||||
- Updated default collection naming convention
|
||||
- Added Document Mixin support
|
||||
- Fixed queryet __repr__ mid iteration
|
||||
- Added hint() support, so cantell Mongo the proper index to use for the query
|
||||
- Fixed issue with inconsitent setting of _cls breaking inherited referencing
|
||||
- Added hint() support, so can tell Mongo the proper index to use for the query
|
||||
- Fixed issue with inconsistent setting of _cls breaking inherited referencing
|
||||
- Added help_text and verbose_name to fields to help with some form libs
|
||||
- Updated item_frequencies to handle embedded document lookups
|
||||
- Added delta tracking now only sets / unsets explicitly changed fields
|
||||
|
docs/django.rst (180 changed lines)
@@ -2,176 +2,18 @@
|
||||
Django Support
|
||||
==============
|
||||
|
||||
.. note:: Updated to support Django 1.5
|
||||
|
||||
Connecting
|
||||
==========
|
||||
In your **settings.py** file, ignore the standard database settings (unless you
|
||||
also plan to use the ORM in your project), and instead call
|
||||
:func:`~mongoengine.connect` somewhere in the settings module.
|
||||
|
||||
.. note::
|
||||
If you are not using another Database backend you may need to add a dummy
|
||||
database backend to ``settings.py`` eg::
|
||||
|
||||
DATABASES = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.dummy'
|
||||
}
|
||||
}
|
||||
|
||||
Authentication
|
||||
==============
|
||||
MongoEngine includes a Django authentication backend, which uses MongoDB. The
|
||||
:class:`~mongoengine.django.auth.User` model is a MongoEngine
|
||||
:class:`~mongoengine.Document`, but implements most of the methods and
|
||||
attributes that the standard Django :class:`User` model does - so the two are
|
||||
moderately compatible. Using this backend will allow you to store users in
|
||||
MongoDB but still use many of the Django authentication infrastructure (such as
|
||||
the :func:`login_required` decorator and the :func:`authenticate` function). To
|
||||
enable the MongoEngine auth backend, add the following to your **settings.py**
|
||||
file::
|
||||
|
||||
AUTHENTICATION_BACKENDS = (
|
||||
'mongoengine.django.auth.MongoEngineBackend',
|
||||
)
|
||||
|
||||
The :mod:`~mongoengine.django.auth` module also contains a
|
||||
:func:`~mongoengine.django.auth.get_user` helper function, that takes a user's
|
||||
:attr:`id` and returns a :class:`~mongoengine.django.auth.User` object.
|
||||
|
||||
.. versionadded:: 0.1.3
|
||||
|
||||
Custom User model
|
||||
=================
|
||||
Django 1.5 introduced `Custom user Models
|
||||
<https://docs.djangoproject.com/en/dev/topics/auth/customizing/#auth-custom-user>`_
|
||||
which can be used as an alternative to the MongoEngine authentication backend.
|
||||
|
||||
The main advantage of this option is that other components relying on
|
||||
:mod:`django.contrib.auth` and supporting the new swappable user model are more
|
||||
likely to work. For example, you can use the ``createsuperuser`` management
|
||||
command as usual.
|
||||
|
||||
To enable the custom User model in Django, add ``'mongoengine.django.mongo_auth'``
|
||||
in your ``INSTALLED_APPS`` and set ``'mongo_auth.MongoUser'`` as the custom user
|
||||
user model to use. In your **settings.py** file you will have::
|
||||
|
||||
INSTALLED_APPS = (
|
||||
...
|
||||
'django.contrib.auth',
|
||||
'mongoengine.django.mongo_auth',
|
||||
...
|
||||
)
|
||||
|
||||
AUTH_USER_MODEL = 'mongo_auth.MongoUser'
|
||||
|
||||
An additional ``MONGOENGINE_USER_DOCUMENT`` setting enables you to replace the
|
||||
:class:`~mongoengine.django.auth.User` class with another class of your choice::
|
||||
|
||||
MONGOENGINE_USER_DOCUMENT = 'mongoengine.django.auth.User'
|
||||
|
||||
The custom :class:`User` must be a :class:`~mongoengine.Document` class, but
|
||||
otherwise has the same requirements as a standard custom user model,
|
||||
as specified in the `Django Documentation
|
||||
<https://docs.djangoproject.com/en/dev/topics/auth/customizing/>`_.
|
||||
In particular, the custom class must define :attr:`USERNAME_FIELD` and
|
||||
:attr:`REQUIRED_FIELDS` attributes.
|
||||
|
||||
Sessions
|
||||
========
|
||||
Django allows the use of different backend stores for its sessions. MongoEngine
|
||||
provides a MongoDB-based session backend for Django, which allows you to use
|
||||
sessions in your Django application with just MongoDB. To enable the MongoEngine
|
||||
session backend, ensure that your settings module has
|
||||
``'django.contrib.sessions.middleware.SessionMiddleware'`` in the
|
||||
``MIDDLEWARE_CLASSES`` field and ``'django.contrib.sessions'`` in your
|
||||
``INSTALLED_APPS``. From there, all you need to do is add the following line
|
||||
into your settings module::
|
||||
|
||||
SESSION_ENGINE = 'mongoengine.django.sessions'
|
||||
SESSION_SERIALIZER = 'mongoengine.django.sessions.BSONSerializer'
|
||||
|
||||
Django provides session cookie, which expires after ```SESSION_COOKIE_AGE``` seconds, but doesn't delete cookie at sessions backend, so ``'mongoengine.django.sessions'`` supports `mongodb TTL
|
||||
<http://docs.mongodb.org/manual/tutorial/expire-data/>`_.
|
||||
|
||||
.. note:: ``SESSION_SERIALIZER`` is only necessary in Django 1.6 as the default
|
||||
serializer is based around JSON and doesn't know how to convert
|
||||
``bson.objectid.ObjectId`` instances to strings.
|
||||
|
||||
.. versionadded:: 0.2.1
|
||||
|
||||
Storage
|
||||
=======
|
||||
With MongoEngine's support for GridFS via the :class:`~mongoengine.fields.FileField`,
|
||||
it is useful to have a Django file storage backend that wraps this. The new
|
||||
storage module is called :class:`~mongoengine.django.storage.GridFSStorage`.
|
||||
Using it is very similar to using the default FileSystemStorage.::
|
||||
|
||||
from mongoengine.django.storage import GridFSStorage
|
||||
fs = GridFSStorage()
|
||||
|
||||
filename = fs.save('hello.txt', 'Hello, World!')
|
||||
|
||||
All of the `Django Storage API methods
|
||||
<http://docs.djangoproject.com/en/dev/ref/files/storage/>`_ have been
|
||||
implemented except :func:`path`. If the filename provided already exists, an
|
||||
underscore and a number (before # the file extension, if one exists) will be
|
||||
appended to the filename until the generated filename doesn't exist. The
|
||||
:func:`save` method will return the new filename.::
|
||||
|
||||
>>> fs.exists('hello.txt')
|
||||
True
|
||||
>>> fs.open('hello.txt').read()
|
||||
'Hello, World!'
|
||||
>>> fs.size('hello.txt')
|
||||
13
|
||||
>>> fs.url('hello.txt')
|
||||
'http://your_media_url/hello.txt'
|
||||
>>> fs.open('hello.txt').name
|
||||
'hello.txt'
|
||||
>>> fs.listdir()
|
||||
([], [u'hello.txt'])
|
||||
|
||||
All files will be saved and retrieved in GridFS via the :class:`FileDocument`
|
||||
document, allowing easy access to the files without the GridFSStorage
|
||||
backend.::
|
||||
|
||||
>>> from mongoengine.django.storage import FileDocument
|
||||
>>> FileDocument.objects()
|
||||
[<FileDocument: FileDocument object>]
|
||||
|
||||
.. versionadded:: 0.4
|
||||
|
||||
Shortcuts
|
||||
=========
|
||||
Inspired by the `Django shortcut get_object_or_404
|
||||
<https://docs.djangoproject.com/en/dev/topics/http/shortcuts/#get-object-or-404>`_,
|
||||
the :func:`~mongoengine.django.shortcuts.get_document_or_404` method returns
|
||||
a document or raises an Http404 exception if the document does not exist::
|
||||
|
||||
from mongoengine.django.shortcuts import get_document_or_404
|
||||
|
||||
admin_user = get_document_or_404(User, username='root')
|
||||
|
||||
The first argument may be a Document or QuerySet object. All other passed arguments
|
||||
and keyword arguments are used in the query::
|
||||
|
||||
foo_email = get_document_or_404(User.objects.only('email'), username='foo', is_active=True).email
|
||||
|
||||
.. note:: Like with :func:`get`, a MultipleObjectsReturned will be raised if more than one
|
||||
object is found.
|
||||
.. note:: Django support has been split from the main MongoEngine
|
||||
repository. The *legacy* Django extension may be found bundled with the
|
||||
0.9 release of MongoEngine.
|
||||
|
||||
|
||||
Also inspired by the `Django shortcut get_list_or_404
|
||||
<https://docs.djangoproject.com/en/dev/topics/http/shortcuts/#get-list-or-404>`_,
|
||||
the :func:`~mongoengine.django.shortcuts.get_list_or_404` method returns a list of
|
||||
documents or raises an Http404 exception if the list is empty::
|
||||
|
||||
from mongoengine.django.shortcuts import get_list_or_404
|
||||
|
||||
active_users = get_list_or_404(User, is_active=True)
|
||||
|
||||
The first argument may be a Document or QuerySet object. All other passed
|
||||
arguments and keyword arguments are used to filter the query.
|
||||
Help Wanted!
|
||||
------------
|
||||
|
||||
The MongoEngine team is looking for help contributing and maintaining a new
|
||||
Django extension for MongoEngine! If you have Django experience and would like
|
||||
to help contribute to the project, please get in touch on the
|
||||
`mailing list <http://groups.google.com/group/mongoengine-users>`_ or by
|
||||
simply contributing on
|
||||
`GitHub <https://github.com/MongoEngine/django-mongoengine>`_.
|
||||
|
@@ -23,21 +23,32 @@ arguments should be provided::
|
||||
|
||||
connect('project1', username='webapp', password='pwd123')
|
||||
|
||||
Uri style connections are also supported - just supply the uri as
|
||||
URI style connections are also supported -- just supply the URI as
|
||||
the :attr:`host` to
|
||||
:func:`~mongoengine.connect`::
|
||||
|
||||
connect('project1', host='mongodb://localhost/database_name')
|
||||
|
||||
Note that database name from uri has priority over name
|
||||
in ::func:`~mongoengine.connect`
|
||||
.. note:: Database, username and password from URI string overrides
|
||||
corresponding parameters in :func:`~mongoengine.connect`: ::
|
||||
|
||||
connect(
|
||||
name='test',
|
||||
username='user',
|
||||
password='12345',
|
||||
host='mongodb://admin:qwerty@localhost/production'
|
||||
)
|
||||
|
||||
will establish connection to ``production`` database using
|
||||
``admin`` username and ``qwerty`` password.
|
||||
|
||||
ReplicaSets
|
||||
===========
|
||||
|
||||
MongoEngine supports :class:`~pymongo.mongo_replica_set_client.MongoReplicaSetClient`.
|
||||
To use them, please use a URI style connection and provide the `replicaSet` name in the
|
||||
connection kwargs.
|
||||
MongoEngine supports
|
||||
:class:`~pymongo.mongo_replica_set_client.MongoReplicaSetClient`. To use them,
|
||||
please use an URI style connection and provide the ``replicaSet`` name
|
||||
in the connection kwargs.
|
||||
|
||||
Read preferences are supported through the connection or via individual
|
||||
queries by passing the read_preference ::
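    # Illustrative sketch, not the upstream example; host names, database
    # names and the Page model are assumptions made for this snippet.
    from pymongo import ReadPreference
    from mongoengine import Document, StringField, connect

    class Page(Document):
        title = StringField()

    # URI style connection naming the replica set, with a default read preference
    connect('project1',
            host='mongodb://db1.example:27017,db2.example:27017/project1?replicaSet=rs0',
            read_preference=ReadPreference.SECONDARY_PREFERRED)

    # The read preference can also be set on an individual query
    Page.objects.read_preference(ReadPreference.SECONDARY_PREFERRED)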
|
||||
@@ -77,36 +88,38 @@ to point across databases and collections. Below is an example schema, using
|
||||
meta = {"db_alias": "users-books-db"}
|
||||
|
||||
|
||||
Switch Database Context Manager
|
||||
===============================
|
||||
|
||||
Sometimes you may want to switch the database to query against for a class
|
||||
for example, archiving older data into a separate database for performance
|
||||
reasons.
|
||||
Context Managers
|
||||
================
|
||||
Sometimes you may want to switch the database or collection to query against
|
||||
for a class.
|
||||
For example, archiving older data into a separate database for performance
|
||||
reasons or writing functions that dynamically choose collections to write
|
||||
document to.
|
||||
|
||||
Switch Database
|
||||
---------------
|
||||
The :class:`~mongoengine.context_managers.switch_db` context manager allows
|
||||
you to change the database alias for a given class allowing quick and easy
|
||||
access to the same User document across databases::
|
||||
access the same User document across databases::
|
||||
|
||||
from mongoengine.context_managers import switch_db
|
||||
from mongoengine.context_managers import switch_db
|
||||
|
||||
class User(Document):
|
||||
name = StringField()
|
||||
class User(Document):
|
||||
name = StringField()
|
||||
|
||||
meta = {"db_alias": "user-db"}
|
||||
meta = {"db_alias": "user-db"}
|
||||
|
||||
with switch_db(User, 'archive-user-db') as User:
|
||||
User(name="Ross").save() # Saves the 'archive-user-db'
|
||||
with switch_db(User, 'archive-user-db') as User:
|
||||
User(name="Ross").save() # Saves the 'archive-user-db'
|
||||
|
||||
.. note:: Make sure any aliases have been registered with
|
||||
:func:`~mongoengine.register_connection` before using the context manager.
|
||||
|
||||
There is also a switch collection context manager as well. The
|
||||
:class:`~mongoengine.context_managers.switch_collection` context manager allows
|
||||
you to change the collection for a given class allowing quick and easy
|
||||
access to the same Group document across collection::
|
||||
Switch Collection
|
||||
-----------------
|
||||
The :class:`~mongoengine.context_managers.switch_collection` context manager
|
||||
allows you to change the collection for a given class allowing quick and easy
|
||||
access the same Group document across collection::
|
||||
|
||||
from mongoengine.context_managers import switch_db
|
||||
from mongoengine.context_managers import switch_collection
|
||||
|
||||
class Group(Document):
|
||||
name = StringField()
|
||||
@@ -115,3 +128,9 @@ access to the same Group document across collection::
|
||||
|
||||
with switch_collection(Group, 'group2000') as Group:
|
||||
Group(name="hello Group 2000 collection!").save() # Saves in group2000 collection
|
||||
|
||||
|
||||
|
||||
.. note:: Make sure any aliases have been registered with
|
||||
:func:`~mongoengine.register_connection` or :func:`~mongoengine.connect`
|
||||
before using the context manager.
|
||||
|
@@ -4,7 +4,7 @@ Defining documents
|
||||
In MongoDB, a **document** is roughly equivalent to a **row** in an RDBMS. When
|
||||
working with relational databases, rows are stored in **tables**, which have a
|
||||
strict **schema** that the rows follow. MongoDB stores documents in
|
||||
**collections** rather than tables - the principal difference is that no schema
|
||||
**collections** rather than tables --- the principal difference is that no schema
|
||||
is enforced at a database level.
|
||||
|
||||
Defining a document's schema
|
||||
@@ -91,6 +91,12 @@ are as follows:
|
||||
* :class:`~mongoengine.fields.StringField`
|
||||
* :class:`~mongoengine.fields.URLField`
|
||||
* :class:`~mongoengine.fields.UUIDField`
|
||||
* :class:`~mongoengine.fields.PointField`
|
||||
* :class:`~mongoengine.fields.LineStringField`
|
||||
* :class:`~mongoengine.fields.PolygonField`
|
||||
* :class:`~mongoengine.fields.MultiPointField`
|
||||
* :class:`~mongoengine.fields.MultiLineStringField`
|
||||
* :class:`~mongoengine.fields.MultiPolygonField`
|
||||
|
||||
Field arguments
|
||||
---------------
|
||||
@@ -108,7 +114,7 @@ arguments can be set on all fields:
|
||||
:attr:`default` (Default: None)
|
||||
A value to use when no value is set for this field.
|
||||
|
||||
The definion of default parameters follow `the general rules on Python
|
||||
The definition of default parameters follow `the general rules on Python
|
||||
<http://docs.python.org/reference/compound_stmts.html#function-definitions>`__,
|
||||
which means that some care should be taken when dealing with default mutable objects
|
||||
(like in :class:`~mongoengine.fields.ListField` or :class:`~mongoengine.fields.DictField`)::
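    # Sketch of the point above (not the verbatim upstream example):
    class ExampleFirst(Document):
        # Default to an empty list, built afresh for every document
        values = ListField(IntField(), default=list)

    class ExampleSecond(Document):
        # Default to a fresh list of values via a callable
        values = ListField(IntField(), default=lambda: [1, 2, 3])

    class ExampleDangerous(Document):
        # Risky: one shared list object is used as the default, so
        # mutating it leaks across documents
        values = ListField(IntField(), default=[1, 2, 3])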
|
||||
@@ -140,6 +146,8 @@ arguments can be set on all fields:
|
||||
When True, use this field as a primary key for the collection. `DictField`
|
||||
and `EmbeddedDocuments` both support being the primary key for a document.
|
||||
|
||||
.. note:: If set, this field is also accessible through the `pk` field.
|
||||
|
||||
:attr:`choices` (Default: None)
|
||||
An iterable (e.g. a list or tuple) of choices to which the value of this
|
||||
field should be limited.
|
||||
@@ -165,15 +173,15 @@ arguments can be set on all fields:
|
||||
size = StringField(max_length=3, choices=SIZE)
|
||||
|
||||
:attr:`help_text` (Default: None)
|
||||
Optional help text to output with the field - used by form libraries
|
||||
Optional help text to output with the field -- used by form libraries
|
||||
|
||||
:attr:`verbose_name` (Default: None)
|
||||
Optional human-readable name for the field - used by form libraries
|
||||
Optional human-readable name for the field -- used by form libraries
|
||||
|
||||
|
||||
List fields
|
||||
-----------
|
||||
MongoDB allows the storage of lists of items. To add a list of items to a
|
||||
MongoDB allows storing lists of items. To add a list of items to a
|
||||
:class:`~mongoengine.Document`, use the :class:`~mongoengine.fields.ListField` field
|
||||
type. :class:`~mongoengine.fields.ListField` takes another field object as its first
|
||||
argument, which specifies which type elements may be stored within the list::
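    # A minimal sketch of such a list field (names are illustrative):
    class Page(Document):
        tags = ListField(StringField(max_length=50))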
|
||||
@@ -307,12 +315,12 @@ reference with a delete rule specification. A delete rule is specified by
|
||||
supplying the :attr:`reverse_delete_rule` attributes on the
|
||||
:class:`ReferenceField` definition, like this::
|
||||
|
||||
class Employee(Document):
|
||||
class ProfilePage(Document):
|
||||
...
|
||||
profile_page = ReferenceField('ProfilePage', reverse_delete_rule=mongoengine.NULLIFY)
|
||||
employee = ReferenceField('Employee', reverse_delete_rule=mongoengine.CASCADE)
|
||||
|
||||
The declaration in this example means that when an :class:`Employee` object is
|
||||
removed, the :class:`ProfilePage` that belongs to that employee is removed as
|
||||
removed, the :class:`ProfilePage` that references that employee is removed as
|
||||
well. If a whole batch of employees is removed, all profile pages that are
|
||||
linked are removed as well.
|
||||
|
||||
@@ -328,7 +336,7 @@ Its value can take any of the following constants:
|
||||
Any object's fields still referring to the object being deleted are removed
|
||||
(using MongoDB's "unset" operation), effectively nullifying the relationship.
|
||||
:const:`mongoengine.CASCADE`
|
||||
Any object containing fields that are refererring to the object being deleted
|
||||
Any object containing fields that are referring to the object being deleted
|
||||
are deleted first.
|
||||
:const:`mongoengine.PULL`
|
||||
Removes the reference to the object (using MongoDB's "pull" operation)
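A sketch of a ``PULL`` rule on a list of references (names are illustrative, not taken
from the upstream guide)::

    from mongoengine import Document, StringField, ListField, ReferenceField, PULL

    class Author(Document):
        name = StringField()

    class Book(Document):
        title = StringField()
        # Deleting an Author pulls it out of any Book.authors list
        authors = ListField(ReferenceField(Author, reverse_delete_rule=PULL))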
|
||||
@@ -395,7 +403,7 @@ MongoEngine allows you to specify that a field should be unique across a
|
||||
collection by providing ``unique=True`` to a :class:`~mongoengine.fields.Field`\ 's
|
||||
constructor. If you try to save a document that has the same value for a unique
|
||||
field as a document that is already in the database, a
|
||||
:class:`~mongoengine.OperationError` will be raised. You may also specify
|
||||
:class:`~mongoengine.NotUniqueError` will be raised. You may also specify
|
||||
multi-field uniqueness constraints by using :attr:`unique_with`, which may be
|
||||
either a single field name, or a list or tuple of field names::
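    # A rough sketch (field names are illustrative):
    class User(Document):
        username = StringField(unique=True)
        first_name = StringField()
        # last_name must be unique in combination with first_name
        last_name = StringField(unique_with='first_name')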
|
||||
|
||||
@@ -422,7 +430,7 @@ Document collections
|
||||
====================
|
||||
Document classes that inherit **directly** from :class:`~mongoengine.Document`
|
||||
will have their own **collection** in the database. The name of the collection
|
||||
is by default the name of the class, coverted to lowercase (so in the example
|
||||
is by default the name of the class, converted to lowercase (so in the example
|
||||
above, the collection would be called `page`). If you need to change the name
|
||||
of the collection (e.g. to use MongoEngine with an existing database), then
|
||||
create a class dictionary attribute called :attr:`meta` on your document, and
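A sketch of that pattern (the collection name is illustrative)::

    class Page(Document):
        title = StringField(max_length=200, required=True)
        meta = {'collection': 'cmsPage'}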
|
||||
@@ -439,8 +447,10 @@ A :class:`~mongoengine.Document` may use a **Capped Collection** by specifying
|
||||
:attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary.
|
||||
:attr:`max_documents` is the maximum number of documents that is allowed to be
|
||||
stored in the collection, and :attr:`max_size` is the maximum size of the
|
||||
collection in bytes. If :attr:`max_size` is not specified and
|
||||
:attr:`max_documents` is, :attr:`max_size` defaults to 10000000 bytes (10MB).
|
||||
collection in bytes. :attr:`max_size` is rounded up to the next multiple of 256
|
||||
by MongoDB internally and mongoengine before. Use also a multiple of 256 to
|
||||
avoid confusions. If :attr:`max_size` is not specified and
|
||||
:attr:`max_documents` is, :attr:`max_size` defaults to 10485760 bytes (10MB).
|
||||
The following example shows a :class:`Log` document that will be limited to
|
||||
1000 entries and 2MB of disk space::
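    # Sketch of such a Log document (values follow the prose above, not
    # necessarily the verbatim upstream example):
    class Log(Document):
        ip_address = StringField()
        meta = {'max_documents': 1000,
                'max_size': 2097152}  # 2 MB, a multiple of 256 as advised above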
|
||||
|
||||
@@ -457,16 +467,31 @@ You can specify indexes on collections to make querying faster. This is done
|
||||
by creating a list of index specifications called :attr:`indexes` in the
|
||||
:attr:`~mongoengine.Document.meta` dictionary, where an index specification may
|
||||
either be a single field name, a tuple containing multiple field names, or a
|
||||
dictionary containing a full index definition. A direction may be specified on
|
||||
fields by prefixing the field name with a **+** (for ascending) or a **-** sign
|
||||
(for descending). Note that direction only matters on multi-field indexes.
|
||||
Text indexes may be specified by prefixing the field name with a **$**. ::
|
||||
dictionary containing a full index definition.
|
||||
|
||||
A direction may be specified on fields by prefixing the field name with a
|
||||
**+** (for ascending) or a **-** sign (for descending). Note that direction
|
||||
only matters on multi-field indexes. Text indexes may be specified by prefixing
|
||||
the field name with a **$**. Hashed indexes may be specified by prefixing
|
||||
the field name with a **#**::
|
||||
|
||||
class Page(Document):
|
||||
category = IntField()
|
||||
title = StringField()
|
||||
rating = StringField()
|
||||
created = DateTimeField()
|
||||
meta = {
|
||||
'indexes': ['title', ('title', '-rating')]
|
||||
'indexes': [
|
||||
'title',
|
||||
'$title', # text index
|
||||
'#title', # hashed index
|
||||
('title', '-rating'),
|
||||
('category', '_cls'),
|
||||
{
|
||||
'fields': ['created'],
|
||||
'expireAfterSeconds': 3600
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
If a dictionary is passed then the following options are available:
|
||||
@@ -516,11 +541,14 @@ There are a few top level defaults for all indexes that can be set::
|
||||
:attr:`index_background` (Optional)
|
||||
Set the default value for if an index should be indexed in the background
|
||||
|
||||
:attr:`index_cls` (Optional)
|
||||
A way to turn off a specific index for _cls.
|
||||
|
||||
:attr:`index_drop_dups` (Optional)
|
||||
Set the default value for if an index should drop duplicates
|
||||
|
||||
:attr:`index_cls` (Optional)
|
||||
A way to turn off a specific index for _cls.
|
||||
.. note:: Since MongoDB 3.0 drop_dups is not supported anymore. Raises a Warning
|
||||
and has no effect
|
||||
|
||||
|
||||
Compound Indexes and Indexing sub documents
|
||||
@@ -544,6 +572,9 @@ The following fields will explicitly add a "2dsphere" index:
|
||||
- :class:`~mongoengine.fields.PointField`
|
||||
- :class:`~mongoengine.fields.LineStringField`
|
||||
- :class:`~mongoengine.fields.PolygonField`
|
||||
- :class:`~mongoengine.fields.MultiPointField`
|
||||
- :class:`~mongoengine.fields.MultiLineStringField`
|
||||
- :class:`~mongoengine.fields.MultiPolygonField`
|
||||
|
||||
As "2dsphere" indexes can be part of a compound index, you may not want the
|
||||
automatic index but would prefer a compound index. In this example we turn off
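A hedged sketch of that pattern, suppressing the automatic index and declaring the
compound one explicitly (names are illustrative)::

    from mongoengine import Document, DateTimeField, PointField

    class Event(Document):
        venue = PointField(auto_index=False)  # no automatic "2dsphere" index
        date = DateTimeField()
        meta = {
            'indexes': [[('venue', '2dsphere'), ('date', 1)]],
        }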
|
||||
@@ -655,11 +686,11 @@ Shard keys
|
||||
==========
|
||||
|
||||
If your collection is sharded, then you need to specify the shard key as a tuple,
|
||||
using the :attr:`shard_key` attribute of :attr:`-mongoengine.Document.meta`.
|
||||
using the :attr:`shard_key` attribute of :attr:`~mongoengine.Document.meta`.
|
||||
This ensures that the shard key is sent with the query when calling the
|
||||
:meth:`~mongoengine.document.Document.save` or
|
||||
:meth:`~mongoengine.document.Document.update` method on an existing
|
||||
:class:`-mongoengine.Document` instance::
|
||||
:class:`~mongoengine.Document` instance::
|
||||
|
||||
class LogEntry(Document):
|
||||
machine = StringField()
|
||||
@@ -681,7 +712,7 @@ defined, you may subclass it and add any extra fields or methods you may need.
|
||||
As this is new class is not a direct subclass of
|
||||
:class:`~mongoengine.Document`, it will not be stored in its own collection; it
|
||||
will use the same collection as its superclass uses. This allows for more
|
||||
convenient and efficient retrieval of related documents - all you need do is
|
||||
convenient and efficient retrieval of related documents -- all you need do is
|
||||
set :attr:`allow_inheritance` to True in the :attr:`meta` data for a
|
||||
document.::
|
||||
|
||||
@@ -695,12 +726,12 @@ document.::
|
||||
class DatedPage(Page):
|
||||
date = DateTimeField()
|
||||
|
||||
.. note:: From 0.8 onwards you must declare :attr:`allow_inheritance` defaults
|
||||
.. note:: From 0.8 onwards :attr:`allow_inheritance` defaults
|
||||
to False, meaning you must set it to True to use inheritance.
|
||||
|
||||
Working with existing data
|
||||
--------------------------
|
||||
As MongoEngine no longer defaults to needing :attr:`_cls` you can quickly and
|
||||
As MongoEngine no longer defaults to needing :attr:`_cls`, you can quickly and
|
||||
easily get working with existing data. Just define the document to match
|
||||
the expected schema in your database ::
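    # A rough sketch; field names are illustrative and should mirror whatever
    # already exists in the collection:
    class User(Document):
        email = StringField(required=True, unique=True)
        name = StringField(max_length=50)
        meta = {'collection': 'user'}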
|
||||
|
||||
@@ -723,7 +754,7 @@ Abstract classes
|
||||
|
||||
If you want to add some extra functionality to a group of Document classes but
|
||||
you don't need or want the overhead of inheritance you can use the
|
||||
:attr:`abstract` attribute of :attr:`-mongoengine.Document.meta`.
|
||||
:attr:`abstract` attribute of :attr:`~mongoengine.Document.meta`.
|
||||
This won't turn on :ref:`document-inheritance` but will allow you to keep your
|
||||
code DRY::
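    # Sketch of the idea (names are illustrative): shared helpers live on an
    # abstract base class that gets no collection of its own.
    class BaseDocument(Document):
        meta = {'abstract': True}

        def as_dict(self):
            return self.to_mongo().to_dict()

    class Page(BaseDocument):
        title = StringField()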
|
||||
|
||||
|
@@ -2,7 +2,7 @@
|
||||
Documents instances
|
||||
===================
|
||||
To create a new document object, create an instance of the relevant document
|
||||
class, providing values for its fields as its constructor keyword arguments.
|
||||
class, providing values for its fields as constructor keyword arguments.
|
||||
You may provide values for any of the fields on the document::
|
||||
|
||||
>>> page = Page(title="Test Page")
|
||||
@@ -32,11 +32,11 @@ already exist, then any changes will be updated atomically. For example::
|
||||
|
||||
Changes to documents are tracked and on the whole perform ``set`` operations.
|
||||
|
||||
* ``list_field.push(0)`` - *sets* the resulting list
|
||||
* ``del(list_field)`` - *unsets* whole list
|
||||
* ``list_field.push(0)`` --- *sets* the resulting list
|
||||
* ``del(list_field)`` --- *unsets* whole list
|
||||
|
||||
With lists its preferable to use ``Doc.update(push__list_field=0)`` as
|
||||
this stops the whole list being updated - stopping any race conditions.
|
||||
this stops the whole list being updated --- stopping any race conditions.
|
||||
|
||||
.. seealso::
|
||||
:ref:`guide-atomic-updates`
|
||||
@@ -74,7 +74,7 @@ Cascading Saves
If your document contains :class:`~mongoengine.fields.ReferenceField` or
:class:`~mongoengine.fields.GenericReferenceField` objects, then by default the
:meth:`~mongoengine.Document.save` method will not save any changes to
those objects. If you want all references to also be saved also, noting each
those objects. If you want all references to be saved also, noting each
save is a separate query, then passing :attr:`cascade` as True
to the save method will cascade any saves.

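A sketch of a cascading save (models are assumed; each referenced document costs
an extra query)::

    post = BlogPost.objects.first()
    post.author.name = 'A new name'
    # Saves the post and, because cascade=True, the modified author as well.
    post.save(cascade=True)
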
@@ -113,12 +113,13 @@ you may still use :attr:`id` to access the primary key if you want::

    >>> bob.id == bob.email == 'bob@example.com'
    True

You can also access the document's "primary key" using the :attr:`pk` field; in
is an alias to :attr:`id`::
You can also access the document's "primary key" using the :attr:`pk` field,
it's an alias to :attr:`id`::

    >>> page = Page(title="Another Test Page")
    >>> page.save()
    >>> page.id == page.pk
    True

.. note::

@@ -17,7 +17,7 @@ fetch documents from the database::

As of MongoEngine 0.8 the querysets utilise a local cache. So iterating
it multiple times will only cause a single query. If this is not the
desired behavour you can call :class:`~mongoengine.QuerySet.no_cache`
desired behaviour you can call :class:`~mongoengine.QuerySet.no_cache`
(version **0.8.3+**) to return a non-caching queryset.

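For instance (model name assumed)::

    pages = Page.objects(tags='mongoengine')
    list(pages)   # first iteration runs the query and fills the cache
    list(pages)   # served from the local cache, no second query
    uncached = Page.objects(tags='mongoengine').no_cache()   # 0.8.3+
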
Filtering queries
@@ -39,10 +39,18 @@ syntax::
    # been written by a user whose 'country' field is set to 'uk'
    uk_pages = Page.objects(author__country='uk')

.. note::

    (version **0.9.1+**) if your field name is like mongodb operator name (for example
    type, lte, lt...) and you want to place it at the end of lookup keyword
    mongoengine automatically prepend $ to it. To avoid this use __ at the end of
    your lookup keyword. For example if your field name is ``type`` and you want to
    query by this field you must use ``.objects(user__type__="admin")`` instead of
    ``.objects(user__type="admin")``

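A sketch of the trailing double-underscore escape described in the note above
(document and field names are assumptions)::

    class Profile(EmbeddedDocument):
        type = StringField()          # same name as the $type operator

    class User(Document):
        profile = EmbeddedDocumentField(Profile)

    User.objects(profile__type__="admin")   # trailing __ keeps 'type' a field lookup
    # User.objects(profile__type="admin")   # would be parsed as the $type operator
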
Query operators
===============
Operators other than equality may also be used in queries; just attach the
Operators other than equality may also be used in queries --- just attach the
operator name to a key with a double-underscore::

    # Only find users whose age is 18 or less
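
The query line itself is cut off by the hunk; it would look something like::

    young_users = User.objects(age__lte=18)
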
@@ -84,19 +92,20 @@ expressions:
Geo queries
-----------

There are a few special operators for performing geographical queries. The following
were added in 0.8 for: :class:`~mongoengine.fields.PointField`,
There are a few special operators for performing geographical queries.
The following were added in MongoEngine 0.8 for
:class:`~mongoengine.fields.PointField`,
:class:`~mongoengine.fields.LineStringField` and
:class:`~mongoengine.fields.PolygonField`:

* ``geo_within`` -- Check if a geometry is within a polygon. For ease of use
  it accepts either a geojson geometry or just the polygon coordinates eg::
* ``geo_within`` -- check if a geometry is within a polygon. For ease of use
  it accepts either a geojson geometry or just the polygon coordinates eg::

      loc.objects(point__geo_within=[[[40, 5], [40, 6], [41, 6], [40, 5]]])
      loc.objects(point__geo_within={"type": "Polygon",
          "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]})

* ``geo_within_box`` - simplified geo_within searching with a box eg::
* ``geo_within_box`` -- simplified geo_within searching with a box eg::

      loc.objects(point__geo_within_box=[(-125.0, 35.0), (-100.0, 40.0)])
      loc.objects(point__geo_within_box=[<bottom left coordinates>, <upper right coordinates>])
@@ -132,23 +141,22 @@ were added in 0.8 for: :class:`~mongoengine.fields.PointField`,
      loc.objects(poly__geo_intersects={"type": "Polygon",
          "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]})

* ``near`` -- Find all the locations near a given point::
* ``near`` -- find all the locations near a given point::

      loc.objects(point__near=[40, 5])
      loc.objects(point__near={"type": "Point", "coordinates": [40, 5]})

You can also set the maximum distance in meters as well::
You can also set the maximum and/or the minimum distance in meters as well::

    loc.objects(point__near=[40, 5], point__max_distance=1000)

    loc.objects(point__near=[40, 5], point__min_distance=100)

The older 2D indexes are still supported with the
:class:`~mongoengine.fields.GeoPointField`:

* ``within_distance`` -- provide a list containing a point and a maximum
  distance (e.g. [(41.342, -87.653), 5])
* ``within_spherical_distance`` -- Same as above but using the spherical geo model
* ``within_spherical_distance`` -- same as above but using the spherical geo model
  (e.g. [(41.342, -87.653), 5/earth_radius])
* ``near`` -- order the documents by how close they are to a given point
* ``near_sphere`` -- Same as above but using the spherical geo model
@@ -161,7 +169,8 @@ The older 2D indexes are still supported with the

* ``max_distance`` -- can be added to your location queries to set a maximum
  distance.

* ``min_distance`` -- can be added to your location queries to set a minimum
  distance.

Querying lists
--------------
@@ -198,12 +207,14 @@ However, this doesn't map well to the syntax so you can also use a capital S ins

    Post.objects(comments__by="joe").update(inc__comments__S__votes=1)

.. note:: Due to Mongo currently the $ operator only applies to the first matched item in the query.
.. note::
    Due to :program:`Mongo`, currently the $ operator only applies to the
    first matched item in the query.


Raw queries
-----------
It is possible to provide a raw PyMongo query as a query parameter, which will
It is possible to provide a raw :mod:`PyMongo` query as a query parameter, which will
be integrated directly into the query. This is done using the ``__raw__``
keyword argument::

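The example is cut off by the hunk; a sketch of a raw query (field and value are
illustrative)::

    Page.objects(__raw__={'tags': 'coding'})
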
@@ -213,12 +224,12 @@ keyword argument::

Limiting and skipping results
=============================
Just as with traditional ORMs, you may limit the number of results returned, or
Just as with traditional ORMs, you may limit the number of results returned or
skip a number of results in your query.
:meth:`~mongoengine.queryset.QuerySet.limit` and
:meth:`~mongoengine.queryset.QuerySet.skip` and methods are available on
:class:`~mongoengine.queryset.QuerySet` objects, but the prefered syntax for
achieving this is using array-slicing syntax::
:class:`~mongoengine.queryset.QuerySet` objects, but the `array-slicing` syntax
is preferred for achieving this::

    # Only the first 5 people
    users = User.objects[:5]
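
Skipping combines with limiting through the same slice syntax, e.g.::

    # Users 6 through 10 (skip 5, then take 5)
    users = User.objects[5:10]
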
@@ -252,23 +263,17 @@ To retrieve a result that should be unique in the collection, use
no document matches the query, and
:class:`~mongoengine.queryset.MultipleObjectsReturned`
if more than one document matched the query. These exceptions are merged into
your document defintions eg: `MyDoc.DoesNotExist`
your document definitions eg: `MyDoc.DoesNotExist`

A variation of this method exists,
:meth:`~mongoengine.queryset.Queryset.get_or_create`, that will create a new
document with the query arguments if no documents match the query. An
additional keyword argument, :attr:`defaults` may be provided, which will be
used as default values for the new document, in the case that it should need
to be created::

    >>> a, created = User.objects.get_or_create(name='User A', defaults={'age': 30})
    >>> b, created = User.objects.get_or_create(name='User A', defaults={'age': 40})
    >>> a.name == b.name and a.age == b.age
    True
A variation of this method, get_or_create() existed, but it was unsafe. It
could not be made safe, because there are no transactions in mongoDB. Other
approaches should be investigated, to ensure you don't accidentally duplicate
data when using something similar to this method. Therefore it was deprecated
in 0.8 and removed in 0.10.

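One alternative sketch, using an atomic upsert in place of the removed
``get_or_create()`` (field names are assumptions)::

    # Creates the document only if no match exists, in a single server-side
    # operation; set_on_insert__ values apply only when inserting.
    User.objects(name='User A').update_one(set_on_insert__age=30, upsert=True)
    user = User.objects.get(name='User A')
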
Default Document queries
========================
By default, the objects :attr:`~mongoengine.Document.objects` attribute on a
By default, the objects :attr:`~Document.objects` attribute on a
document returns a :class:`~mongoengine.queryset.QuerySet` that doesn't filter
the collection -- it returns all objects. This may be changed by defining a
method on a document that modifies a queryset. The method should accept two
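
The rest of this section is cut off by the hunk; a sketch of the described
pattern, using the ``queryset_manager`` decorator with assumed names::

    from mongoengine.queryset import queryset_manager

    class BlogPost(Document):
        published = BooleanField(default=False)

        @queryset_manager
        def objects(doc_cls, queryset):
            # Applied to every query made through BlogPost.objects
            return queryset.filter(published=True)
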
@@ -311,7 +316,7 @@ Should you want to add custom methods for interacting with or filtering
documents, extending the :class:`~mongoengine.queryset.QuerySet` class may be
the way to go. To use a custom :class:`~mongoengine.queryset.QuerySet` class on
a document, set ``queryset_class`` to the custom class in a
:class:`~mongoengine.Document`\ s ``meta`` dictionary::
:class:`~mongoengine.Document`'s ``meta`` dictionary::

    class AwesomerQuerySet(QuerySet):

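A sketch of the custom queryset wired into a document (method and field names
are assumptions)::

    class AwesomerQuerySet(QuerySet):
        def get_awesome(self):
            return self.filter(awesome=True)

    class Page(Document):
        awesome = BooleanField(default=True)
        meta = {'queryset_class': AwesomerQuerySet}

    Page.objects.get_awesome()
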
@@ -491,11 +496,14 @@ Documents may be updated atomically by using the
:meth:`~mongoengine.queryset.QuerySet.update_one`,
:meth:`~mongoengine.queryset.QuerySet.update` and
:meth:`~mongoengine.queryset.QuerySet.modify` methods on a
:meth:`~mongoengine.queryset.QuerySet`. There are several different "modifiers"
that you may use with these methods:
:class:`~mongoengine.queryset.QuerySet` or
:meth:`~mongoengine.Document.modify` and
:meth:`~mongoengine.Document.save` (with :attr:`save_condition` argument) on a
:class:`~mongoengine.Document`.
There are several different "modifiers" that you may use with these methods:

* ``set`` -- set a particular value
* ``unset`` -- delete a particular value (since MongoDB v1.3+)
* ``unset`` -- delete a particular value (since MongoDB v1.3)
* ``inc`` -- increment a value by a given amount
* ``dec`` -- decrement a value by a given amount
* ``push`` -- append a value to a list
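
A short sketch combining a few of these modifiers (documents and fields are
illustrative)::

    BlogPost.objects(id=post.id).update_one(push__tags='mongodb', inc__views=1)
    post.modify(set__title='A new title')
    post.save(save_condition={'version': 3})   # only saves if the condition matches
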
@@ -590,7 +598,7 @@ Some variables are made available in the scope of the Javascript function:

The following example demonstrates the intended usage of
:meth:`~mongoengine.queryset.QuerySet.exec_js` by defining a function that sums
over a field on a document (this functionality is already available throught
over a field on a document (this functionality is already available through
:meth:`~mongoengine.queryset.QuerySet.sum` but is shown here for sake of
example)::

@@ -35,25 +35,25 @@ Available signals include:
  :class:`~mongoengine.EmbeddedDocument` instance has been completed.

`pre_save`
  Called within :meth:`~mongoengine.document.Document.save` prior to performing
  Called within :meth:`~mongoengine.Document.save` prior to performing
  any actions.

`pre_save_post_validation`
  Called within :meth:`~mongoengine.document.Document.save` after validation
  Called within :meth:`~mongoengine.Document.save` after validation
  has taken place but before saving.

`post_save`
  Called within :meth:`~mongoengine.document.Document.save` after all actions
  Called within :meth:`~mongoengine.Document.save` after all actions
  (validation, insert/update, cascades, clearing dirty flags) have completed
  successfully. Passed the additional boolean keyword argument `created` to
  indicate if the save was an insert or an update.

`pre_delete`
  Called within :meth:`~mongoengine.document.Document.delete` prior to
  Called within :meth:`~mongoengine.Document.delete` prior to
  attempting the delete operation.

`post_delete`
  Called within :meth:`~mongoengine.document.Document.delete` upon successful
  Called within :meth:`~mongoengine.Document.delete` upon successful
  deletion of the record.

`pre_bulk_insert`
@@ -145,7 +145,7 @@ cleaner looking while still allowing manual execution of the callback::
ReferenceFields and Signals
---------------------------

Currently `reverse_delete_rules` do not trigger signals on the other part of
Currently `reverse_delete_rule` does not trigger signals on the other part of
the relationship. If this is required you must manually handle the
reverse deletion.

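A sketch of handling that manually with a signal (models and the field are
assumptions)::

    from mongoengine import signals

    def cleanup_posts(sender, document, **kwargs):
        # Manually clear the reverse side, since reverse_delete_rule does not
        # fire signals on the related documents.
        BlogPost.objects(author=document).update(unset__author=1)

    signals.pre_delete.connect(cleanup_posts, sender=User)
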
@@ -46,4 +46,6 @@ Next, start a text search using :attr:`QuerySet.search_text` method::
Ordering by text score
======================

::

    objects = News.objects.search('mongo').order_by('$text_score')

@@ -14,7 +14,7 @@ MongoDB. To install it, simply run
  MongoEngine.

:doc:`guide/index`
  The Full guide to MongoEngine - from modeling documents to storing files,
  The Full guide to MongoEngine --- from modeling documents to storing files,
  from querying for data to firing signals and *everything* between.

:doc:`apireference`

@@ -65,7 +65,7 @@ which fields a :class:`User` may have, and what types of data they might store::
    first_name = StringField(max_length=50)
    last_name = StringField(max_length=50)

This looks similar to how a the structure of a table would be defined in a
This looks similar to how the structure of a table would be defined in a
regular ORM. The key difference is that this schema will never be passed on to
MongoDB --- this will only be enforced at the application level, making future
changes easy to manage. Also, the User documents will be stored in a

@@ -2,10 +2,20 @@
Upgrading
#########

0.9.0
*****

The 0.8.7 package on pypi was corrupted. If upgrading from 0.8.7 to 0.9.0 please follow: ::

    pip uninstall pymongo
    pip uninstall mongoengine
    pip install pymongo==2.8
    pip install mongoengine

0.8.7
*****

Calling reload on deleted / nonexistant documents now raises a DoesNotExist
Calling reload on deleted / nonexistent documents now raises a DoesNotExist
exception.

@@ -263,7 +273,7 @@ update your code like so: ::

    [m for m in mammals] # This will return all carnivores

Len iterates the queryset
--------------------------
-------------------------

If you ever did `len(queryset)` it previously did a `count()` under the covers,
this caused some unusual issues. As `len(queryset)` is most often used by

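In other words (model name assumed)::

    num = BlogPost.objects.count()    # server-side count, no documents fetched
    num = len(BlogPost.objects)       # 0.8+: iterates and caches every document first
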
@@ -10,12 +10,11 @@ import signals
|
||||
from signals import *
|
||||
from errors import *
|
||||
import errors
|
||||
import django
|
||||
|
||||
__all__ = (list(document.__all__) + fields.__all__ + connection.__all__ +
|
||||
list(queryset.__all__) + signals.__all__ + list(errors.__all__))
|
||||
|
||||
VERSION = (0, 8, 7)
|
||||
VERSION = (0, 10, 0)
|
||||
|
||||
|
||||
def get_version():
|
||||
|
@@ -1,9 +1,10 @@
|
||||
import weakref
|
||||
import functools
|
||||
import itertools
|
||||
from mongoengine.common import _import_class
|
||||
|
||||
__all__ = ("BaseDict", "BaseList")
|
||||
from mongoengine.common import _import_class
|
||||
from mongoengine.errors import DoesNotExist, MultipleObjectsReturned
|
||||
|
||||
__all__ = ("BaseDict", "BaseList", "EmbeddedDocumentList")
|
||||
|
||||
|
||||
class BaseDict(dict):
|
||||
@@ -20,7 +21,7 @@ class BaseDict(dict):
|
||||
if isinstance(instance, (Document, EmbeddedDocument)):
|
||||
self._instance = weakref.proxy(instance)
|
||||
self._name = name
|
||||
return super(BaseDict, self).__init__(dict_items)
|
||||
super(BaseDict, self).__init__(dict_items)
|
||||
|
||||
def __getitem__(self, key, *args, **kwargs):
|
||||
value = super(BaseDict, self).__getitem__(key)
|
||||
@@ -65,7 +66,7 @@ class BaseDict(dict):
|
||||
|
||||
def clear(self, *args, **kwargs):
|
||||
self._mark_as_changed()
|
||||
return super(BaseDict, self).clear(*args, **kwargs)
|
||||
return super(BaseDict, self).clear()
|
||||
|
||||
def pop(self, *args, **kwargs):
|
||||
self._mark_as_changed()
|
||||
@@ -73,7 +74,11 @@ class BaseDict(dict):
|
||||
|
||||
def popitem(self, *args, **kwargs):
|
||||
self._mark_as_changed()
|
||||
return super(BaseDict, self).popitem(*args, **kwargs)
|
||||
return super(BaseDict, self).popitem()
|
||||
|
||||
def setdefault(self, *args, **kwargs):
|
||||
self._mark_as_changed()
|
||||
return super(BaseDict, self).setdefault(*args, **kwargs)
|
||||
|
||||
def update(self, *args, **kwargs):
|
||||
self._mark_as_changed()
|
||||
@@ -102,7 +107,7 @@ class BaseList(list):
|
||||
if isinstance(instance, (Document, EmbeddedDocument)):
|
||||
self._instance = weakref.proxy(instance)
|
||||
self._name = name
|
||||
return super(BaseList, self).__init__(list_items)
|
||||
super(BaseList, self).__init__(list_items)
|
||||
|
||||
def __getitem__(self, key, *args, **kwargs):
|
||||
value = super(BaseList, self).__getitem__(key)
|
||||
@@ -120,6 +125,10 @@ class BaseList(list):
|
||||
value._instance = self._instance
|
||||
return value
|
||||
|
||||
def __iter__(self):
|
||||
for i in xrange(self.__len__()):
|
||||
yield self[i]
|
||||
|
||||
def __setitem__(self, key, value, *args, **kwargs):
|
||||
if isinstance(key, slice):
|
||||
self._mark_as_changed()
|
||||
@@ -151,6 +160,14 @@ class BaseList(list):
|
||||
self = state
|
||||
return self
|
||||
|
||||
def __iadd__(self, other):
|
||||
self._mark_as_changed()
|
||||
return super(BaseList, self).__iadd__(other)
|
||||
|
||||
def __imul__(self, other):
|
||||
self._mark_as_changed()
|
||||
return super(BaseList, self).__imul__(other)
|
||||
|
||||
def append(self, *args, **kwargs):
|
||||
self._mark_as_changed()
|
||||
return super(BaseList, self).append(*args, **kwargs)
|
||||
@@ -173,7 +190,7 @@ class BaseList(list):
|
||||
|
||||
def reverse(self, *args, **kwargs):
|
||||
self._mark_as_changed()
|
||||
return super(BaseList, self).reverse(*args, **kwargs)
|
||||
return super(BaseList, self).reverse()
|
||||
|
||||
def sort(self, *args, **kwargs):
|
||||
self._mark_as_changed()
|
||||
@@ -187,29 +204,196 @@ class BaseList(list):
|
||||
self._instance._mark_as_changed(self._name)
|
||||
|
||||
|
||||
class EmbeddedDocumentList(BaseList):
|
||||
|
||||
@classmethod
|
||||
def __match_all(cls, i, kwargs):
|
||||
items = kwargs.items()
|
||||
return all([
|
||||
getattr(i, k) == v or str(getattr(i, k)) == v for k, v in items
|
||||
])
|
||||
|
||||
@classmethod
|
||||
def __only_matches(cls, obj, kwargs):
|
||||
if not kwargs:
|
||||
return obj
|
||||
return filter(lambda i: cls.__match_all(i, kwargs), obj)
|
||||
|
||||
def __init__(self, list_items, instance, name):
|
||||
super(EmbeddedDocumentList, self).__init__(list_items, instance, name)
|
||||
self._instance = instance
|
||||
|
||||
def filter(self, **kwargs):
|
||||
"""
|
||||
Filters the list by only including embedded documents with the
|
||||
given keyword arguments.
|
||||
|
||||
:param kwargs: The keyword arguments corresponding to the fields to
|
||||
filter on. *Multiple arguments are treated as if they are ANDed
|
||||
together.*
|
||||
:return: A new ``EmbeddedDocumentList`` containing the matching
|
||||
embedded documents.
|
||||
|
||||
Raises ``AttributeError`` if a given keyword is not a valid field for
|
||||
the embedded document class.
|
||||
"""
|
||||
values = self.__only_matches(self, kwargs)
|
||||
return EmbeddedDocumentList(values, self._instance, self._name)
|
||||
|
||||
def exclude(self, **kwargs):
|
||||
"""
|
||||
Filters the list by excluding embedded documents with the given
|
||||
keyword arguments.
|
||||
|
||||
:param kwargs: The keyword arguments corresponding to the fields to
|
||||
exclude on. *Multiple arguments are treated as if they are ANDed
|
||||
together.*
|
||||
:return: A new ``EmbeddedDocumentList`` containing the non-matching
|
||||
embedded documents.
|
||||
|
||||
Raises ``AttributeError`` if a given keyword is not a valid field for
|
||||
the embedded document class.
|
||||
"""
|
||||
exclude = self.__only_matches(self, kwargs)
|
||||
values = [item for item in self if item not in exclude]
|
||||
return EmbeddedDocumentList(values, self._instance, self._name)
|
||||
|
||||
def count(self):
|
||||
"""
|
||||
The number of embedded documents in the list.
|
||||
|
||||
:return: The length of the list, equivalent to the result of ``len()``.
|
||||
"""
|
||||
return len(self)
|
||||
|
||||
def get(self, **kwargs):
|
||||
"""
|
||||
Retrieves an embedded document determined by the given keyword
|
||||
arguments.
|
||||
|
||||
:param kwargs: The keyword arguments corresponding to the fields to
|
||||
search on. *Multiple arguments are treated as if they are ANDed
|
||||
together.*
|
||||
:return: The embedded document matched by the given keyword arguments.
|
||||
|
||||
Raises ``DoesNotExist`` if the arguments used to query an embedded
|
||||
document returns no results. ``MultipleObjectsReturned`` if more
|
||||
than one result is returned.
|
||||
"""
|
||||
values = self.__only_matches(self, kwargs)
|
||||
if len(values) == 0:
|
||||
raise DoesNotExist(
|
||||
"%s matching query does not exist." % self._name
|
||||
)
|
||||
elif len(values) > 1:
|
||||
raise MultipleObjectsReturned(
|
||||
"%d items returned, instead of 1" % len(values)
|
||||
)
|
||||
|
||||
return values[0]
|
||||
|
||||
def first(self):
|
||||
"""
|
||||
Returns the first embedded document in the list, or ``None`` if empty.
|
||||
"""
|
||||
if len(self) > 0:
|
||||
return self[0]
|
||||
|
||||
def create(self, **values):
|
||||
"""
|
||||
Creates a new embedded document and saves it to the database.
|
||||
|
||||
.. note::
|
||||
The embedded document changes are not automatically saved
|
||||
to the database after calling this method.
|
||||
|
||||
:param values: A dictionary of values for the embedded document.
|
||||
:return: The new embedded document instance.
|
||||
"""
|
||||
name = self._name
|
||||
EmbeddedClass = self._instance._fields[name].field.document_type_obj
|
||||
self._instance[self._name].append(EmbeddedClass(**values))
|
||||
|
||||
return self._instance[self._name][-1]
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
"""
|
||||
Saves the ancestor document.
|
||||
|
||||
:param args: Arguments passed up to the ancestor Document's save
|
||||
method.
|
||||
:param kwargs: Keyword arguments passed up to the ancestor Document's
|
||||
save method.
|
||||
"""
|
||||
self._instance.save(*args, **kwargs)
|
||||
|
||||
def delete(self):
|
||||
"""
|
||||
Deletes the embedded documents from the database.
|
||||
|
||||
.. note::
|
||||
The embedded document changes are not automatically saved
|
||||
to the database after calling this method.
|
||||
|
||||
:return: The number of entries deleted.
|
||||
"""
|
||||
values = list(self)
|
||||
for item in values:
|
||||
self._instance[self._name].remove(item)
|
||||
|
||||
return len(values)
|
||||
|
||||
def update(self, **update):
|
||||
"""
|
||||
Updates the embedded documents with the given update values.
|
||||
|
||||
.. note::
|
||||
The embedded document changes are not automatically saved
|
||||
to the database after calling this method.
|
||||
|
||||
:param update: A dictionary of update values to apply to each
|
||||
embedded document.
|
||||
:return: The number of entries updated.
|
||||
"""
|
||||
if len(update) == 0:
|
||||
return 0
|
||||
values = list(self)
|
||||
for item in values:
|
||||
for k, v in update.items():
|
||||
setattr(item, k, v)
|
||||
|
||||
return len(values)
|
||||
|
||||
|
||||
class StrictDict(object):
|
||||
__slots__ = ()
|
||||
_special_fields = set(['get', 'pop', 'iteritems', 'items', 'keys', 'create'])
|
||||
_classes = {}
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
for k,v in kwargs.iteritems():
|
||||
for k, v in kwargs.iteritems():
|
||||
setattr(self, k, v)
|
||||
|
||||
def __getitem__(self, key):
|
||||
key = '_reserved_' + key if key in self._special_fields else key
|
||||
try:
|
||||
return getattr(self, key)
|
||||
except AttributeError:
|
||||
raise KeyError(key)
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
key = '_reserved_' + key if key in self._special_fields else key
|
||||
return setattr(self, key, value)
|
||||
|
||||
def __contains__(self, key):
|
||||
return hasattr(self, key)
|
||||
|
||||
def get(self, key, default=None):
|
||||
try:
|
||||
return self[key]
|
||||
except KeyError:
|
||||
return default
|
||||
|
||||
def pop(self, key, default=None):
|
||||
v = self.get(key, default)
|
||||
try:
|
||||
@@ -217,19 +401,29 @@ class StrictDict(object):
|
||||
except AttributeError:
|
||||
pass
|
||||
return v
|
||||
|
||||
def iteritems(self):
|
||||
for key in self:
|
||||
yield key, self[key]
|
||||
|
||||
def items(self):
|
||||
return [(k, self[k]) for k in iter(self)]
|
||||
|
||||
def iterkeys(self):
|
||||
return iter(self)
|
||||
|
||||
def keys(self):
|
||||
return list(iter(self))
|
||||
|
||||
def __iter__(self):
|
||||
return (key for key in self.__slots__ if hasattr(self, key))
|
||||
|
||||
def __len__(self):
|
||||
return len(list(self.iteritems()))
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.items() == other.items()
|
||||
|
||||
def __neq__(self, other):
|
||||
return self.items() != other.items()
|
||||
|
||||
@@ -240,15 +434,18 @@ class StrictDict(object):
|
||||
if allowed_keys not in cls._classes:
|
||||
class SpecificStrictDict(cls):
|
||||
__slots__ = allowed_keys_tuple
|
||||
|
||||
def __repr__(self):
|
||||
return "{%s}" % ', '.join('"{0!s}": {0!r}'.format(k,v) for (k,v) in self.iteritems())
|
||||
return "{%s}" % ', '.join('"{0!s}": {0!r}'.format(k) for k in self.iterkeys())
|
||||
|
||||
cls._classes[allowed_keys] = SpecificStrictDict
|
||||
return cls._classes[allowed_keys]
|
||||
|
||||
|
||||
class SemiStrictDict(StrictDict):
|
||||
__slots__ = ('_extras')
|
||||
__slots__ = ('_extras', )
|
||||
_classes = {}
|
||||
|
||||
def __getattr__(self, attr):
|
||||
try:
|
||||
super(SemiStrictDict, self).__getattr__(attr)
|
||||
@@ -257,6 +454,7 @@ class SemiStrictDict(StrictDict):
|
||||
return self.__getattribute__('_extras')[attr]
|
||||
except KeyError as e:
|
||||
raise AttributeError(e)
|
||||
|
||||
def __setattr__(self, attr, value):
|
||||
try:
|
||||
super(SemiStrictDict, self).__setattr__(attr, value)
|
||||
|
@@ -12,11 +12,16 @@ from bson.son import SON
|
||||
from mongoengine import signals
|
||||
from mongoengine.common import _import_class
|
||||
from mongoengine.errors import (ValidationError, InvalidDocumentError,
|
||||
LookUpError)
|
||||
LookUpError, FieldDoesNotExist)
|
||||
from mongoengine.python_support import PY3, txt_type
|
||||
|
||||
from mongoengine.base.common import get_document, ALLOW_INHERITANCE
|
||||
from mongoengine.base.datastructures import BaseDict, BaseList, StrictDict, SemiStrictDict
|
||||
from mongoengine.base.datastructures import (
|
||||
BaseDict,
|
||||
BaseList,
|
||||
EmbeddedDocumentList,
|
||||
StrictDict,
|
||||
SemiStrictDict
|
||||
)
|
||||
from mongoengine.base.fields import ComplexBaseField
|
||||
|
||||
__all__ = ('BaseDocument', 'NON_FIELD_ERRORS')
|
||||
@@ -26,7 +31,7 @@ NON_FIELD_ERRORS = '__all__'
|
||||
|
||||
class BaseDocument(object):
|
||||
__slots__ = ('_changed_fields', '_initialised', '_created', '_data',
|
||||
'_dynamic_fields', '_auto_id_field', '_db_field_map', '_cls', '__weakref__')
|
||||
'_dynamic_fields', '_auto_id_field', '_db_field_map', '__weakref__')
|
||||
|
||||
_dynamic = False
|
||||
_dynamic_lock = True
|
||||
@@ -54,20 +59,32 @@ class BaseDocument(object):
|
||||
raise TypeError(
|
||||
"Multiple values for keyword argument '" + name + "'")
|
||||
values[name] = value
|
||||
|
||||
__auto_convert = values.pop("__auto_convert", True)
|
||||
|
||||
# 399: set default values only to fields loaded from DB
|
||||
__only_fields = set(values.pop("__only_fields", values))
|
||||
|
||||
_created = values.pop("_created", True)
|
||||
|
||||
signals.pre_init.send(self.__class__, document=self, values=values)
|
||||
|
||||
# Check if there are undefined fields supplied to the constructor,
|
||||
# if so raise an Exception.
|
||||
if not self._dynamic and (self._meta.get('strict', True) or _created):
|
||||
for var in values.keys():
|
||||
if var not in self._fields.keys() + ['id', 'pk', '_cls', '_text_score']:
|
||||
msg = (
|
||||
"The field '{0}' does not exist on the document '{1}'"
|
||||
).format(var, self._class_name)
|
||||
raise FieldDoesNotExist(msg)
|
||||
|
||||
if self.STRICT and not self._dynamic:
|
||||
self._data = StrictDict.create(allowed_keys=self._fields_ordered)()
|
||||
else:
|
||||
self._data = SemiStrictDict.create(
|
||||
allowed_keys=self._fields_ordered)()
|
||||
|
||||
_created = values.pop("_created", True)
|
||||
self._data = {}
|
||||
self._dynamic_fields = SON()
|
||||
|
||||
@@ -78,6 +95,9 @@ class BaseDocument(object):
|
||||
value = getattr(self, key, None)
|
||||
setattr(self, key, value)
|
||||
|
||||
if "_cls" not in values:
|
||||
self._cls = self._class_name
|
||||
|
||||
# Set passed values after initialisation
|
||||
if self._dynamic:
|
||||
dynamic_data = {}
|
||||
@@ -129,7 +149,6 @@ class BaseDocument(object):
|
||||
# Handle dynamic data only if an initialised dynamic document
|
||||
if self._dynamic and not self._dynamic_lock:
|
||||
|
||||
field = None
|
||||
if not hasattr(self, name) and not name.startswith('_'):
|
||||
DynamicField = _import_class("DynamicField")
|
||||
field = DynamicField(db_field=name)
|
||||
@@ -162,8 +181,8 @@ class BaseDocument(object):
|
||||
except AttributeError:
|
||||
self__initialised = False
|
||||
# Check if the user has created a new instance of a class
|
||||
if (self._is_document and self__initialised
|
||||
and self__created and name == self._meta['id_field']):
|
||||
if (self._is_document and self__initialised and
|
||||
self__created and name == self._meta.get('id_field')):
|
||||
super(BaseDocument, self).__setattr__('_created', False)
|
||||
|
||||
super(BaseDocument, self).__setattr__(name, value)
|
||||
@@ -185,7 +204,12 @@ class BaseDocument(object):
|
||||
if k in data:
|
||||
setattr(self, k, data[k])
|
||||
if '_fields_ordered' in data:
|
||||
setattr(type(self), '_fields_ordered', data['_fields_ordered'])
|
||||
if self._dynamic:
|
||||
setattr(self, '_fields_ordered', data['_fields_ordered'])
|
||||
else:
|
||||
_super_fields_ordered = type(self)._fields_ordered
|
||||
setattr(self, '_fields_ordered', _super_fields_ordered)
|
||||
|
||||
dynamic_fields = data.get('_dynamic_fields') or SON()
|
||||
for k in dynamic_fields.keys():
|
||||
setattr(self, k, data["_data"].get(k))
|
||||
@@ -226,7 +250,7 @@ class BaseDocument(object):
|
||||
u = self.__str__()
|
||||
except (UnicodeEncodeError, UnicodeDecodeError):
|
||||
u = '[Bad Unicode data]'
|
||||
repr_type = type(u)
|
||||
repr_type = str if u is None else type(u)
|
||||
return repr_type('<%s: %s>' % (self.__class__.__name__, u))
|
||||
|
||||
def __str__(self):
|
||||
@@ -238,10 +262,12 @@ class BaseDocument(object):
|
||||
return txt_type('%s object' % self.__class__.__name__)
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, self.__class__) and hasattr(other, 'id'):
|
||||
if isinstance(other, self.__class__) and hasattr(other, 'id') and other.id is not None:
|
||||
return self.id == other.id
|
||||
if isinstance(other, DBRef):
|
||||
return self._get_collection_name() == other.collection and self.id == other.id
|
||||
if self.id is None:
|
||||
return self is other
|
||||
return False
|
||||
|
||||
def __ne__(self, other):
|
||||
@@ -264,10 +290,23 @@ class BaseDocument(object):
|
||||
"""
|
||||
pass
|
||||
|
||||
def to_mongo(self, use_db_field=True, fields=[]):
|
||||
def get_text_score(self):
|
||||
"""
|
||||
Get text score from text query
|
||||
"""
|
||||
|
||||
if '_text_score' not in self._data:
|
||||
raise InvalidDocumentError('This document is not originally built from a text query')
|
||||
|
||||
return self._data['_text_score']
|
||||
|
||||
def to_mongo(self, use_db_field=True, fields=None):
|
||||
"""
|
||||
Return as SON data ready for use with MongoDB.
|
||||
"""
|
||||
if not fields:
|
||||
fields = []
|
||||
|
||||
data = SON()
|
||||
data["_id"] = None
|
||||
data['_cls'] = self._class_name
|
||||
@@ -287,7 +326,7 @@ class BaseDocument(object):
|
||||
|
||||
if value is not None:
|
||||
|
||||
if isinstance(field, (EmbeddedDocumentField)):
|
||||
if isinstance(field, EmbeddedDocumentField):
|
||||
if fields:
|
||||
key = '%s.' % field_name
|
||||
embedded_fields = [
|
||||
@@ -376,22 +415,24 @@ class BaseDocument(object):
|
||||
|
||||
def to_json(self, *args, **kwargs):
|
||||
"""Converts a document to JSON.
|
||||
:param use_db_field: Set to True by default but enables the output of the json structure with the field names and not the mongodb store db_names in case of set to False
|
||||
:param use_db_field: Set to True by default but enables the output of the json structure with the field names
|
||||
and not the mongodb store db_names in case of set to False
|
||||
"""
|
||||
use_db_field = kwargs.pop('use_db_field') if kwargs.has_key(
|
||||
'use_db_field') else True
|
||||
return json_util.dumps(self.to_mongo(use_db_field), *args, **kwargs)
|
||||
use_db_field = kwargs.pop('use_db_field', True)
|
||||
return json_util.dumps(self.to_mongo(use_db_field), *args, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, json_data):
|
||||
def from_json(cls, json_data, created=False):
|
||||
"""Converts json data to an unsaved document instance"""
|
||||
return cls._from_son(json_util.loads(json_data))
|
||||
return cls._from_son(json_util.loads(json_data), created=created)
|
||||
|
||||
def __expand_dynamic_values(self, name, value):
|
||||
"""expand any dynamic values to their correct types / values"""
|
||||
if not isinstance(value, (dict, list, tuple)):
|
||||
return value
|
||||
|
||||
EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')
|
||||
|
||||
is_list = False
|
||||
if not hasattr(value, 'items'):
|
||||
is_list = True
|
||||
@@ -415,7 +456,10 @@ class BaseDocument(object):
|
||||
# Convert lists / values so we can watch for any changes on them
|
||||
if (isinstance(value, (list, tuple)) and
|
||||
not isinstance(value, BaseList)):
|
||||
value = BaseList(value, self, name)
|
||||
if issubclass(type(self), EmbeddedDocumentListField):
|
||||
value = EmbeddedDocumentList(value, self, name)
|
||||
else:
|
||||
value = BaseList(value, self, name)
|
||||
elif isinstance(value, dict) and not isinstance(value, BaseDict):
|
||||
value = BaseDict(value, self, name)
|
||||
|
||||
@@ -438,7 +482,19 @@ class BaseDocument(object):
|
||||
key = self._db_field_map.get(key, key)
|
||||
|
||||
if key not in self._changed_fields:
|
||||
self._changed_fields.append(key)
|
||||
levels, idx = key.split('.'), 1
|
||||
while idx <= len(levels):
|
||||
if '.'.join(levels[:idx]) in self._changed_fields:
|
||||
break
|
||||
idx += 1
|
||||
else:
|
||||
self._changed_fields.append(key)
|
||||
# remove lower level changed fields
|
||||
level = '.'.join(levels[:idx]) + '.'
|
||||
remove = self._changed_fields.remove
|
||||
for field in self._changed_fields:
|
||||
if field.startswith(level):
|
||||
remove(field)
|
||||
|
||||
def _clear_changed_fields(self):
|
||||
"""Using get_changed_fields iterate and remove any fields that are
|
||||
@@ -490,6 +546,7 @@ class BaseDocument(object):
|
||||
EmbeddedDocument = _import_class("EmbeddedDocument")
|
||||
DynamicEmbeddedDocument = _import_class("DynamicEmbeddedDocument")
|
||||
ReferenceField = _import_class("ReferenceField")
|
||||
SortedListField = _import_class("SortedListField")
|
||||
changed_fields = []
|
||||
changed_fields += getattr(self, '_changed_fields', [])
|
||||
|
||||
@@ -508,12 +565,11 @@ class BaseDocument(object):
|
||||
if hasattr(data, 'id'):
|
||||
if data.id in inspected:
|
||||
continue
|
||||
inspected.add(data.id)
|
||||
if isinstance(field, ReferenceField):
|
||||
continue
|
||||
elif (isinstance(data, (EmbeddedDocument, DynamicEmbeddedDocument))
|
||||
and db_field_name not in changed_fields):
|
||||
# Find all embedded fields that have been changed
|
||||
# Find all embedded fields that have been changed
|
||||
changed = data._get_changed_fields(inspected)
|
||||
changed_fields += ["%s%s" % (key, k) for k in changed if k]
|
||||
elif (isinstance(data, (list, tuple, dict)) and
|
||||
@@ -521,6 +577,12 @@ class BaseDocument(object):
|
||||
if (hasattr(field, 'field') and
|
||||
isinstance(field.field, ReferenceField)):
|
||||
continue
|
||||
elif isinstance(field, SortedListField) and field._ordering:
|
||||
# if ordering is affected whole list is changed
|
||||
if any(map(lambda d: field._ordering in d._changed_fields, data)):
|
||||
changed_fields.append(db_field_name)
|
||||
continue
|
||||
|
||||
self._nestable_types_changed_fields(
|
||||
changed_fields, key, data, inspected)
|
||||
return changed_fields
|
||||
@@ -558,18 +620,18 @@ class BaseDocument(object):
|
||||
else:
|
||||
set_data = doc
|
||||
if '_id' in set_data:
|
||||
del(set_data['_id'])
|
||||
del set_data['_id']
|
||||
|
||||
# Determine if any changed items were actually unset.
|
||||
for path, value in set_data.items():
|
||||
if value or isinstance(value, (numbers.Number, bool)):
|
||||
continue
|
||||
|
||||
# If we've set a value that ain't the default value dont unset it.
|
||||
# If we've set a value that ain't the default value don't unset it.
|
||||
default = None
|
||||
if (self._dynamic and len(parts) and parts[0] in
|
||||
self._dynamic_fields):
|
||||
del(set_data[path])
|
||||
del set_data[path]
|
||||
unset_data[path] = 1
|
||||
continue
|
||||
elif path in self._fields:
|
||||
@@ -603,20 +665,22 @@ class BaseDocument(object):
|
||||
if default != value:
|
||||
continue
|
||||
|
||||
del(set_data[path])
|
||||
del set_data[path]
|
||||
unset_data[path] = 1
|
||||
return set_data, unset_data
|
||||
|
||||
@classmethod
|
||||
def _get_collection_name(cls):
|
||||
"""Returns the collection name for this class.
|
||||
"""Returns the collection name for this class. None for abstract class
|
||||
"""
|
||||
return cls._meta.get('collection', None)
|
||||
|
||||
@classmethod
|
||||
def _from_son(cls, son, _auto_dereference=True, only_fields=[]):
|
||||
def _from_son(cls, son, _auto_dereference=True, only_fields=None, created=False):
|
||||
"""Create an instance of a Document (subclass) from a PyMongo SON.
|
||||
"""
|
||||
if not only_fields:
|
||||
only_fields = []
|
||||
|
||||
# get the class name from the document, falling back to the given
|
||||
# class if unavailable
|
||||
@@ -664,7 +728,7 @@ class BaseDocument(object):
|
||||
if cls.STRICT:
|
||||
data = dict((k, v)
|
||||
for k, v in data.iteritems() if k in cls._fields)
|
||||
obj = cls(__auto_convert=False, _created=False, __only_fields=only_fields, **data)
|
||||
obj = cls(__auto_convert=False, _created=created, __only_fields=only_fields, **data)
|
||||
obj._changed_fields = changed_fields
|
||||
if not _auto_dereference:
|
||||
obj._fields = fields
|
||||
@@ -687,7 +751,7 @@ class BaseDocument(object):
|
||||
|
||||
spec_fields = [v['fields']
|
||||
for k, v in enumerate(index_specs)]
|
||||
# Merge unqiue_indexes with existing specs
|
||||
# Merge unique_indexes with existing specs
|
||||
for k, v in enumerate(indices):
|
||||
if v['fields'] in spec_fields:
|
||||
index_specs[spec_fields.index(v['fields'])].update(v)
|
||||
@@ -717,7 +781,10 @@ class BaseDocument(object):
|
||||
allow_inheritance = cls._meta.get('allow_inheritance',
|
||||
ALLOW_INHERITANCE)
|
||||
include_cls = (allow_inheritance and not spec.get('sparse', False) and
|
||||
spec.get('cls', True))
|
||||
spec.get('cls', True) and '_cls' not in spec['fields'])
|
||||
|
||||
# 733: don't include cls if index_cls is False unless there is an explicit cls with the index
|
||||
include_cls = include_cls and (spec.get('cls', False) or cls._meta.get('index_cls', True))
|
||||
if "cls" in spec:
|
||||
spec.pop('cls')
|
||||
for key in spec['fields']:
|
||||
@@ -727,16 +794,25 @@ class BaseDocument(object):
|
||||
|
||||
# ASCENDING from +
|
||||
# DESCENDING from -
|
||||
# GEO2D from *
|
||||
# TEXT from $
|
||||
# HASHED from #
|
||||
# GEOSPHERE from (
|
||||
# GEOHAYSTACK from )
|
||||
# GEO2D from *
|
||||
direction = pymongo.ASCENDING
|
||||
if key.startswith("-"):
|
||||
direction = pymongo.DESCENDING
|
||||
elif key.startswith("*"):
|
||||
direction = pymongo.GEO2D
|
||||
elif key.startswith("$"):
|
||||
direction = pymongo.TEXT
|
||||
if key.startswith(("+", "-", "*", "$")):
|
||||
elif key.startswith("#"):
|
||||
direction = pymongo.HASHED
|
||||
elif key.startswith("("):
|
||||
direction = pymongo.GEOSPHERE
|
||||
elif key.startswith(")"):
|
||||
direction = pymongo.GEOHAYSTACK
|
||||
elif key.startswith("*"):
|
||||
direction = pymongo.GEO2D
|
||||
if key.startswith(("+", "-", "*", "$", "#", "(", ")")):
|
||||
key = key[1:]
|
||||
|
||||
# Use real field name, do it manually because we need field
|
||||
@@ -744,7 +820,6 @@ class BaseDocument(object):
|
||||
parts = key.split('.')
|
||||
if parts in (['pk'], ['id'], ['_id']):
|
||||
key = '_id'
|
||||
fields = []
|
||||
else:
|
||||
fields = cls._lookup_field(parts)
|
||||
parts = []
|
||||
@@ -759,7 +834,8 @@ class BaseDocument(object):
|
||||
index_list.append((key, direction))
|
||||
|
||||
# Don't add cls to a geo index
|
||||
if include_cls and direction is not pymongo.GEO2D:
|
||||
if include_cls and direction not in (
|
||||
pymongo.GEO2D, pymongo.GEOHAYSTACK, pymongo.GEOSPHERE):
|
||||
index_list.insert(0, ('_cls', 1))
|
||||
|
||||
if index_list:
|
||||
@@ -778,10 +854,9 @@ class BaseDocument(object):
|
||||
"""
|
||||
unique_indexes = []
|
||||
for field_name, field in cls._fields.items():
|
||||
sparse = False
|
||||
sparse = field.sparse
|
||||
# Generate a list of indexes needed by uniqueness constraints
|
||||
if field.unique:
|
||||
field.required = True
|
||||
unique_fields = [field.db_field]
|
||||
|
||||
# Add any unique_with fields to the back of the index spec
|
||||
@@ -809,6 +884,9 @@ class BaseDocument(object):
|
||||
index = {'fields': fields, 'unique': True, 'sparse': sparse}
|
||||
unique_indexes.append(index)
|
||||
|
||||
if field.__class__.__name__ == "ListField":
|
||||
field = field.field
|
||||
|
||||
# Grab any embedded document field unique indexes
|
||||
if (field.__class__.__name__ == "EmbeddedDocumentField" and
|
||||
field.document_type != cls):
|
||||
@@ -855,6 +933,7 @@ class BaseDocument(object):
|
||||
"""
|
||||
|
||||
ListField = _import_class("ListField")
|
||||
DynamicField = _import_class('DynamicField')
|
||||
|
||||
if not isinstance(parts, (list, tuple)):
|
||||
parts = [parts]
|
||||
@@ -864,7 +943,6 @@ class BaseDocument(object):
|
||||
for field_name in parts:
|
||||
# Handle ListField indexing:
|
||||
if field_name.isdigit() and isinstance(field, ListField):
|
||||
new_field = field.field
|
||||
fields.append(field_name)
|
||||
continue
|
||||
|
||||
@@ -876,8 +954,19 @@ class BaseDocument(object):
|
||||
if field_name in cls._fields:
|
||||
field = cls._fields[field_name]
|
||||
elif cls._dynamic:
|
||||
DynamicField = _import_class('DynamicField')
|
||||
field = DynamicField(db_field=field_name)
|
||||
elif cls._meta.get("allow_inheritance", False) or cls._meta.get("abstract", False):
|
||||
# 744: in case the field is defined in a subclass
|
||||
for subcls in cls.__subclasses__():
|
||||
try:
|
||||
field = subcls._lookup_field([field_name])[0]
|
||||
except LookUpError:
|
||||
continue
|
||||
|
||||
if field is not None:
|
||||
break
|
||||
else:
|
||||
raise LookUpError('Cannot resolve field "%s"' % field_name)
|
||||
else:
|
||||
raise LookUpError('Cannot resolve field "%s"'
|
||||
% field_name)
|
||||
@@ -889,21 +978,20 @@ class BaseDocument(object):
|
||||
'__'.join(parts))
|
||||
if hasattr(getattr(field, 'field', None), 'lookup_member'):
|
||||
new_field = field.field.lookup_member(field_name)
|
||||
else:
|
||||
# Look up subfield on the previous field
|
||||
new_field = field.lookup_member(field_name)
|
||||
if not new_field and isinstance(field, ComplexBaseField):
|
||||
if hasattr(field.field, 'document_type') and cls._dynamic \
|
||||
and field.field.document_type._dynamic:
|
||||
DynamicField = _import_class('DynamicField')
|
||||
new_field = DynamicField(db_field=field_name)
|
||||
else:
|
||||
fields.append(field_name)
|
||||
continue
|
||||
elif not new_field and hasattr(field, 'document_type') and cls._dynamic \
|
||||
and field.document_type._dynamic:
|
||||
DynamicField = _import_class('DynamicField')
|
||||
elif cls._dynamic and (isinstance(field, DynamicField) or
|
||||
getattr(getattr(field, 'document_type'), '_dynamic')):
|
||||
new_field = DynamicField(db_field=field_name)
|
||||
else:
|
||||
# Look up subfield on the previous field or raise
|
||||
try:
|
||||
new_field = field.lookup_member(field_name)
|
||||
except AttributeError:
|
||||
raise LookUpError('Cannot resolve subfield or operator {} '
|
||||
'on the field {}'.format(
|
||||
field_name, field.name))
|
||||
if not new_field and isinstance(field, ComplexBaseField):
|
||||
fields.append(field_name)
|
||||
continue
|
||||
elif not new_field:
|
||||
raise LookUpError('Cannot resolve field "%s"'
|
||||
% field_name)
|
||||
|
@@ -7,16 +7,21 @@ import pymongo
|
||||
|
||||
from mongoengine.common import _import_class
|
||||
from mongoengine.errors import ValidationError
|
||||
|
||||
from mongoengine.base.common import ALLOW_INHERITANCE
|
||||
from mongoengine.base.datastructures import BaseDict, BaseList
|
||||
from mongoengine.base.datastructures import (
|
||||
BaseDict, BaseList, EmbeddedDocumentList
|
||||
)
|
||||
|
||||
__all__ = ("BaseField", "ComplexBaseField",
|
||||
"ObjectIdField", "GeoJsonBaseField")
|
||||
|
||||
|
||||
class BaseField(object):
|
||||
UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push',
|
||||
'push_all', 'pull', 'pull_all', 'add_to_set',
|
||||
'set_on_insert', 'min', 'max'])
|
||||
|
||||
|
||||
class BaseField(object):
|
||||
"""A base class for fields in a MongoDB document. Instances of this class
|
||||
may be added to subclasses of `Document` to define a document's schema.
|
||||
|
||||
@@ -37,7 +42,7 @@ class BaseField(object):
|
||||
def __init__(self, db_field=None, name=None, required=False, default=None,
|
||||
unique=False, unique_with=None, primary_key=False,
|
||||
validation=None, choices=None, verbose_name=None,
|
||||
help_text=None):
|
||||
help_text=None, null=False, sparse=False, custom_data=None):
|
||||
"""
|
||||
:param db_field: The database field to store this field in
|
||||
(defaults to the name of the field)
|
||||
@@ -60,6 +65,11 @@ class BaseField(object):
|
||||
model forms from the document model.
|
||||
:param help_text: (optional) The help text for this field and is often
|
||||
used when generating model forms from the document model.
|
||||
:param null: (optional) Is the field value can be null. If no and there is a default value
|
||||
then the default value is set
|
||||
:param sparse: (optional) `sparse=True` combined with `unique=True` and `required=False`
|
||||
means that uniqueness won't be enforced for `None` values
|
||||
:param custom_data: (optional) Custom metadata for this field.
|
||||
"""
|
||||
self.db_field = (db_field or name) if not primary_key else '_id'
|
||||
|
||||
@@ -75,6 +85,10 @@ class BaseField(object):
|
||||
self.choices = choices
|
||||
self.verbose_name = verbose_name
|
||||
self.help_text = help_text
|
||||
self.null = null
|
||||
self.sparse = sparse
|
||||
self._owner_document = None
|
||||
self.custom_data = custom_data
|
||||
|
||||
# Adjust the appropriate creation counter, and save our local copy.
|
||||
if self.db_field == '_id':
|
||||
@@ -98,12 +112,15 @@ class BaseField(object):
|
||||
"""Descriptor for assigning a value to a field in a document.
|
||||
"""
|
||||
|
||||
# If setting to None and theres a default
|
||||
# If setting to None and there is a default
|
||||
# Then set the value to the default value
|
||||
if value is None and self.default is not None:
|
||||
value = self.default
|
||||
if callable(value):
|
||||
value = value()
|
||||
if value is None:
|
||||
if self.null:
|
||||
value = None
|
||||
elif self.default is not None:
|
||||
value = self.default
|
||||
if callable(value):
|
||||
value = value()
|
||||
|
||||
if instance._initialised:
|
||||
try:
|
||||
@@ -139,6 +156,8 @@ class BaseField(object):
|
||||
def prepare_query_value(self, op, value):
|
||||
"""Prepare a value that is being used in a query for PyMongo.
|
||||
"""
|
||||
if op in UPDATE_OPERATORS:
|
||||
self.validate(value)
|
||||
return value
|
||||
|
||||
def validate(self, value, clean=True):
|
||||
@@ -149,21 +168,23 @@ class BaseField(object):
|
||||
def _validate(self, value, **kwargs):
|
||||
Document = _import_class('Document')
|
||||
EmbeddedDocument = _import_class('EmbeddedDocument')
|
||||
# check choices
|
||||
|
||||
# Check the Choices Constraint
|
||||
if self.choices:
|
||||
is_cls = isinstance(value, (Document, EmbeddedDocument))
|
||||
value_to_check = value.__class__ if is_cls else value
|
||||
err_msg = 'an instance' if is_cls else 'one'
|
||||
|
||||
choice_list = self.choices
|
||||
if isinstance(self.choices[0], (list, tuple)):
|
||||
option_keys = [k for k, v in self.choices]
|
||||
if value_to_check not in option_keys:
|
||||
msg = ('Value must be %s of %s' %
|
||||
(err_msg, unicode(option_keys)))
|
||||
self.error(msg)
|
||||
elif value_to_check not in self.choices:
|
||||
msg = ('Value must be %s of %s' %
|
||||
(err_msg, unicode(self.choices)))
|
||||
self.error(msg)
|
||||
choice_list = [k for k, v in self.choices]
|
||||
|
||||
# Choices which are other types of Documents
|
||||
if isinstance(value, (Document, EmbeddedDocument)):
|
||||
if not any(isinstance(value, c) for c in choice_list):
|
||||
self.error(
|
||||
'Value must be instance of %s' % unicode(choice_list)
|
||||
)
|
||||
# Choices which are types other than Documents
|
||||
elif value not in choice_list:
|
||||
self.error('Value must be one of %s' % unicode(choice_list))
|
||||
|
||||
# check validation argument
|
||||
if self.validation is not None:
|
||||
@@ -176,9 +197,19 @@ class BaseField(object):
|
||||
|
||||
self.validate(value, **kwargs)
|
||||
|
||||
@property
|
||||
def owner_document(self):
|
||||
return self._owner_document
|
||||
|
||||
def _set_owner_document(self, owner_document):
|
||||
self._owner_document = owner_document
|
||||
|
||||
@owner_document.setter
|
||||
def owner_document(self, owner_document):
|
||||
self._set_owner_document(owner_document)
|
||||
|
||||
|
||||
class ComplexBaseField(BaseField):
|
||||
|
||||
"""Handles complex fields, such as lists / dictionaries.
|
||||
|
||||
Allows for nesting of embedded documents inside complex types.
|
||||
@@ -199,6 +230,7 @@ class ComplexBaseField(BaseField):
|
||||
|
||||
ReferenceField = _import_class('ReferenceField')
|
||||
GenericReferenceField = _import_class('GenericReferenceField')
|
||||
EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')
|
||||
dereference = (self._auto_dereference and
|
||||
(self.field is None or isinstance(self.field,
|
||||
(GenericReferenceField, ReferenceField))))
|
||||
@@ -215,17 +247,20 @@ class ComplexBaseField(BaseField):
|
||||
value = super(ComplexBaseField, self).__get__(instance, owner)
|
||||
|
||||
# Convert lists / values so we can watch for any changes on them
|
||||
if (isinstance(value, (list, tuple)) and
|
||||
not isinstance(value, BaseList)):
|
||||
value = BaseList(value, instance, self.name)
|
||||
if isinstance(value, (list, tuple)):
|
||||
if (issubclass(type(self), EmbeddedDocumentListField) and
|
||||
not isinstance(value, EmbeddedDocumentList)):
|
||||
value = EmbeddedDocumentList(value, instance, self.name)
|
||||
elif not isinstance(value, BaseList):
|
||||
value = BaseList(value, instance, self.name)
|
||||
instance._data[self.name] = value
|
||||
elif isinstance(value, dict) and not isinstance(value, BaseDict):
|
||||
value = BaseDict(value, instance, self.name)
|
||||
instance._data[self.name] = value
|
||||
|
||||
if (self._auto_dereference and instance._initialised and
|
||||
isinstance(value, (BaseList, BaseDict))
|
||||
and not value._dereferenced):
|
||||
isinstance(value, (BaseList, BaseDict)) and
|
||||
not value._dereferenced):
|
||||
value = _dereference(
|
||||
value, max_depth=1, instance=instance, name=self.name
|
||||
)
|
||||
@@ -254,6 +289,7 @@ class ComplexBaseField(BaseField):
|
||||
return value
|
||||
|
||||
if self.field:
|
||||
self.field._auto_dereference = self._auto_dereference
|
||||
value_dict = dict([(key, self.field.to_python(item))
|
||||
for key, item in value.items()])
|
||||
else:
|
||||
@@ -291,8 +327,8 @@ class ComplexBaseField(BaseField):
|
||||
return GenericReferenceField().to_mongo(value)
|
||||
cls = value.__class__
|
||||
val = value.to_mongo()
|
||||
# If we its a document thats not inherited add _cls
|
||||
if (isinstance(value, EmbeddedDocument)):
|
||||
# If it's a document that is not inherited add _cls
|
||||
if isinstance(value, EmbeddedDocument):
|
||||
val['_cls'] = cls.__name__
|
||||
return val
|
||||
|
||||
@@ -331,8 +367,8 @@ class ComplexBaseField(BaseField):
|
||||
elif hasattr(v, 'to_mongo'):
|
||||
cls = v.__class__
|
||||
val = v.to_mongo()
|
||||
# If we its a document thats not inherited add _cls
|
||||
if (isinstance(v, (Document, EmbeddedDocument))):
|
||||
# If it's a document that is not inherited add _cls
|
||||
if isinstance(v, (Document, EmbeddedDocument)):
|
||||
val['_cls'] = cls.__name__
|
||||
value_dict[k] = val
|
||||
else:
|
||||
@@ -381,20 +417,17 @@ class ComplexBaseField(BaseField):
|
||||
self.field.owner_document = owner_document
|
||||
self._owner_document = owner_document
|
||||
|
||||
def _get_owner_document(self, owner_document):
|
||||
self._owner_document = owner_document
|
||||
|
||||
owner_document = property(_get_owner_document, _set_owner_document)
|
||||
|
||||
|
||||
class ObjectIdField(BaseField):
|
||||
|
||||
"""A field wrapper around MongoDB's ObjectIds.
|
||||
"""
|
||||
|
||||
def to_python(self, value):
|
||||
if not isinstance(value, ObjectId):
|
||||
value = ObjectId(value)
|
||||
try:
|
||||
if not isinstance(value, ObjectId):
|
||||
value = ObjectId(value)
|
||||
except:
|
||||
pass
|
||||
return value
|
||||
|
||||
def to_mongo(self, value):
|
||||
@@ -417,8 +450,8 @@ class ObjectIdField(BaseField):
|
||||
|
||||
|
||||
class GeoJsonBaseField(BaseField):
|
||||
|
||||
"""A geo json field storing a geojson style object.
|
||||
|
||||
.. versionadded:: 0.8
|
||||
"""
|
||||
|
||||
@@ -427,8 +460,8 @@ class GeoJsonBaseField(BaseField):
|
||||
|
||||
def __init__(self, auto_index=True, *args, **kwargs):
|
||||
"""
|
||||
:param auto_index: Automatically create a "2dsphere" index. Defaults
|
||||
to `True`.
|
||||
:param bool auto_index: Automatically create a "2dsphere" index.\
|
||||
Defaults to `True`.
|
||||
"""
|
||||
self._name = "%sField" % self._type
|
||||
if not auto_index:
|
||||
@@ -457,7 +490,7 @@ class GeoJsonBaseField(BaseField):
|
||||
if error:
|
||||
self.error(error)
|
||||
|
||||
def _validate_polygon(self, value):
|
||||
def _validate_polygon(self, value, top_level=True):
|
||||
if not isinstance(value, (list, tuple)):
|
||||
return 'Polygons must contain list of linestrings'
|
||||
|
||||
@@ -475,7 +508,10 @@ class GeoJsonBaseField(BaseField):
|
||||
if error and error not in errors:
|
||||
errors.append(error)
|
||||
if errors:
|
||||
return "Invalid Polygon:\n%s" % ", ".join(errors)
|
||||
if top_level:
|
||||
return "Invalid Polygon:\n%s" % ", ".join(errors)
|
||||
else:
|
||||
return "%s" % ", ".join(errors)
|
||||
|
||||
def _validate_linestring(self, value, top_level=True):
|
||||
"""Validates a linestring"""
|
||||
@@ -509,6 +545,66 @@ class GeoJsonBaseField(BaseField):
|
||||
not isinstance(value[1], (float, int))):
|
||||
return "Both values (%s) in point must be float or int" % repr(value)
|
||||
|
||||
def _validate_multipoint(self, value):
|
||||
if not isinstance(value, (list, tuple)):
|
||||
return 'MultiPoint must be a list of Point'
|
||||
|
||||
# Quick and dirty validator
|
||||
try:
|
||||
value[0][0]
|
||||
except:
|
||||
return "Invalid MultiPoint must contain at least one valid point"
|
||||
|
||||
errors = []
|
||||
for point in value:
|
||||
error = self._validate_point(point)
|
||||
if error and error not in errors:
|
||||
errors.append(error)
|
||||
|
||||
if errors:
|
||||
return "%s" % ", ".join(errors)
|
||||
|
||||
def _validate_multilinestring(self, value, top_level=True):
|
||||
if not isinstance(value, (list, tuple)):
|
||||
return 'MultiLineString must be a list of LineString'
|
||||
|
||||
# Quick and dirty validator
|
||||
try:
|
||||
value[0][0][0]
|
||||
except:
|
||||
return "Invalid MultiLineString must contain at least one valid linestring"
|
||||
|
||||
errors = []
|
||||
for linestring in value:
|
||||
error = self._validate_linestring(linestring, False)
|
||||
if error and error not in errors:
|
||||
errors.append(error)
|
||||
|
||||
if errors:
|
||||
if top_level:
|
||||
return "Invalid MultiLineString:\n%s" % ", ".join(errors)
|
||||
else:
|
||||
return "%s" % ", ".join(errors)
|
||||
|
||||
def _validate_multipolygon(self, value):
|
||||
if not isinstance(value, (list, tuple)):
|
||||
return 'MultiPolygon must be a list of Polygon'
|
||||
|
||||
# Quick and dirty validator
|
||||
try:
|
||||
value[0][0][0][0]
|
||||
except:
|
||||
return "Invalid MultiPolygon must contain at least one valid Polygon"
|
||||
|
||||
errors = []
|
||||
for polygon in value:
|
||||
error = self._validate_polygon(polygon, False)
|
||||
if error and error not in errors:
|
||||
errors.append(error)
|
||||
|
||||
if errors:
|
||||
return "Invalid MultiPolygon:\n%s" % ", ".join(errors)
|
||||
|
||||
def to_mongo(self, value):
|
||||
if isinstance(value, dict):
|
||||
return value
|
||||
|
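For reference, the validators above work on plain nested lists in GeoJSON coordinate order (longitude, latitude), adding one level of nesting per geometry layer. A hedged sketch of the shapes they accept (coordinates are made up):

    point           = [40.0, 5.0]                                              # PointField
    linestring      = [[40.0, 5.0], [41.0, 6.0]]                               # LineStringField
    polygon         = [[[40.0, 5.0], [40.0, 6.0], [41.0, 6.0], [40.0, 5.0]]]   # list of rings
    multipoint      = [point, [41.0, 6.0]]
    multilinestring = [linestring, [[50.0, 5.0], [51.0, 6.0]]]
    multipolygon    = [polygon]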
@@ -1,13 +1,11 @@
|
||||
import warnings
|
||||
|
||||
import pymongo
|
||||
|
||||
from mongoengine.common import _import_class
|
||||
from mongoengine.errors import InvalidDocumentError
|
||||
from mongoengine.python_support import PY3
|
||||
from mongoengine.queryset import (DO_NOTHING, DoesNotExist,
|
||||
MultipleObjectsReturned,
|
||||
QuerySet, QuerySetManager)
|
||||
QuerySetManager)
|
||||
|
||||
from mongoengine.base.common import _document_registry, ALLOW_INHERITANCE
|
||||
from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField
|
||||
@@ -16,7 +14,6 @@ __all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass')
|
||||
|
||||
|
||||
class DocumentMetaclass(type):
|
||||
|
||||
"""Metaclass for all documents.
|
||||
"""
|
||||
|
||||
@@ -46,6 +43,11 @@ class DocumentMetaclass(type):
|
||||
elif hasattr(base, '_meta'):
|
||||
meta.merge(base._meta)
|
||||
attrs['_meta'] = meta
|
||||
attrs['_meta']['abstract'] = False # 789: EmbeddedDocument shouldn't inherit abstract
|
||||
|
||||
if attrs['_meta'].get('allow_inheritance', ALLOW_INHERITANCE):
|
||||
StringField = _import_class('StringField')
|
||||
attrs['_cls'] = StringField()
|
||||
|
||||
# Handle document Fields
|
||||
|
||||
@@ -108,7 +110,7 @@ class DocumentMetaclass(type):
|
||||
for base in flattened_bases:
|
||||
if (not getattr(base, '_is_base_cls', True) and
|
||||
not getattr(base, '_meta', {}).get('abstract', True)):
|
||||
# Collate heirarchy for _cls and _subclasses
|
||||
# Collate hierarchy for _cls and _subclasses
|
||||
class_name.append(base.__name__)
|
||||
|
||||
if hasattr(base, '_meta'):
|
||||
@@ -141,7 +143,7 @@ class DocumentMetaclass(type):
|
||||
for base in document_bases:
|
||||
if _cls not in base._subclasses:
|
||||
base._subclasses += (_cls,)
|
||||
base._types = base._subclasses # TODO depreciate _types
|
||||
base._types = base._subclasses # TODO depreciate _types
|
||||
|
||||
(Document, EmbeddedDocument, DictField,
|
||||
CachedReferenceField) = cls._import_classes()
|
||||
@@ -171,7 +173,8 @@ class DocumentMetaclass(type):
|
||||
# Handle delete rules
|
||||
for field in new_class._fields.itervalues():
|
||||
f = field
|
||||
f.owner_document = new_class
|
||||
if f.owner_document is None:
|
||||
f.owner_document = new_class
|
||||
delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING)
|
||||
if isinstance(f, CachedReferenceField):
|
||||
|
||||
@@ -180,7 +183,7 @@ class DocumentMetaclass(type):
|
||||
"CachedReferenceFields is not allowed in EmbeddedDocuments")
|
||||
if not f.document_type:
|
||||
raise InvalidDocumentError(
|
||||
"Document is not avaiable to sync")
|
||||
"Document is not available to sync")
|
||||
|
||||
if f.auto_sync:
|
||||
f.start_listener()
|
||||
@@ -242,11 +245,10 @@ class DocumentMetaclass(type):
|
||||
EmbeddedDocument = _import_class('EmbeddedDocument')
|
||||
DictField = _import_class('DictField')
|
||||
CachedReferenceField = _import_class('CachedReferenceField')
|
||||
return (Document, EmbeddedDocument, DictField, CachedReferenceField)
|
||||
return Document, EmbeddedDocument, DictField, CachedReferenceField
|
||||
|
||||
|
||||
class TopLevelDocumentMetaclass(DocumentMetaclass):
|
||||
|
||||
"""Metaclass for top-level documents (i.e. documents that have their own
|
||||
collection in the database.
|
||||
"""
|
||||
@@ -256,7 +258,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
||||
super_new = super(TopLevelDocumentMetaclass, cls).__new__
|
||||
|
||||
# Set default _meta data if base class, otherwise get user defined meta
|
||||
if (attrs.get('my_metaclass') == TopLevelDocumentMetaclass):
|
||||
if attrs.get('my_metaclass') == TopLevelDocumentMetaclass:
|
||||
# defaults
|
||||
attrs['_meta'] = {
|
||||
'abstract': True,
|
||||
@@ -275,7 +277,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
||||
attrs['_meta'].update(attrs.get('meta', {}))
|
||||
else:
|
||||
attrs['_meta'] = attrs.get('meta', {})
|
||||
# Explictly set abstract to false unless set
|
||||
# Explicitly set abstract to false unless set
|
||||
attrs['_meta']['abstract'] = attrs['_meta'].get('abstract', False)
|
||||
attrs['_is_base_cls'] = False
|
||||
|
||||
@@ -290,7 +292,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
||||
|
||||
# Clean up top level meta
|
||||
if 'meta' in attrs:
|
||||
del(attrs['meta'])
|
||||
del attrs['meta']
|
||||
|
||||
# Find the parent document class
|
||||
parent_doc_cls = [b for b in flattened_bases
|
||||
@@ -299,11 +301,11 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
||||
|
||||
# Prevent classes setting collection different to their parents
|
||||
# If parent wasn't an abstract class
|
||||
if (parent_doc_cls and 'collection' in attrs.get('_meta', {})
|
||||
and not parent_doc_cls._meta.get('abstract', True)):
|
||||
if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) and
|
||||
not parent_doc_cls._meta.get('abstract', True)):
|
||||
msg = "Trying to set a collection on a subclass (%s)" % name
|
||||
warnings.warn(msg, SyntaxWarning)
|
||||
del(attrs['_meta']['collection'])
|
||||
del attrs['_meta']['collection']
|
||||
|
||||
# Ensure abstract documents have abstract bases
|
||||
if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'):
|
||||
@@ -381,15 +383,17 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
||||
new_class._auto_id_field = getattr(parent_doc_cls,
|
||||
'_auto_id_field', False)
|
||||
if not new_class._meta.get('id_field'):
|
||||
# After 0.10, find a name that is not already taken instead of overwriting
|
||||
id_name, id_db_name = cls.get_auto_id_names(new_class)
|
||||
new_class._auto_id_field = True
|
||||
new_class._meta['id_field'] = 'id'
|
||||
new_class._fields['id'] = ObjectIdField(db_field='_id')
|
||||
new_class._fields['id'].name = 'id'
|
||||
new_class.id = new_class._fields['id']
|
||||
|
||||
# Prepend id field to _fields_ordered
|
||||
if 'id' in new_class._fields and 'id' not in new_class._fields_ordered:
|
||||
new_class._fields_ordered = ('id', ) + new_class._fields_ordered
|
||||
new_class._meta['id_field'] = id_name
|
||||
new_class._fields[id_name] = ObjectIdField(db_field=id_db_name)
|
||||
new_class._fields[id_name].name = id_name
|
||||
new_class.id = new_class._fields[id_name]
|
||||
new_class._db_field_map[id_name] = id_db_name
|
||||
new_class._reverse_db_field_map[id_db_name] = id_name
|
||||
# Prepend id field to _fields_ordered
|
||||
new_class._fields_ordered = (id_name, ) + new_class._fields_ordered
|
||||
|
||||
# Merge in exceptions with parent hierarchy
|
||||
exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned)
|
||||
@@ -404,9 +408,22 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
|
||||
|
||||
return new_class
|
||||
|
||||
@classmethod
|
||||
def get_auto_id_names(cls, new_class):
|
||||
id_name, id_db_name = ('id', '_id')
|
||||
if id_name not in new_class._fields and \
|
||||
id_db_name not in (v.db_field for v in new_class._fields.values()):
|
||||
return id_name, id_db_name
|
||||
id_basename, id_db_basename, i = 'auto_id', '_auto_id', 0
|
||||
while id_name in new_class._fields or \
|
||||
id_db_name in (v.db_field for v in new_class._fields.values()):
|
||||
id_name = '{0}_{1}'.format(id_basename, i)
|
||||
id_db_name = '{0}_{1}'.format(id_db_basename, i)
|
||||
i += 1
|
||||
return id_name, id_db_name
|
||||
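get_auto_id_names() is the helper behind the change above: rather than always overwriting an implicit id field, the metaclass now picks the first name that is not already taken. A hedged sketch of the effect (the class is hypothetical):

    from mongoengine import Document, IntField

    class Clash(Document):
        # 'id' is already taken by an ordinary field, so the implicit primary
        # key is created under the first free name: 'auto_id_0' (db key '_auto_id_0')
        id = IntField()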
|
||||
|
||||
class MetaDict(dict):
|
||||
|
||||
"""Custom dictionary for meta classes.
|
||||
Handles the merging of set indexes
|
||||
"""
|
||||
@@ -421,6 +438,5 @@ class MetaDict(dict):
|
||||
|
||||
|
||||
class BasesTuple(tuple):
|
||||
|
||||
"""Special class to handle introspection of bases tuple in __new__"""
|
||||
pass
|
||||
|
@@ -1,4 +1,5 @@
|
||||
_class_registry_cache = {}
|
||||
_field_list_cache = []
|
||||
|
||||
|
||||
def _import_class(cls_name):
|
||||
@@ -20,13 +21,16 @@ def _import_class(cls_name):
|
||||
|
||||
doc_classes = ('Document', 'DynamicEmbeddedDocument', 'EmbeddedDocument',
|
||||
'MapReduceDocument')
|
||||
field_classes = ('DictField', 'DynamicField', 'EmbeddedDocumentField',
|
||||
'FileField', 'GenericReferenceField',
|
||||
'GenericEmbeddedDocumentField', 'GeoPointField',
|
||||
'PointField', 'LineStringField', 'ListField',
|
||||
'PolygonField', 'ReferenceField', 'StringField',
|
||||
'CachedReferenceField',
|
||||
'ComplexBaseField', 'GeoJsonBaseField')
|
||||
|
||||
# Field Classes
|
||||
if not _field_list_cache:
|
||||
from mongoengine.fields import __all__ as fields
|
||||
_field_list_cache.extend(fields)
|
||||
from mongoengine.base.fields import __all__ as fields
|
||||
_field_list_cache.extend(fields)
|
||||
|
||||
field_classes = _field_list_cache
|
||||
|
||||
queryset_classes = ('OperationError',)
|
||||
deref_classes = ('DeReference',)
|
||||
|
||||
|
@@ -1,12 +1,16 @@
|
||||
import pymongo
|
||||
from pymongo import MongoClient, MongoReplicaSetClient, uri_parser
|
||||
|
||||
from pymongo import MongoClient, ReadPreference, uri_parser
|
||||
from mongoengine.python_support import IS_PYMONGO_3
|
||||
|
||||
__all__ = ['ConnectionError', 'connect', 'register_connection',
|
||||
'DEFAULT_CONNECTION_NAME']
|
||||
|
||||
|
||||
DEFAULT_CONNECTION_NAME = 'default'
|
||||
if IS_PYMONGO_3:
|
||||
READ_PREFERENCE = ReadPreference.PRIMARY
|
||||
else:
|
||||
from pymongo import MongoReplicaSetClient
|
||||
READ_PREFERENCE = False
|
||||
|
||||
|
||||
class ConnectionError(Exception):
|
||||
@@ -18,8 +22,8 @@ _connections = {}
|
||||
_dbs = {}
|
||||
|
||||
|
||||
def register_connection(alias, name, host=None, port=None,
|
||||
read_preference=False,
|
||||
def register_connection(alias, name=None, host=None, port=None,
|
||||
read_preference=READ_PREFERENCE,
|
||||
username=None, password=None, authentication_source=None,
|
||||
**kwargs):
|
||||
"""Add a connection.
|
||||
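The new signature makes `name` optional (falling back to 'test') and picks the default read preference from the installed PyMongo version. A hedged usage sketch:

    from mongoengine.connection import register_connection

    # equivalent to name='test', host='localhost', port=27017
    register_connection('default')

    # a second alias pointing at another host/database
    register_connection('reporting', name='analytics', host='db1.example.com')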
@@ -40,7 +44,7 @@ def register_connection(alias, name, host=None, port=None,
|
||||
global _connection_settings
|
||||
|
||||
conn_settings = {
|
||||
'name': name,
|
||||
'name': name or 'test',
|
||||
'host': host or 'localhost',
|
||||
'port': port or 27017,
|
||||
'read_preference': read_preference,
|
||||
@@ -58,8 +62,11 @@ def register_connection(alias, name, host=None, port=None,
|
||||
'password': uri_dict.get('password'),
|
||||
'read_preference': read_preference,
|
||||
})
|
||||
if "replicaSet" in conn_settings['host']:
|
||||
uri_options = uri_dict['options']
|
||||
if 'replicaset' in uri_options:
|
||||
conn_settings['replicaSet'] = True
|
||||
if 'authsource' in uri_options:
|
||||
conn_settings['authentication_source'] = uri_options['authsource']
|
||||
|
||||
# Deprecated parameters that should not be passed on
|
||||
kwargs.pop('slaves', None)
|
||||
@@ -107,17 +114,20 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
|
||||
# Discard replicaSet if not base string
|
||||
if not isinstance(conn_settings['replicaSet'], basestring):
|
||||
conn_settings.pop('replicaSet', None)
|
||||
connection_class = MongoReplicaSetClient
|
||||
if not IS_PYMONGO_3:
|
||||
connection_class = MongoReplicaSetClient
|
||||
|
||||
try:
|
||||
connection = None
|
||||
connection_settings_iterator = ((alias, settings.copy()) for alias, settings in _connection_settings.iteritems())
|
||||
for alias, connection_settings in connection_settings_iterator:
|
||||
# check for shared connections
|
||||
connection_settings_iterator = (
|
||||
(db_alias, settings.copy()) for db_alias, settings in _connection_settings.iteritems())
|
||||
for db_alias, connection_settings in connection_settings_iterator:
|
||||
connection_settings.pop('name', None)
|
||||
connection_settings.pop('username', None)
|
||||
connection_settings.pop('password', None)
|
||||
if conn_settings == connection_settings and _connections.get(alias, None):
|
||||
connection = _connections[alias]
|
||||
if conn_settings == connection_settings and _connections.get(db_alias, None):
|
||||
connection = _connections[db_alias]
|
||||
break
|
||||
|
||||
_connections[alias] = connection if connection else connection_class(**conn_settings)
|
||||
@@ -144,7 +154,7 @@ def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
|
||||
return _dbs[alias]
|
||||
|
||||
|
||||
def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs):
|
||||
def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs):
|
||||
"""Connect to the database specified by the 'db' argument.
|
||||
|
||||
Connection settings may be provided here as well if the database is not
|
||||
|
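With `db` now optional as well, connect() can be called with no arguments for a local test database, or with just a URI. A hedged sketch, assuming a locally reachable mongod:

    from mongoengine import connect

    connect()                                        # localhost/'test' on the 'default' alias
    connect(host='mongodb://localhost:27017/mydb')   # database name taken from the URI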
@@ -1,6 +1,9 @@
|
||||
from bson import DBRef, SON
|
||||
|
||||
from base import (BaseDict, BaseList, TopLevelDocumentMetaclass, get_document)
|
||||
from base import (
|
||||
BaseDict, BaseList, EmbeddedDocumentList,
|
||||
TopLevelDocumentMetaclass, get_document
|
||||
)
|
||||
from fields import (ReferenceField, ListField, DictField, MapField)
|
||||
from connection import get_db
|
||||
from queryset import QuerySet
|
||||
@@ -8,7 +11,6 @@ from document import Document, EmbeddedDocument
|
||||
|
||||
|
||||
class DeReference(object):
|
||||
|
||||
def __call__(self, items, max_depth=1, instance=None, name=None):
|
||||
"""
|
||||
Cheaply dereferences the items to a set depth.
|
||||
@@ -46,8 +48,8 @@ class DeReference(object):
|
||||
|
||||
if is_list and all([i.__class__ == doc_type for i in items]):
|
||||
return items
|
||||
elif not is_list and all([i.__class__ == doc_type
|
||||
for i in items.values()]):
|
||||
elif not is_list and all(
|
||||
[i.__class__ == doc_type for i in items.values()]):
|
||||
return items
|
||||
elif not field.dbref:
|
||||
if not hasattr(items, 'items'):
|
||||
@@ -98,25 +100,25 @@ class DeReference(object):
|
||||
if isinstance(item, (Document, EmbeddedDocument)):
|
||||
for field_name, field in item._fields.iteritems():
|
||||
v = item._data.get(field_name, None)
|
||||
if isinstance(v, (DBRef)):
|
||||
reference_map.setdefault(field.document_type, []).append(v.id)
|
||||
if isinstance(v, DBRef):
|
||||
reference_map.setdefault(field.document_type, set()).add(v.id)
|
||||
elif isinstance(v, (dict, SON)) and '_ref' in v:
|
||||
reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id)
|
||||
reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id)
|
||||
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
|
||||
field_cls = getattr(getattr(field, 'field', None), 'document_type', None)
|
||||
references = self._find_references(v, depth)
|
||||
for key, refs in references.iteritems():
|
||||
if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
|
||||
key = field_cls
|
||||
reference_map.setdefault(key, []).extend(refs)
|
||||
elif isinstance(item, (DBRef)):
|
||||
reference_map.setdefault(item.collection, []).append(item.id)
|
||||
reference_map.setdefault(key, set()).update(refs)
|
||||
elif isinstance(item, DBRef):
|
||||
reference_map.setdefault(item.collection, set()).add(item.id)
|
||||
elif isinstance(item, (dict, SON)) and '_ref' in item:
|
||||
reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id)
|
||||
reference_map.setdefault(get_document(item['_cls']), set()).add(item['_ref'].id)
|
||||
elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
|
||||
references = self._find_references(item, depth - 1)
|
||||
for key, refs in references.iteritems():
|
||||
reference_map.setdefault(key, []).extend(refs)
|
||||
reference_map.setdefault(key, set()).update(refs)
|
||||
|
||||
return reference_map
|
||||
|
||||
@@ -125,21 +127,25 @@ class DeReference(object):
|
||||
"""
|
||||
object_map = {}
|
||||
for collection, dbrefs in self.reference_map.iteritems():
|
||||
keys = object_map.keys()
|
||||
refs = list(set([dbref for dbref in dbrefs if unicode(dbref).encode('utf-8') not in keys]))
|
||||
if hasattr(collection, 'objects'): # We have a document class for the refs
|
||||
col_name = collection._get_collection_name()
|
||||
refs = [dbref for dbref in dbrefs
|
||||
if (col_name, dbref) not in object_map]
|
||||
references = collection.objects.in_bulk(refs)
|
||||
for key, doc in references.iteritems():
|
||||
object_map[key] = doc
|
||||
object_map[(col_name, key)] = doc
|
||||
else: # Generic reference: use the refs data to convert to document
|
||||
if isinstance(doc_type, (ListField, DictField, MapField,)):
|
||||
continue
|
||||
|
||||
refs = [dbref for dbref in dbrefs
|
||||
if (collection, dbref) not in object_map]
|
||||
|
||||
if doc_type:
|
||||
references = doc_type._get_db()[collection].find({'_id': {'$in': refs}})
|
||||
for ref in references:
|
||||
doc = doc_type._from_son(ref)
|
||||
object_map[doc.id] = doc
|
||||
object_map[(collection, doc.id)] = doc
|
||||
else:
|
||||
references = get_db()[collection].find({'_id': {'$in': refs}})
|
||||
for ref in references:
|
||||
@@ -148,10 +154,10 @@ class DeReference(object):
|
||||
elif doc_type is None:
|
||||
doc = get_document(
|
||||
''.join(x.capitalize()
|
||||
for x in collection.split('_')))._from_son(ref)
|
||||
for x in collection.split('_')))._from_son(ref)
|
||||
else:
|
||||
doc = doc_type._from_son(ref)
|
||||
object_map[doc.id] = doc
|
||||
object_map[(collection, doc.id)] = doc
|
||||
return object_map
|
||||
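Note the new shape of object_map above: fetched documents are keyed by a (collection, _id) pair rather than by _id alone, so references from different collections that happen to share an id can no longer shadow each other. Roughly (names are placeholders):

    from bson import ObjectId

    some_id = ObjectId()
    user_doc, order_doc = object(), object()   # stand-ins for fetched documents

    # before: a flat map keyed by id alone could collide across collections
    object_map = {some_id: user_doc}

    # after: the collection (or its name) is part of the key
    object_map = {('users', some_id): user_doc,
                  ('orders', some_id): order_doc}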
|
||||
def _attach_objects(self, items, depth=0, instance=None, name=None):
|
||||
@@ -177,14 +183,22 @@ class DeReference(object):
|
||||
|
||||
if isinstance(items, (dict, SON)):
|
||||
if '_ref' in items:
|
||||
return self.object_map.get(items['_ref'].id, items)
|
||||
return self.object_map.get(
|
||||
(items['_ref'].collection, items['_ref'].id), items)
|
||||
elif '_cls' in items:
|
||||
doc = get_document(items['_cls'])._from_son(items)
|
||||
_cls = doc._data.pop('_cls', None)
|
||||
del items['_cls']
|
||||
doc._data = self._attach_objects(doc._data, depth, doc, None)
|
||||
if _cls is not None:
|
||||
doc._data['_cls'] = _cls
|
||||
return doc
|
||||
|
||||
if not hasattr(items, 'items'):
|
||||
is_list = True
|
||||
list_type = BaseList
|
||||
if isinstance(items, EmbeddedDocumentList):
|
||||
list_type = EmbeddedDocumentList
|
||||
as_tuple = isinstance(items, tuple)
|
||||
iterator = enumerate(items)
|
||||
data = []
|
||||
@@ -205,23 +219,24 @@ class DeReference(object):
|
||||
elif isinstance(v, (Document, EmbeddedDocument)):
|
||||
for field_name, field in v._fields.iteritems():
|
||||
v = data[k]._data.get(field_name, None)
|
||||
if isinstance(v, (DBRef)):
|
||||
data[k]._data[field_name] = self.object_map.get(v.id, v)
|
||||
if isinstance(v, DBRef):
|
||||
data[k]._data[field_name] = self.object_map.get(
|
||||
(v.collection, v.id), v)
|
||||
elif isinstance(v, (dict, SON)) and '_ref' in v:
|
||||
data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v)
|
||||
elif isinstance(v, dict) and depth <= self.max_depth:
|
||||
data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
|
||||
elif isinstance(v, (list, tuple)) and depth <= self.max_depth:
|
||||
data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
|
||||
data[k]._data[field_name] = self.object_map.get(
|
||||
(v['_ref'].collection, v['_ref'].id), v)
|
||||
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
|
||||
item_name = "{0}.{1}.{2}".format(name, k, field_name)
|
||||
data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=item_name)
|
||||
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
|
||||
item_name = '%s.%s' % (name, k) if name else name
|
||||
data[k] = self._attach_objects(v, depth - 1, instance=instance, name=item_name)
|
||||
elif hasattr(v, 'id'):
|
||||
data[k] = self.object_map.get(v.id, v)
|
||||
data[k] = self.object_map.get((v.collection, v.id), v)
|
||||
|
||||
if instance and name:
|
||||
if is_list:
|
||||
return tuple(data) if as_tuple else BaseList(data, instance, name)
|
||||
return tuple(data) if as_tuple else list_type(data, instance, name)
|
||||
return BaseDict(data, instance, name)
|
||||
depth += 1
|
||||
return data
|
||||
|
@@ -1,412 +0,0 @@
|
||||
from mongoengine import *
|
||||
|
||||
from django.utils.encoding import smart_str
|
||||
from django.contrib.auth.models import _user_has_perm, _user_get_all_permissions, _user_has_module_perms
|
||||
from django.db import models
|
||||
from django.contrib.contenttypes.models import ContentTypeManager
|
||||
from django.contrib import auth
|
||||
from django.contrib.auth.models import AnonymousUser
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from .utils import datetime_now
|
||||
|
||||
REDIRECT_FIELD_NAME = 'next'
|
||||
|
||||
try:
|
||||
from django.contrib.auth.hashers import check_password, make_password
|
||||
except ImportError:
|
||||
"""Handle older versions of Django"""
|
||||
from django.utils.hashcompat import md5_constructor, sha_constructor
|
||||
|
||||
def get_hexdigest(algorithm, salt, raw_password):
|
||||
raw_password, salt = smart_str(raw_password), smart_str(salt)
|
||||
if algorithm == 'md5':
|
||||
return md5_constructor(salt + raw_password).hexdigest()
|
||||
elif algorithm == 'sha1':
|
||||
return sha_constructor(salt + raw_password).hexdigest()
|
||||
raise ValueError('Got unknown password algorithm type in password')
|
||||
|
||||
def check_password(raw_password, password):
|
||||
algo, salt, hash = password.split('$')
|
||||
return hash == get_hexdigest(algo, salt, raw_password)
|
||||
|
||||
def make_password(raw_password):
|
||||
from random import random
|
||||
algo = 'sha1'
|
||||
salt = get_hexdigest(algo, str(random()), str(random()))[:5]
|
||||
hash = get_hexdigest(algo, salt, raw_password)
|
||||
return '%s$%s$%s' % (algo, salt, hash)
|
||||
|
||||
|
||||
class ContentType(Document):
|
||||
name = StringField(max_length=100)
|
||||
app_label = StringField(max_length=100)
|
||||
model = StringField(max_length=100, verbose_name=_('python model class name'),
|
||||
unique_with='app_label')
|
||||
objects = ContentTypeManager()
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('content type')
|
||||
verbose_name_plural = _('content types')
|
||||
# db_table = 'django_content_type'
|
||||
# ordering = ('name',)
|
||||
# unique_together = (('app_label', 'model'),)
|
||||
|
||||
def __unicode__(self):
|
||||
return self.name
|
||||
|
||||
def model_class(self):
|
||||
"Returns the Python model class for this type of content."
|
||||
from django.db import models
|
||||
return models.get_model(self.app_label, self.model)
|
||||
|
||||
def get_object_for_this_type(self, **kwargs):
|
||||
"""
|
||||
Returns an object of this type for the keyword arguments given.
|
||||
Basically, this is a proxy around this object_type's get_object() model
|
||||
method. The ObjectNotExist exception, if thrown, will not be caught,
|
||||
so code that calls this method should catch it.
|
||||
"""
|
||||
return self.model_class()._default_manager.using(self._state.db).get(**kwargs)
|
||||
|
||||
def natural_key(self):
|
||||
return (self.app_label, self.model)
|
||||
|
||||
|
||||
class SiteProfileNotAvailable(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class PermissionManager(models.Manager):
|
||||
def get_by_natural_key(self, codename, app_label, model):
|
||||
return self.get(
|
||||
codename=codename,
|
||||
content_type=ContentType.objects.get_by_natural_key(app_label, model)
|
||||
)
|
||||
|
||||
|
||||
class Permission(Document):
|
||||
"""The permissions system provides a way to assign permissions to specific
|
||||
users and groups of users.
|
||||
|
||||
The permission system is used by the Django admin site, but may also be
|
||||
useful in your own code. The Django admin site uses permissions as follows:
|
||||
|
||||
- The "add" permission limits the user's ability to view the "add"
|
||||
form and add an object.
|
||||
- The "change" permission limits a user's ability to view the change
|
||||
list, view the "change" form and change an object.
|
||||
- The "delete" permission limits the ability to delete an object.
|
||||
|
||||
Permissions are set globally per type of object, not per specific object
|
||||
instance. It is possible to say "Mary may change news stories," but it's
|
||||
not currently possible to say "Mary may change news stories, but only the
|
||||
ones she created herself" or "Mary may only change news stories that have
|
||||
a certain status or publication date."
|
||||
|
||||
Three basic permissions -- add, change and delete -- are automatically
|
||||
created for each Django model.
|
||||
"""
|
||||
name = StringField(max_length=50, verbose_name=_('username'))
|
||||
content_type = ReferenceField(ContentType)
|
||||
codename = StringField(max_length=100, verbose_name=_('codename'))
|
||||
# FIXME: don't access field of the other class
|
||||
# unique_with=['content_type__app_label', 'content_type__model'])
|
||||
|
||||
objects = PermissionManager()
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('permission')
|
||||
verbose_name_plural = _('permissions')
|
||||
# unique_together = (('content_type', 'codename'),)
|
||||
# ordering = ('content_type__app_label', 'content_type__model', 'codename')
|
||||
|
||||
def __unicode__(self):
|
||||
return u"%s | %s | %s" % (
|
||||
unicode(self.content_type.app_label),
|
||||
unicode(self.content_type),
|
||||
unicode(self.name))
|
||||
|
||||
def natural_key(self):
|
||||
return (self.codename,) + self.content_type.natural_key()
|
||||
natural_key.dependencies = ['contenttypes.contenttype']
|
||||
|
||||
|
||||
class Group(Document):
|
||||
"""Groups are a generic way of categorizing users to apply permissions,
|
||||
or some other label, to those users. A user can belong to any number of
|
||||
groups.
|
||||
|
||||
A user in a group automatically has all the permissions granted to that
|
||||
group. For example, if the group Site editors has the permission
|
||||
can_edit_home_page, any user in that group will have that permission.
|
||||
|
||||
Beyond permissions, groups are a convenient way to categorize users to
|
||||
apply some label, or extended functionality, to them. For example, you
|
||||
could create a group 'Special users', and you could write code that would
|
||||
do special things to those users -- such as giving them access to a
|
||||
members-only portion of your site, or sending them members-only
|
||||
e-mail messages.
|
||||
"""
|
||||
name = StringField(max_length=80, unique=True, verbose_name=_('name'))
|
||||
permissions = ListField(ReferenceField(Permission, verbose_name=_('permissions'), required=False))
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('group')
|
||||
verbose_name_plural = _('groups')
|
||||
|
||||
def __unicode__(self):
|
||||
return self.name
|
||||
|
||||
|
||||
class UserManager(models.Manager):
|
||||
def create_user(self, username, email, password=None):
|
||||
"""
|
||||
Creates and saves a User with the given username, e-mail and password.
|
||||
"""
|
||||
now = datetime_now()
|
||||
|
||||
# Normalize the address by lowercasing the domain part of the email
|
||||
# address.
|
||||
try:
|
||||
email_name, domain_part = email.strip().split('@', 1)
|
||||
except ValueError:
|
||||
pass
|
||||
else:
|
||||
email = '@'.join([email_name, domain_part.lower()])
|
||||
|
||||
user = self.model(username=username, email=email, is_staff=False,
|
||||
is_active=True, is_superuser=False, last_login=now,
|
||||
date_joined=now)
|
||||
|
||||
user.set_password(password)
|
||||
user.save(using=self._db)
|
||||
return user
|
||||
|
||||
def create_superuser(self, username, email, password):
|
||||
u = self.create_user(username, email, password)
|
||||
u.is_staff = True
|
||||
u.is_active = True
|
||||
u.is_superuser = True
|
||||
u.save(using=self._db)
|
||||
return u
|
||||
|
||||
def make_random_password(self, length=10, allowed_chars='abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23456789'):
|
||||
"Generates a random password with the given length and given allowed_chars"
|
||||
# Note that default value of allowed_chars does not have "I" or letters
|
||||
# that look like it -- just to avoid confusion.
|
||||
from random import choice
|
||||
return ''.join([choice(allowed_chars) for i in range(length)])
|
||||
|
||||
|
||||
class User(Document):
|
||||
"""A User document that aims to mirror most of the API specified by Django
|
||||
at http://docs.djangoproject.com/en/dev/topics/auth/#users
|
||||
"""
|
||||
username = StringField(max_length=30, required=True,
|
||||
verbose_name=_('username'),
|
||||
help_text=_("Required. 30 characters or fewer. Letters, numbers and @/./+/-/_ characters"))
|
||||
|
||||
first_name = StringField(max_length=30,
|
||||
verbose_name=_('first name'))
|
||||
|
||||
last_name = StringField(max_length=30,
|
||||
verbose_name=_('last name'))
|
||||
email = EmailField(verbose_name=_('e-mail address'))
|
||||
password = StringField(max_length=128,
|
||||
verbose_name=_('password'),
|
||||
help_text=_("Use '[algo]$[iterations]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>."))
|
||||
is_staff = BooleanField(default=False,
|
||||
verbose_name=_('staff status'),
|
||||
help_text=_("Designates whether the user can log into this admin site."))
|
||||
is_active = BooleanField(default=True,
|
||||
verbose_name=_('active'),
|
||||
help_text=_("Designates whether this user should be treated as active. Unselect this instead of deleting accounts."))
|
||||
is_superuser = BooleanField(default=False,
|
||||
verbose_name=_('superuser status'),
|
||||
help_text=_("Designates that this user has all permissions without explicitly assigning them."))
|
||||
last_login = DateTimeField(default=datetime_now,
|
||||
verbose_name=_('last login'))
|
||||
date_joined = DateTimeField(default=datetime_now,
|
||||
verbose_name=_('date joined'))
|
||||
|
||||
user_permissions = ListField(ReferenceField(Permission), verbose_name=_('user permissions'),
|
||||
help_text=_('Permissions for the user.'))
|
||||
|
||||
USERNAME_FIELD = 'username'
|
||||
REQUIRED_FIELDS = ['email']
|
||||
|
||||
meta = {
|
||||
'allow_inheritance': True,
|
||||
'indexes': [
|
||||
{'fields': ['username'], 'unique': True, 'sparse': True}
|
||||
]
|
||||
}
|
||||
|
||||
def __unicode__(self):
|
||||
return self.username
|
||||
|
||||
def get_full_name(self):
|
||||
"""Returns the users first and last names, separated by a space.
|
||||
"""
|
||||
full_name = u'%s %s' % (self.first_name or '', self.last_name or '')
|
||||
return full_name.strip()
|
||||
|
||||
def is_anonymous(self):
|
||||
return False
|
||||
|
||||
def is_authenticated(self):
|
||||
return True
|
||||
|
||||
def set_password(self, raw_password):
|
||||
"""Sets the user's password - always use this rather than directly
|
||||
assigning to :attr:`~mongoengine.django.auth.User.password` as the
|
||||
password is hashed before storage.
|
||||
"""
|
||||
self.password = make_password(raw_password)
|
||||
self.save()
|
||||
return self
|
||||
|
||||
def check_password(self, raw_password):
|
||||
"""Checks the user's password against a provided password - always use
|
||||
this rather than directly comparing to
|
||||
:attr:`~mongoengine.django.auth.User.password` as the password is
|
||||
hashed before storage.
|
||||
"""
|
||||
return check_password(raw_password, self.password)
|
||||
|
||||
@classmethod
|
||||
def create_user(cls, username, password, email=None):
|
||||
"""Create (and save) a new user with the given username, password and
|
||||
email address.
|
||||
"""
|
||||
now = datetime_now()
|
||||
|
||||
# Normalize the address by lowercasing the domain part of the email
|
||||
# address.
|
||||
if email is not None:
|
||||
try:
|
||||
email_name, domain_part = email.strip().split('@', 1)
|
||||
except ValueError:
|
||||
pass
|
||||
else:
|
||||
email = '@'.join([email_name, domain_part.lower()])
|
||||
|
||||
user = cls(username=username, email=email, date_joined=now)
|
||||
user.set_password(password)
|
||||
user.save()
|
||||
return user
|
||||
|
||||
def get_group_permissions(self, obj=None):
|
||||
"""
|
||||
Returns a list of permission strings that this user has through his/her
|
||||
groups. This method queries all available auth backends. If an object
|
||||
is passed in, only permissions matching this object are returned.
|
||||
"""
|
||||
permissions = set()
|
||||
for backend in auth.get_backends():
|
||||
if hasattr(backend, "get_group_permissions"):
|
||||
permissions.update(backend.get_group_permissions(self, obj))
|
||||
return permissions
|
||||
|
||||
def get_all_permissions(self, obj=None):
|
||||
return _user_get_all_permissions(self, obj)
|
||||
|
||||
def has_perm(self, perm, obj=None):
|
||||
"""
|
||||
Returns True if the user has the specified permission. This method
|
||||
queries all available auth backends, but returns immediately if any
|
||||
backend returns True. Thus, a user who has permission from a single
|
||||
auth backend is assumed to have permission in general. If an object is
|
||||
provided, permissions for this specific object are checked.
|
||||
"""
|
||||
|
||||
# Active superusers have all permissions.
|
||||
if self.is_active and self.is_superuser:
|
||||
return True
|
||||
|
||||
# Otherwise we need to check the backends.
|
||||
return _user_has_perm(self, perm, obj)
|
||||
|
||||
def has_module_perms(self, app_label):
|
||||
"""
|
||||
Returns True if the user has any permissions in the given app label.
|
||||
Uses pretty much the same logic as has_perm, above.
|
||||
"""
|
||||
# Active superusers have all permissions.
|
||||
if self.is_active and self.is_superuser:
|
||||
return True
|
||||
|
||||
return _user_has_module_perms(self, app_label)
|
||||
|
||||
def email_user(self, subject, message, from_email=None):
|
||||
"Sends an e-mail to this User."
|
||||
from django.core.mail import send_mail
|
||||
send_mail(subject, message, from_email, [self.email])
|
||||
|
||||
def get_profile(self):
|
||||
"""
|
||||
Returns site-specific profile for this user. Raises
|
||||
SiteProfileNotAvailable if this site does not allow profiles.
|
||||
"""
|
||||
if not hasattr(self, '_profile_cache'):
|
||||
from django.conf import settings
|
||||
if not getattr(settings, 'AUTH_PROFILE_MODULE', False):
|
||||
raise SiteProfileNotAvailable('You need to set AUTH_PROFILE_MO'
|
||||
'DULE in your project settings')
|
||||
try:
|
||||
app_label, model_name = settings.AUTH_PROFILE_MODULE.split('.')
|
||||
except ValueError:
|
||||
raise SiteProfileNotAvailable('app_label and model_name should'
|
||||
' be separated by a dot in the AUTH_PROFILE_MODULE set'
|
||||
'ting')
|
||||
|
||||
try:
|
||||
model = models.get_model(app_label, model_name)
|
||||
if model is None:
|
||||
raise SiteProfileNotAvailable('Unable to load the profile '
|
||||
'model, check AUTH_PROFILE_MODULE in your project sett'
|
||||
'ings')
|
||||
self._profile_cache = model._default_manager.using(self._state.db).get(user__id__exact=self.id)
|
||||
self._profile_cache.user = self
|
||||
except (ImportError, ImproperlyConfigured):
|
||||
raise SiteProfileNotAvailable
|
||||
return self._profile_cache
|
||||
|
||||
|
||||
class MongoEngineBackend(object):
|
||||
"""Authenticate using MongoEngine and mongoengine.django.auth.User.
|
||||
"""
|
||||
|
||||
supports_object_permissions = False
|
||||
supports_anonymous_user = False
|
||||
supports_inactive_user = False
|
||||
_user_doc = False
|
||||
|
||||
def authenticate(self, username=None, password=None):
|
||||
user = self.user_document.objects(username=username).first()
|
||||
if user:
|
||||
if password and user.check_password(password):
|
||||
backend = auth.get_backends()[0]
|
||||
user.backend = "%s.%s" % (backend.__module__, backend.__class__.__name__)
|
||||
return user
|
||||
return None
|
||||
|
||||
def get_user(self, user_id):
|
||||
return self.user_document.objects.with_id(user_id)
|
||||
|
||||
@property
|
||||
def user_document(self):
|
||||
if self._user_doc is False:
|
||||
from .mongo_auth.models import get_user_document
|
||||
self._user_doc = get_user_document()
|
||||
return self._user_doc
|
||||
|
||||
def get_user(userid):
|
||||
"""Returns a User object from an id (User.id). Django's equivalent takes
|
||||
request, but taking an id instead leaves it up to the developer to store
|
||||
the id in any way they want (session, signed cookie, etc.)
|
||||
"""
|
||||
if not userid:
|
||||
return AnonymousUser()
|
||||
return MongoEngineBackend().get_user(userid) or AnonymousUser()
|
@@ -1,115 +0,0 @@
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.hashers import make_password
|
||||
from django.contrib.auth.models import UserManager
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.db import models
|
||||
from django.utils.importlib import import_module
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
|
||||
__all__ = (
|
||||
'get_user_document',
|
||||
)
|
||||
|
||||
|
||||
MONGOENGINE_USER_DOCUMENT = getattr(
|
||||
settings, 'MONGOENGINE_USER_DOCUMENT', 'mongoengine.django.auth.User')
|
||||
|
||||
|
||||
def get_user_document():
|
||||
"""Get the user document class used for authentication.
|
||||
|
||||
This is the class defined in settings.MONGOENGINE_USER_DOCUMENT, which
|
||||
defaults to `mongoengine.django.auth.User`.
|
||||
|
||||
"""
|
||||
|
||||
name = MONGOENGINE_USER_DOCUMENT
|
||||
dot = name.rindex('.')
|
||||
module = import_module(name[:dot])
|
||||
return getattr(module, name[dot + 1:])
|
||||
|
||||
|
||||
class MongoUserManager(UserManager):
|
||||
"""A User manager wich allows the use of MongoEngine documents in Django.
|
||||
|
||||
To use the manager, you must tell django.contrib.auth to use MongoUser as
|
||||
the user model. In you settings.py, you need:
|
||||
|
||||
INSTALLED_APPS = (
|
||||
...
|
||||
'django.contrib.auth',
|
||||
'mongoengine.django.mongo_auth',
|
||||
...
|
||||
)
|
||||
AUTH_USER_MODEL = 'mongo_auth.MongoUser'
|
||||
|
||||
Django will use the model object to access the custom Manager, which will
|
||||
replace the original queryset with MongoEngine querysets.
|
||||
|
||||
By default, mongoengine.django.auth.User will be used to store users. You
|
||||
can specify another document class in MONGOENGINE_USER_DOCUMENT in your
|
||||
settings.py.
|
||||
|
||||
The User Document class has the same requirements as a standard custom user
|
||||
model: https://docs.djangoproject.com/en/dev/topics/auth/customizing/
|
||||
|
||||
In particular, the User Document class must define USERNAME_FIELD and
|
||||
REQUIRED_FIELDS.
|
||||
|
||||
`AUTH_USER_MODEL` has been added in Django 1.5.
|
||||
|
||||
"""
|
||||
|
||||
def contribute_to_class(self, model, name):
|
||||
super(MongoUserManager, self).contribute_to_class(model, name)
|
||||
self.dj_model = self.model
|
||||
self.model = get_user_document()
|
||||
|
||||
self.dj_model.USERNAME_FIELD = self.model.USERNAME_FIELD
|
||||
username = models.CharField(_('username'), max_length=30, unique=True)
|
||||
username.contribute_to_class(self.dj_model, self.dj_model.USERNAME_FIELD)
|
||||
|
||||
self.dj_model.REQUIRED_FIELDS = self.model.REQUIRED_FIELDS
|
||||
for name in self.dj_model.REQUIRED_FIELDS:
|
||||
field = models.CharField(_(name), max_length=30)
|
||||
field.contribute_to_class(self.dj_model, name)
|
||||
|
||||
|
||||
def get(self, *args, **kwargs):
|
||||
try:
|
||||
return self.get_query_set().get(*args, **kwargs)
|
||||
except self.model.DoesNotExist:
|
||||
# ModelBackend expects this exception
|
||||
raise self.dj_model.DoesNotExist
|
||||
|
||||
@property
|
||||
def db(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def get_empty_query_set(self):
|
||||
return self.model.objects.none()
|
||||
|
||||
def get_query_set(self):
|
||||
return self.model.objects
|
||||
|
||||
|
||||
class MongoUser(models.Model):
|
||||
""""Dummy user model for Django.
|
||||
|
||||
MongoUser is used to replace Django's UserManager with MongoUserManager.
|
||||
The actual user document class is mongoengine.django.auth.User or any
|
||||
other document class specified in MONGOENGINE_USER_DOCUMENT.
|
||||
|
||||
To get the user document class, use `get_user_document()`.
|
||||
|
||||
"""
|
||||
|
||||
objects = MongoUserManager()
|
||||
|
||||
class Meta:
|
||||
app_label = 'mongo_auth'
|
||||
|
||||
def set_password(self, password):
|
||||
"""Doesn't do anything, but works around the issue with Django 1.6."""
|
||||
make_password(password)
|
@@ -1,124 +0,0 @@
|
||||
from bson import json_util
|
||||
from django.conf import settings
|
||||
from django.contrib.sessions.backends.base import SessionBase, CreateError
|
||||
from django.core.exceptions import SuspiciousOperation
|
||||
try:
|
||||
from django.utils.encoding import force_unicode
|
||||
except ImportError:
|
||||
from django.utils.encoding import force_text as force_unicode
|
||||
|
||||
from mongoengine.document import Document
|
||||
from mongoengine import fields
|
||||
from mongoengine.queryset import OperationError
|
||||
from mongoengine.connection import DEFAULT_CONNECTION_NAME
|
||||
|
||||
from .utils import datetime_now
|
||||
|
||||
|
||||
MONGOENGINE_SESSION_DB_ALIAS = getattr(
|
||||
settings, 'MONGOENGINE_SESSION_DB_ALIAS',
|
||||
DEFAULT_CONNECTION_NAME)
|
||||
|
||||
# a setting for the name of the collection used to store sessions
|
||||
MONGOENGINE_SESSION_COLLECTION = getattr(
|
||||
settings, 'MONGOENGINE_SESSION_COLLECTION',
|
||||
'django_session')
|
||||
|
||||
# a setting for whether session data is stored encoded or not
|
||||
MONGOENGINE_SESSION_DATA_ENCODE = getattr(
|
||||
settings, 'MONGOENGINE_SESSION_DATA_ENCODE',
|
||||
True)
|
||||
|
||||
|
||||
class MongoSession(Document):
|
||||
session_key = fields.StringField(primary_key=True, max_length=40)
|
||||
session_data = fields.StringField() if MONGOENGINE_SESSION_DATA_ENCODE \
|
||||
else fields.DictField()
|
||||
expire_date = fields.DateTimeField()
|
||||
|
||||
meta = {
|
||||
'collection': MONGOENGINE_SESSION_COLLECTION,
|
||||
'db_alias': MONGOENGINE_SESSION_DB_ALIAS,
|
||||
'allow_inheritance': False,
|
||||
'indexes': [
|
||||
{
|
||||
'fields': ['expire_date'],
|
||||
'expireAfterSeconds': 0
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
def get_decoded(self):
|
||||
return SessionStore().decode(self.session_data)
|
||||
|
||||
|
||||
class SessionStore(SessionBase):
|
||||
"""A MongoEngine-based session store for Django.
|
||||
"""
|
||||
|
||||
def _get_session(self, *args, **kwargs):
|
||||
sess = super(SessionStore, self)._get_session(*args, **kwargs)
|
||||
if sess.get('_auth_user_id', None):
|
||||
sess['_auth_user_id'] = str(sess.get('_auth_user_id'))
|
||||
return sess
|
||||
|
||||
def load(self):
|
||||
try:
|
||||
s = MongoSession.objects(session_key=self.session_key,
|
||||
expire_date__gt=datetime_now)[0]
|
||||
if MONGOENGINE_SESSION_DATA_ENCODE:
|
||||
return self.decode(force_unicode(s.session_data))
|
||||
else:
|
||||
return s.session_data
|
||||
except (IndexError, SuspiciousOperation):
|
||||
self.create()
|
||||
return {}
|
||||
|
||||
def exists(self, session_key):
|
||||
return bool(MongoSession.objects(session_key=session_key).first())
|
||||
|
||||
def create(self):
|
||||
while True:
|
||||
self._session_key = self._get_new_session_key()
|
||||
try:
|
||||
self.save(must_create=True)
|
||||
except CreateError:
|
||||
continue
|
||||
self.modified = True
|
||||
self._session_cache = {}
|
||||
return
|
||||
|
||||
def save(self, must_create=False):
|
||||
if self.session_key is None:
|
||||
self._session_key = self._get_new_session_key()
|
||||
s = MongoSession(session_key=self.session_key)
|
||||
if MONGOENGINE_SESSION_DATA_ENCODE:
|
||||
s.session_data = self.encode(self._get_session(no_load=must_create))
|
||||
else:
|
||||
s.session_data = self._get_session(no_load=must_create)
|
||||
s.expire_date = self.get_expiry_date()
|
||||
try:
|
||||
s.save(force_insert=must_create)
|
||||
except OperationError:
|
||||
if must_create:
|
||||
raise CreateError
|
||||
raise
|
||||
|
||||
def delete(self, session_key=None):
|
||||
if session_key is None:
|
||||
if self.session_key is None:
|
||||
return
|
||||
session_key = self.session_key
|
||||
MongoSession.objects(session_key=session_key).delete()
|
||||
|
||||
|
||||
class BSONSerializer(object):
|
||||
"""
|
||||
Serializer that can handle BSON types (eg ObjectId).
|
||||
"""
|
||||
def dumps(self, obj):
|
||||
return json_util.dumps(obj, separators=(',', ':')).encode('ascii')
|
||||
|
||||
def loads(self, data):
|
||||
return json_util.loads(data.decode('ascii'))
|
||||
|
@@ -1,47 +0,0 @@
|
||||
from mongoengine.queryset import QuerySet
|
||||
from mongoengine.base import BaseDocument
|
||||
from mongoengine.errors import ValidationError
|
||||
|
||||
def _get_queryset(cls):
|
||||
"""Inspired by django.shortcuts.*"""
|
||||
if isinstance(cls, QuerySet):
|
||||
return cls
|
||||
else:
|
||||
return cls.objects
|
||||
|
||||
def get_document_or_404(cls, *args, **kwargs):
|
||||
"""
|
||||
Uses get() to return an document, or raises a Http404 exception if the document
|
||||
does not exist.
|
||||
|
||||
cls may be a Document or QuerySet object. All other passed
|
||||
arguments and keyword arguments are used in the get() query.
|
||||
|
||||
Note: Like with get(), an MultipleObjectsReturned will be raised if more than one
|
||||
object is found.
|
||||
|
||||
Inspired by django.shortcuts.*
|
||||
"""
|
||||
queryset = _get_queryset(cls)
|
||||
try:
|
||||
return queryset.get(*args, **kwargs)
|
||||
except (queryset._document.DoesNotExist, ValidationError):
|
||||
from django.http import Http404
|
||||
raise Http404('No %s matches the given query.' % queryset._document._class_name)
|
||||
|
||||
def get_list_or_404(cls, *args, **kwargs):
|
||||
"""
|
||||
Uses filter() to return a list of documents, or raise a Http404 exception if
|
||||
the list is empty.
|
||||
|
||||
cls may be a Document or QuerySet object. All other passed
|
||||
arguments and keyword arguments are used in the filter() query.
|
||||
|
||||
Inspired by django.shortcuts.*
|
||||
"""
|
||||
queryset = _get_queryset(cls)
|
||||
obj_list = list(queryset.filter(*args, **kwargs))
|
||||
if not obj_list:
|
||||
from django.http import Http404
|
||||
raise Http404('No %s matches the given query.' % queryset._document._class_name)
|
||||
return obj_list
|
@@ -1,112 +0,0 @@
|
||||
import os
|
||||
import itertools
|
||||
import urlparse
|
||||
|
||||
from mongoengine import *
|
||||
from django.conf import settings
|
||||
from django.core.files.storage import Storage
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
|
||||
|
||||
class FileDocument(Document):
|
||||
"""A document used to store a single file in GridFS.
|
||||
"""
|
||||
file = FileField()
|
||||
|
||||
|
||||
class GridFSStorage(Storage):
|
||||
"""A custom storage backend to store files in GridFS
|
||||
"""
|
||||
|
||||
def __init__(self, base_url=None):
|
||||
|
||||
if base_url is None:
|
||||
base_url = settings.MEDIA_URL
|
||||
self.base_url = base_url
|
||||
self.document = FileDocument
|
||||
self.field = 'file'
|
||||
|
||||
def delete(self, name):
|
||||
"""Deletes the specified file from the storage system.
|
||||
"""
|
||||
if self.exists(name):
|
||||
doc = self.document.objects.first()
|
||||
field = getattr(doc, self.field)
|
||||
self._get_doc_with_name(name).delete() # Delete the FileField
|
||||
field.delete() # Delete the FileDocument
|
||||
|
||||
def exists(self, name):
|
||||
"""Returns True if a file referened by the given name already exists in the
|
||||
storage system, or False if the name is available for a new file.
|
||||
"""
|
||||
doc = self._get_doc_with_name(name)
|
||||
if doc:
|
||||
field = getattr(doc, self.field)
|
||||
return bool(field.name)
|
||||
else:
|
||||
return False
|
||||
|
||||
def listdir(self, path=None):
|
||||
"""Lists the contents of the specified path, returning a 2-tuple of lists;
|
||||
the first item being directories, the second item being files.
|
||||
"""
|
||||
def name(doc):
|
||||
return getattr(doc, self.field).name
|
||||
docs = self.document.objects
|
||||
return [], [name(d) for d in docs if name(d)]
|
||||
|
||||
def size(self, name):
|
||||
"""Returns the total size, in bytes, of the file specified by name.
|
||||
"""
|
||||
doc = self._get_doc_with_name(name)
|
||||
if doc:
|
||||
return getattr(doc, self.field).length
|
||||
else:
|
||||
raise ValueError("No such file or directory: '%s'" % name)
|
||||
|
||||
def url(self, name):
|
||||
"""Returns an absolute URL where the file's contents can be accessed
|
||||
directly by a web browser.
|
||||
"""
|
||||
if self.base_url is None:
|
||||
raise ValueError("This file is not accessible via a URL.")
|
||||
return urlparse.urljoin(self.base_url, name).replace('\\', '/')
|
||||
|
||||
def _get_doc_with_name(self, name):
|
||||
"""Find the documents in the store with the given name
|
||||
"""
|
||||
docs = self.document.objects
|
||||
doc = [d for d in docs if hasattr(getattr(d, self.field), 'name') and getattr(d, self.field).name == name]
|
||||
if doc:
|
||||
return doc[0]
|
||||
else:
|
||||
return None
|
||||
|
||||
def _open(self, name, mode='rb'):
|
||||
doc = self._get_doc_with_name(name)
|
||||
if doc:
|
||||
return getattr(doc, self.field)
|
||||
else:
|
||||
raise ValueError("No file found with the name '%s'." % name)
|
||||
|
||||
def get_available_name(self, name):
|
||||
"""Returns a filename that's free on the target storage system, and
|
||||
available for new content to be written to.
|
||||
"""
|
||||
file_root, file_ext = os.path.splitext(name)
|
||||
# If the filename already exists, add an underscore and a number (before
|
||||
# the file extension, if one exists) to the filename until the generated
|
||||
# filename doesn't exist.
|
||||
count = itertools.count(1)
|
||||
while self.exists(name):
|
||||
# file_ext includes the dot.
|
||||
name = os.path.join("%s_%s%s" % (file_root, count.next(), file_ext))
|
||||
|
||||
return name
|
||||
|
||||
def _save(self, name, content):
|
||||
doc = self.document()
|
||||
getattr(doc, self.field).put(content, filename=name)
|
||||
doc.save()
|
||||
|
||||
return name
|
@@ -1,31 +0,0 @@
|
||||
#coding: utf-8
|
||||
|
||||
from unittest import TestCase
|
||||
|
||||
from mongoengine import connect
|
||||
from mongoengine.connection import get_db
|
||||
|
||||
|
||||
class MongoTestCase(TestCase):
|
||||
"""
|
||||
TestCase class that clear the collection between the tests
|
||||
"""
|
||||
|
||||
@property
|
||||
def db_name(self):
|
||||
from django.conf import settings
|
||||
return 'test_%s' % getattr(settings, 'MONGO_DATABASE_NAME', 'dummy')
|
||||
|
||||
def __init__(self, methodName='runtest'):
|
||||
connect(self.db_name)
|
||||
self.db = get_db()
|
||||
super(MongoTestCase, self).__init__(methodName)
|
||||
|
||||
def dropCollections(self):
|
||||
for collection in self.db.collection_names():
|
||||
if collection == 'system.indexes':
|
||||
continue
|
||||
self.db.drop_collection(collection)
|
||||
|
||||
def tearDown(self):
|
||||
self.dropCollections()
|
@@ -1,6 +0,0 @@
|
||||
try:
|
||||
# django >= 1.4
|
||||
from django.utils.timezone import now as datetime_now
|
||||
except ImportError:
|
||||
from datetime import datetime
|
||||
datetime_now = datetime.now
|
@@ -1,18 +1,23 @@
|
||||
import warnings
|
||||
|
||||
import hashlib
|
||||
import pymongo
|
||||
import re
|
||||
|
||||
from pymongo.read_preferences import ReadPreference
|
||||
from bson import ObjectId
|
||||
from bson.dbref import DBRef
|
||||
from mongoengine import signals
|
||||
from mongoengine.common import _import_class
|
||||
from mongoengine.base import (DocumentMetaclass, TopLevelDocumentMetaclass,
|
||||
BaseDocument, BaseDict, BaseList,
|
||||
ALLOW_INHERITANCE, get_document)
|
||||
from mongoengine.errors import ValidationError
|
||||
from mongoengine.base import (
|
||||
DocumentMetaclass,
|
||||
TopLevelDocumentMetaclass,
|
||||
BaseDocument,
|
||||
BaseDict,
|
||||
BaseList,
|
||||
EmbeddedDocumentList,
|
||||
ALLOW_INHERITANCE,
|
||||
get_document
|
||||
)
|
||||
from mongoengine.errors import InvalidQueryError, InvalidDocumentError
|
||||
from mongoengine.python_support import IS_PYMONGO_3
|
||||
from mongoengine.queryset import (OperationError, NotUniqueError,
|
||||
QuerySet, transform)
|
||||
from mongoengine.connection import get_db, DEFAULT_CONNECTION_NAME
|
||||
@@ -41,7 +46,6 @@ class InvalidCollectionError(Exception):
|
||||
|
||||
|
||||
class EmbeddedDocument(BaseDocument):
|
||||
|
||||
"""A :class:`~mongoengine.Document` that isn't stored in its own
|
||||
collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as
|
||||
fields on :class:`~mongoengine.Document`\ s through the
|
||||
@@ -56,7 +60,7 @@ class EmbeddedDocument(BaseDocument):
|
||||
dictionary.
|
||||
"""
|
||||
|
||||
__slots__ = ('_instance')
|
||||
__slots__ = ('_instance', )
|
||||
|
||||
# The __metaclass__ attribute is removed by 2to3 when running with Python3
|
||||
# my_metaclass is defined so that metaclass can be queried in Python 2 & 3
|
||||
@@ -76,9 +80,14 @@ class EmbeddedDocument(BaseDocument):
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
self._instance.save(*args, **kwargs)
|
||||
|
||||
def reload(self, *args, **kwargs):
|
||||
self._instance.reload(*args, **kwargs)
|
||||
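save() and reload() on an embedded document are new conveniences that simply delegate to the owning document via self._instance, so it is the parent that actually hits the database. A hedged sketch (models are made up):

    from mongoengine import (Document, EmbeddedDocument, EmbeddedDocumentField,
                             ListField, StringField)

    class Comment(EmbeddedDocument):
        text = StringField()

    class Post(Document):
        comments = ListField(EmbeddedDocumentField(Comment))

    post = Post(comments=[Comment(text='first')]).save()
    post.comments[0].text = 'edited'
    post.comments[0].save()    # delegates to post.save()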
|
||||
|
||||
class Document(BaseDocument):
|
||||
|
||||
"""The base class used for defining the structure and properties of
|
||||
collections of documents stored in MongoDB. Inherit from this class, and
|
||||
add fields as class attributes to define a document's structure.
|
||||
@@ -103,9 +112,11 @@ class Document(BaseDocument):
|
||||
specifying :attr:`max_documents` and :attr:`max_size` in the :attr:`meta`
|
||||
dictionary. :attr:`max_documents` is the maximum number of documents that
|
||||
is allowed to be stored in the collection, and :attr:`max_size` is the
|
||||
maximum size of the collection in bytes. If :attr:`max_size` is not
|
||||
maximum size of the collection in bytes. :attr:`max_size` is rounded up
|
||||
to the next multiple of 256 by MongoDB internally (and by mongoengine beforehand).
|
||||
Use a multiple of 256 yourself to avoid confusion. If :attr:`max_size` is not
|
||||
specified and :attr:`max_documents` is, :attr:`max_size` defaults to
|
||||
10000000 bytes (10MB).
|
||||
10485760 bytes (10MB).
|
||||
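A hedged example of the capped-collection meta described above, using a size that is already a multiple of 256 (a value like 10000 would be rounded up to 10240 anyway):

    from mongoengine import Document, StringField

    class LogEntry(Document):
        line = StringField()

        meta = {
            'max_documents': 1000,
            'max_size': 2 ** 20,   # 1 MiB, already a multiple of 256
        }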
|
||||
Indexes may be created by specifying :attr:`indexes` in the :attr:`meta`
|
||||
dictionary. The value should be a list of field names or tuples of field
|
||||
@@ -113,7 +124,7 @@ class Document(BaseDocument):
|
||||
a **+** or **-** sign.
|
||||
|
||||
Automatic index creation can be disabled by specifying
|
||||
attr:`auto_create_index` in the :attr:`meta` dictionary. If this is set to
|
||||
:attr:`auto_create_index` in the :attr:`meta` dictionary. If this is set to
|
||||
False then indexes will not be created by MongoEngine. This is useful in
|
||||
production systems where index creation is performed as part of a
|
||||
deployment system.
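A hedged sketch of the two meta keys described here, on a hypothetical ``BlogPost`` model; with ``auto_create_index`` off, the indexes listed in ``meta`` are only created by the deployment tooling.

from mongoengine import Document, StringField, DateTimeField

class BlogPost(Document):
    author = StringField()
    published = DateTimeField()
    meta = {
        'indexes': ['author', ('author', '-published')],  # '-' means descending
        'auto_create_index': False,  # index creation handled by deployment instead
    }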
|
||||
@@ -122,6 +133,11 @@ class Document(BaseDocument):
|
||||
doesn't contain a list) if allow_inheritance is True. This can be
|
||||
disabled by either setting cls to False on the specific index or
|
||||
by setting index_cls to False on the meta dictionary for the document.
|
||||
|
||||
By default, any extra attribute existing in stored data but not declared
|
||||
in your model will raise a :class:`~mongoengine.FieldDoesNotExist` error.
|
||||
This can be disabled by setting :attr:`strict` to ``False``
|
||||
in the :attr:`meta` dictionary.
|
||||
"""
|
||||
|
||||
# The __metaclass__ attribute is removed by 2to3 when running with Python3
|
||||
@@ -129,26 +145,23 @@ class Document(BaseDocument):
|
||||
my_metaclass = TopLevelDocumentMetaclass
|
||||
__metaclass__ = TopLevelDocumentMetaclass
|
||||
|
||||
__slots__ = ('__objects')
|
||||
__slots__ = ('__objects',)
|
||||
|
||||
def pk():
|
||||
"""Primary key alias
|
||||
"""
|
||||
|
||||
def fget(self):
|
||||
if 'id_field' not in self._meta:
|
||||
return None
|
||||
return getattr(self, self._meta['id_field'])
|
||||
|
||||
def fset(self, value):
|
||||
return setattr(self, self._meta['id_field'], value)
|
||||
return property(fget, fset)
|
||||
pk = pk()
|
||||
|
||||
@property
|
||||
def text_score(self):
|
||||
"""
|
||||
Used for text searches
|
||||
"""
|
||||
return self._data.get('text_score')
|
||||
return property(fget, fset)
|
||||
|
||||
pk = pk()
|
||||
|
||||
@classmethod
|
||||
def _get_db(cls):
|
||||
@@ -158,14 +171,18 @@ class Document(BaseDocument):
|
||||
@classmethod
|
||||
def _get_collection(cls):
|
||||
"""Returns the collection for the document."""
|
||||
# TODO: use new get_collection() with PyMongo3 ?
|
||||
if not hasattr(cls, '_collection') or cls._collection is None:
|
||||
db = cls._get_db()
|
||||
collection_name = cls._get_collection_name()
|
||||
# Create collection as a capped collection if specified
|
||||
if cls._meta['max_size'] or cls._meta['max_documents']:
|
||||
if cls._meta.get('max_size') or cls._meta.get('max_documents'):
|
||||
# Get max document limit and max byte size from meta
|
||||
max_size = cls._meta['max_size'] or 10000000 # 10MB default
|
||||
max_documents = cls._meta['max_documents']
|
||||
max_size = cls._meta.get('max_size') or 10 * 2 ** 20 # 10MB default
|
||||
max_documents = cls._meta.get('max_documents')
|
||||
# Round up to next 256 bytes as MongoDB would do it to avoid exception
|
||||
if max_size % 256:
|
||||
max_size = (max_size // 256 + 1) * 256
|
||||
|
||||
if collection_name in db.collection_names():
|
||||
cls._collection = db[collection_name]
|
||||
@@ -173,7 +190,7 @@ class Document(BaseDocument):
|
||||
# options match the specified capped options
|
||||
options = cls._collection.options()
|
||||
if options.get('max') != max_documents or \
|
||||
options.get('size') != max_size:
|
||||
options.get('size') != max_size:
|
||||
msg = (('Cannot create collection "%s" as a capped '
|
||||
'collection as it already exists')
|
||||
% cls._collection)
|
||||
@@ -192,8 +209,46 @@ class Document(BaseDocument):
|
||||
cls.ensure_indexes()
|
||||
return cls._collection
|
||||
|
||||
def modify(self, query={}, **update):
|
||||
"""Perform an atomic update of the document in the database and reload
|
||||
the document object using updated version.
|
||||
|
||||
Returns True if the document has been updated or False if the document
|
||||
in the database doesn't match the query.
|
||||
|
||||
.. note:: All unsaved changes that have been made to the document are
|
||||
rejected if the method returns True.
|
||||
|
||||
:param query: the update will be performed only if the document in the
|
||||
database matches the query
|
||||
:param update: Django-style update keyword arguments
|
||||
"""
|
||||
|
||||
if self.pk is None:
|
||||
raise InvalidDocumentError("The document does not have a primary key.")
|
||||
|
||||
id_field = self._meta["id_field"]
|
||||
query = query.copy() if isinstance(query, dict) else query.to_query(self)
|
||||
|
||||
if id_field not in query:
|
||||
query[id_field] = self.pk
|
||||
elif query[id_field] != self.pk:
|
||||
raise InvalidQueryError("Invalid document modify query: it must modify only this document.")
|
||||
|
||||
updated = self._qs(**query).modify(new=True, **update)
|
||||
if updated is None:
|
||||
return False
|
||||
|
||||
for field in self._fields_ordered:
|
||||
setattr(self, field, self._reload(field, updated[field]))
|
||||
|
||||
self._changed_fields = updated._changed_fields
|
||||
self._created = False
|
||||
|
||||
return True
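# A hedged usage sketch of modify() as documented above; Person and its
# name/version fields are hypothetical.
person = Person.objects.first()
# Atomically rename only if the stored version still matches what we loaded.
if person.modify(query={'version': person.version},
                 set__name='Updated', inc__version=1):
    print(person.name)   # instance now reflects the updated stored document
else:
    print('Stored document no longer matches the query')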
|
||||
|
||||
def save(self, force_insert=False, validate=True, clean=True,
|
||||
write_concern=None, cascade=None, cascade_kwargs=None,
|
||||
write_concern=None, cascade=None, cascade_kwargs=None,
|
||||
_refs=None, save_condition=None, **kwargs):
|
||||
"""Save the :class:`~mongoengine.Document` to the database. If the
|
||||
document already exists, it will be updated, otherwise it will be
|
||||
@@ -218,7 +273,9 @@ class Document(BaseDocument):
|
||||
to cascading saves. Implies ``cascade=True``.
|
||||
:param _refs: A list of processed references used in cascading saves
|
||||
:param save_condition: only perform save if matching record in db
|
||||
satisfies condition(s) (e.g., version number)
|
||||
satisfies condition(s) (e.g. version number).
|
||||
Raises :class:`OperationError` if the conditions are not satisfied
|
||||
|
||||
.. versionchanged:: 0.5
|
||||
In existing documents it only saves changed fields using
|
||||
set / unset. Saves are cascaded and any
|
||||
@@ -235,6 +292,8 @@ class Document(BaseDocument):
|
||||
.. versionchanged:: 0.8.5
|
||||
Optional save_condition that only overwrites existing documents
|
||||
if the condition is satisfied in the current db record.
|
||||
.. versionchanged:: 0.10
|
||||
:class:`OperationError` exception raised if save_condition fails.
|
||||
"""
|
||||
signals.pre_save.send(self.__class__, document=self)
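# A hedged sketch of save_condition for optimistic concurrency, assuming a
# hypothetical Page document with a version field; since 0.10 a failed
# condition raises OperationError instead of silently doing nothing.
from mongoengine.errors import OperationError

page = Page.objects.get(slug='home')
page.body = 'new content'
try:
    page.save(save_condition={'version': page.version})
except OperationError:
    print('Raced with another writer; reload and retry')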
|
||||
|
||||
@@ -253,11 +312,20 @@ class Document(BaseDocument):
|
||||
|
||||
try:
|
||||
collection = self._get_collection()
|
||||
if self._meta.get('auto_create_index', True):
|
||||
self.ensure_indexes()
|
||||
if created:
|
||||
if force_insert:
|
||||
object_id = collection.insert(doc, **write_concern)
|
||||
else:
|
||||
object_id = collection.save(doc, **write_concern)
|
||||
# In PyMongo 3.0, the save() call calls internally the _update() call
|
||||
# but they forget to return the _id value passed back, therefore getting it back here
|
||||
# Correct behaviour in 2.X and in 3.0.1+ versions
|
||||
if not object_id and pymongo.version_tuple == (3, 0):
|
||||
pk_as_mongo_obj = self._fields.get(self._meta['id_field']).to_mongo(self.pk)
|
||||
object_id = self._qs.filter(pk=pk_as_mongo_obj).first() and \
|
||||
self._qs.filter(pk=pk_as_mongo_obj).first().pk
|
||||
else:
|
||||
object_id = doc['_id']
|
||||
updates, removals = self._delta()
|
||||
@@ -290,6 +358,9 @@ class Document(BaseDocument):
|
||||
upsert = save_condition is None
|
||||
last_error = collection.update(select_dict, update_query,
|
||||
upsert=upsert, **write_concern)
|
||||
if not upsert and last_error['nModified'] == 0:
|
||||
raise OperationError('Race condition preventing'
|
||||
' document update detected')
|
||||
created = is_new_object(last_error)
|
||||
|
||||
if cascade is None:
|
||||
@@ -384,7 +455,7 @@ class Document(BaseDocument):
|
||||
if kwargs.get('upsert', False):
|
||||
query = self.to_mongo()
|
||||
if "_cls" in query:
|
||||
del(query["_cls"])
|
||||
del query["_cls"]
|
||||
return self._qs.filter(**query).update_one(**kwargs)
|
||||
else:
|
||||
raise OperationError(
|
||||
@@ -406,6 +477,12 @@ class Document(BaseDocument):
|
||||
"""
|
||||
signals.pre_delete.send(self.__class__, document=self)
|
||||
|
||||
# Delete FileFields separately
|
||||
FileField = _import_class('FileField')
|
||||
for name, field in self._fields.iteritems():
|
||||
if isinstance(field, FileField):
|
||||
getattr(self, name).delete()
|
||||
|
||||
try:
|
||||
self._qs.filter(
|
||||
**self._object_key).delete(write_concern=write_concern, _from_doc_delete=True)
|
||||
@@ -414,7 +491,7 @@ class Document(BaseDocument):
|
||||
raise OperationError(message)
|
||||
signals.post_delete.send(self.__class__, document=self)
|
||||
|
||||
def switch_db(self, db_alias):
|
||||
def switch_db(self, db_alias, keep_created=True):
|
||||
"""
|
||||
Temporarily switch the database for a document instance.
|
||||
|
||||
@@ -424,10 +501,14 @@ class Document(BaseDocument):
|
||||
user.switch_db('archive-db')
|
||||
user.save()
|
||||
|
||||
If you need to read from another database see
|
||||
:class:`~mongoengine.context_managers.switch_db`
|
||||
:param str db_alias: The database alias to use for saving the document
|
||||
|
||||
:param db_alias: The database alias to use for saving the document
|
||||
:param bool keep_created: keep self._created value after switching db, else is reset to True
|
||||
|
||||
|
||||
.. seealso::
|
||||
Use :class:`~mongoengine.context_managers.switch_collection`
|
||||
if you need to read from another collection
|
||||
"""
|
||||
with switch_db(self.__class__, db_alias) as cls:
|
||||
collection = cls._get_collection()
|
||||
@@ -435,12 +516,12 @@ class Document(BaseDocument):
|
||||
self._get_collection = lambda: collection
|
||||
self._get_db = lambda: db
|
||||
self._collection = collection
|
||||
self._created = True
|
||||
self._created = True if not keep_created else self._created
|
||||
self.__objects = self._qs
|
||||
self.__objects._collection_obj = collection
|
||||
return self
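# A hedged sketch of the new keep_created flag on a hypothetical User model:
# by default the instance's _created state now survives the switch, so a later
# save() keeps its update-vs-insert semantics; keep_created=False restores the
# previous behaviour of always treating the instance as newly created.
user = User.objects.get(name='Bob')
user.switch_db('archive-db')                       # _created preserved (default)
user.switch_db('archive-db', keep_created=False)   # _created reset to True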
|
||||
|
||||
def switch_collection(self, collection_name):
|
||||
def switch_collection(self, collection_name, keep_created=True):
|
||||
"""
|
||||
Temporarily switch the collection for a document instance.
|
||||
|
||||
@@ -450,17 +531,21 @@ class Document(BaseDocument):
|
||||
user.switch_collection('old-users')
|
||||
user.save()
|
||||
|
||||
If you need to read from another database see
|
||||
:class:`~mongoengine.context_managers.switch_db`
|
||||
|
||||
:param collection_name: The database alias to use for saving the
|
||||
:param str collection_name: The database alias to use for saving the
|
||||
document
|
||||
|
||||
:param bool keep_created: keep self._created value after switching collection, else is reset to True
|
||||
|
||||
|
||||
.. seealso::
|
||||
Use :class:`~mongoengine.context_managers.switch_db`
|
||||
if you need to read from another database
|
||||
"""
|
||||
with switch_collection(self.__class__, collection_name) as cls:
|
||||
collection = cls._get_collection()
|
||||
self._get_collection = lambda: collection
|
||||
self._collection = collection
|
||||
self._created = True
|
||||
self._created = True if not keep_created else self._created
|
||||
self.__objects = self._qs
|
||||
self.__objects._collection_obj = collection
|
||||
return self
|
||||
@@ -495,8 +580,8 @@ class Document(BaseDocument):
|
||||
if not self.pk:
|
||||
raise self.DoesNotExist("Document does not exist")
|
||||
obj = self._qs.read_preference(ReadPreference.PRIMARY).filter(
|
||||
**self._object_key).only(*fields).limit(1
|
||||
).select_related(max_depth=max_depth)
|
||||
**self._object_key).only(*fields).limit(
|
||||
1).select_related(max_depth=max_depth)
|
||||
|
||||
if obj:
|
||||
obj = obj[0]
|
||||
@@ -505,7 +590,13 @@ class Document(BaseDocument):
|
||||
|
||||
for field in self._fields_ordered:
|
||||
if not fields or field in fields:
|
||||
setattr(self, field, self._reload(field, obj[field]))
|
||||
try:
|
||||
setattr(self, field, self._reload(field, obj[field]))
|
||||
except KeyError:
|
||||
# If field is removed from the database while the object
|
||||
# is in memory, a reload would cause a KeyError
|
||||
# i.e. obj.update(unset__field=1) followed by obj.reload()
|
||||
delattr(self, field)
|
||||
|
||||
self._changed_fields = obj._changed_fields
|
||||
self._created = False
|
||||
@@ -518,6 +609,9 @@ class Document(BaseDocument):
|
||||
if isinstance(value, BaseDict):
|
||||
value = [(k, self._reload(k, v)) for k, v in value.items()]
|
||||
value = BaseDict(value, self, key)
|
||||
elif isinstance(value, EmbeddedDocumentList):
|
||||
value = [self._reload(key, v) for v in value]
|
||||
value = EmbeddedDocumentList(value, self, key)
|
||||
elif isinstance(value, BaseList):
|
||||
value = [self._reload(key, v) for v in value]
|
||||
value = BaseList(value, self, key)
|
||||
@@ -546,11 +640,11 @@ class Document(BaseDocument):
|
||||
for class_name in document_cls._subclasses
|
||||
if class_name != document_cls.__name__] + [document_cls]
|
||||
|
||||
for cls in classes:
|
||||
for klass in classes:
|
||||
for document_cls in documents:
|
||||
delete_rules = cls._meta.get('delete_rules') or {}
|
||||
delete_rules = klass._meta.get('delete_rules') or {}
|
||||
delete_rules[(document_cls, field_name)] = rule
|
||||
cls._meta['delete_rules'] = delete_rules
|
||||
klass._meta['delete_rules'] = delete_rules
|
||||
|
||||
@classmethod
|
||||
def drop_collection(cls):
|
||||
@@ -561,23 +655,51 @@ class Document(BaseDocument):
|
||||
db = cls._get_db()
|
||||
db.drop_collection(cls._get_collection_name())
|
||||
|
||||
@classmethod
|
||||
def create_index(cls, keys, background=False, **kwargs):
|
||||
"""Creates the given indexes if required.
|
||||
|
||||
:param keys: a single index key or a list of index keys (to
|
||||
construct a multi-field index); keys may be prefixed with a **+**
|
||||
or a **-** to determine the index ordering
|
||||
:param background: Allows index creation in the background
|
||||
"""
|
||||
index_spec = cls._build_index_spec(keys)
|
||||
index_spec = index_spec.copy()
|
||||
fields = index_spec.pop('fields')
|
||||
drop_dups = kwargs.get('drop_dups', False)
|
||||
if IS_PYMONGO_3 and drop_dups:
|
||||
msg = "drop_dups is deprecated and is removed when using PyMongo 3+."
|
||||
warnings.warn(msg, DeprecationWarning)
|
||||
elif not IS_PYMONGO_3:
|
||||
index_spec['drop_dups'] = drop_dups
|
||||
index_spec['background'] = background
|
||||
index_spec.update(kwargs)
|
||||
|
||||
if IS_PYMONGO_3:
|
||||
return cls._get_collection().create_index(fields, **index_spec)
|
||||
else:
|
||||
return cls._get_collection().ensure_index(fields, **index_spec)
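# Hypothetical usage of the new create_index classmethod (ensure_index now just
# delegates to it and is kept for backwards compatibility); the model and field
# names are illustrative, and extra keyword arguments pass through to PyMongo.
BlogPost.create_index(['+author', '-published_date'], background=True)
BlogPost.create_index('+title', unique=True)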
|
||||
|
||||
@classmethod
|
||||
def ensure_index(cls, key_or_list, drop_dups=False, background=False,
|
||||
**kwargs):
|
||||
"""Ensure that the given indexes are in place.
|
||||
"""Ensure that the given indexes are in place. Deprecated in favour
|
||||
of create_index.
|
||||
|
||||
:param key_or_list: a single index key or a list of index keys (to
|
||||
construct a multi-field index); keys may be prefixed with a **+**
|
||||
or a **-** to determine the index ordering
|
||||
:param background: Allows index creation in the background
|
||||
:param drop_dups: Was removed/ignored with MongoDB >2.7.5. The value
|
||||
will be removed if PyMongo3+ is used
|
||||
"""
|
||||
index_spec = cls._build_index_spec(key_or_list)
|
||||
index_spec = index_spec.copy()
|
||||
fields = index_spec.pop('fields')
|
||||
index_spec['drop_dups'] = drop_dups
|
||||
index_spec['background'] = background
|
||||
index_spec.update(kwargs)
|
||||
|
||||
return cls._get_collection().ensure_index(fields, **index_spec)
|
||||
if IS_PYMONGO_3 and drop_dups:
|
||||
msg = "drop_dups is deprecated and is removed when using PyMongo 3+."
|
||||
warnings.warn(msg, DeprecationWarning)
|
||||
elif not IS_PYMONGO_3:
|
||||
kwargs.update({'drop_dups': drop_dups})
|
||||
return cls.create_index(key_or_list, background=background, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def ensure_indexes(cls):
|
||||
@@ -592,9 +714,14 @@ class Document(BaseDocument):
|
||||
drop_dups = cls._meta.get('index_drop_dups', False)
|
||||
index_opts = cls._meta.get('index_opts') or {}
|
||||
index_cls = cls._meta.get('index_cls', True)
|
||||
if IS_PYMONGO_3 and drop_dups:
|
||||
msg = "drop_dups is deprecated and is removed when using PyMongo 3+."
|
||||
warnings.warn(msg, DeprecationWarning)
|
||||
|
||||
collection = cls._get_collection()
|
||||
if collection.read_preference > 1:
|
||||
# 746: when connection is via mongos, the read preference is not necessarily an indication that
|
||||
# this code runs on a secondary
|
||||
if not collection.is_mongos and collection.read_preference > 1:
|
||||
return
|
||||
|
||||
# determine if an index which we are creating includes
|
||||
@@ -612,18 +739,37 @@ class Document(BaseDocument):
|
||||
cls_indexed = cls_indexed or includes_cls(fields)
|
||||
opts = index_opts.copy()
|
||||
opts.update(spec)
|
||||
collection.ensure_index(fields, background=background,
|
||||
drop_dups=drop_dups, **opts)
|
||||
|
||||
# we shouldn't pass 'cls' to the collection.ensureIndex options
|
||||
# because of https://jira.mongodb.org/browse/SERVER-769
|
||||
if 'cls' in opts:
|
||||
del opts['cls']
|
||||
|
||||
if IS_PYMONGO_3:
|
||||
collection.create_index(fields, background=background, **opts)
|
||||
else:
|
||||
collection.ensure_index(fields, background=background,
|
||||
drop_dups=drop_dups, **opts)
|
||||
|
||||
# If _cls is being used (for polymorphism), it needs an index,
|
||||
# only if another index doesn't begin with _cls
|
||||
if (index_cls and not cls_indexed and
|
||||
cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True):
|
||||
collection.ensure_index('_cls', background=background,
|
||||
**index_opts)
|
||||
|
||||
# we shouldn't pass 'cls' to the collection.ensureIndex options
|
||||
# because of https://jira.mongodb.org/browse/SERVER-769
|
||||
if 'cls' in index_opts:
|
||||
del index_opts['cls']
|
||||
|
||||
if IS_PYMONGO_3:
|
||||
collection.create_index('_cls', background=background,
|
||||
**index_opts)
|
||||
else:
|
||||
collection.ensure_index('_cls', background=background,
|
||||
**index_opts)
|
||||
|
||||
@classmethod
|
||||
def list_indexes(cls, go_up=True, go_down=True):
|
||||
def list_indexes(cls):
|
||||
""" Lists all of the indexes that should be created for given
|
||||
collection. It includes all the indexes from super- and sub-classes.
|
||||
"""
|
||||
@@ -631,7 +777,7 @@ class Document(BaseDocument):
|
||||
if cls._meta.get('abstract'):
|
||||
return []
|
||||
|
||||
# get all the base classes, subclasses and sieblings
|
||||
# get all the base classes, subclasses and siblings
|
||||
classes = []
|
||||
|
||||
def get_classes(cls):
|
||||
@@ -670,8 +816,8 @@ class Document(BaseDocument):
|
||||
return indexes
|
||||
|
||||
indexes = []
|
||||
for cls in classes:
|
||||
for index in get_indexes_spec(cls):
|
||||
for klass in classes:
|
||||
for index in get_indexes_spec(klass):
|
||||
if index not in indexes:
|
||||
indexes.append(index)
|
||||
|
||||
@@ -710,7 +856,6 @@ class Document(BaseDocument):
|
||||
|
||||
|
||||
class DynamicDocument(Document):
|
||||
|
||||
"""A Dynamic Document class allowing flexible, expandable and uncontrolled
|
||||
schemas. As a :class:`~mongoengine.Document` subclass, acts in the same
|
||||
way as an ordinary document but has expando style properties. Any data
|
||||
@@ -742,7 +887,6 @@ class DynamicDocument(Document):
|
||||
|
||||
|
||||
class DynamicEmbeddedDocument(EmbeddedDocument):
|
||||
|
||||
"""A Dynamic Embedded Document class allowing flexible, expandable and
|
||||
uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more
|
||||
information about dynamic documents.
|
||||
@@ -769,7 +913,6 @@ class DynamicEmbeddedDocument(EmbeddedDocument):
|
||||
|
||||
|
||||
class MapReduceDocument(object):
|
||||
|
||||
"""A document returned from a map/reduce query.
|
||||
|
||||
:param collection: An instance of :class:`~pymongo.Collection`
|
||||
|
@@ -5,7 +5,8 @@ from mongoengine.python_support import txt_type
|
||||
|
||||
__all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError',
|
||||
'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError',
|
||||
'OperationError', 'NotUniqueError', 'ValidationError')
|
||||
'OperationError', 'NotUniqueError', 'FieldDoesNotExist',
|
||||
'ValidationError')
|
||||
|
||||
|
||||
class NotRegistered(Exception):
|
||||
@@ -40,6 +41,17 @@ class NotUniqueError(OperationError):
|
||||
pass
|
||||
|
||||
|
||||
class FieldDoesNotExist(Exception):
|
||||
"""Raised when trying to set a field
|
||||
not declared in a :class:`~mongoengine.Document`
|
||||
or an :class:`~mongoengine.EmbeddedDocument`.
|
||||
|
||||
To avoid this behavior on data loading,
|
||||
you should set :attr:`strict` to ``False``
|
||||
in the :attr:`meta` dictionary.
|
||||
"""
|
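A hedged sketch of working with the new error; the ``User`` model is hypothetical, and ``strict`` can be switched off instead of catching the exception.

from mongoengine import Document, StringField
from mongoengine.errors import FieldDoesNotExist

class User(Document):
    name = StringField()
    # meta = {'strict': False}   # uncomment to ignore undeclared keys on load

try:
    user = User.objects.first()  # stored document contains an undeclared key
except FieldDoesNotExist as exc:
    print(exc)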
||||
|
||||
|
||||
class ValidationError(AssertionError):
|
||||
"""Validation exception.
|
||||
|
||||
@@ -103,6 +115,7 @@ class ValidationError(AssertionError):
|
||||
else:
|
||||
return unicode(source)
|
||||
return errors_dict
|
||||
|
||||
if not self.errors:
|
||||
return {}
|
||||
return build_dict(self.errors)
|
||||
@@ -113,9 +126,9 @@ class ValidationError(AssertionError):
|
||||
def generate_key(value, prefix=''):
|
||||
if isinstance(value, list):
|
||||
value = ' '.join([generate_key(k) for k in value])
|
||||
if isinstance(value, dict):
|
||||
elif isinstance(value, dict):
|
||||
value = ' '.join(
|
||||
[generate_key(v, k) for k, v in value.iteritems()])
|
||||
[generate_key(v, k) for k, v in value.iteritems()])
|
||||
|
||||
results = "%s.%s" % (prefix, value) if prefix else value
|
||||
return results
|
||||
|
@@ -39,19 +39,18 @@ __all__ = [
|
||||
'FloatField', 'DecimalField', 'BooleanField', 'DateTimeField',
|
||||
'ComplexDateTimeField', 'EmbeddedDocumentField', 'ObjectIdField',
|
||||
'GenericEmbeddedDocumentField', 'DynamicField', 'ListField',
|
||||
'SortedListField', 'DictField', 'MapField', 'ReferenceField',
|
||||
'CachedReferenceField', 'GenericReferenceField', 'BinaryField',
|
||||
'GridFSError', 'GridFSProxy', 'FileField', 'ImageGridFsProxy',
|
||||
'ImproperlyConfigured', 'ImageField', 'GeoPointField', 'PointField',
|
||||
'LineStringField', 'PolygonField', 'SequenceField', 'UUIDField',
|
||||
'GeoJsonBaseField']
|
||||
|
||||
'SortedListField', 'EmbeddedDocumentListField', 'DictField',
|
||||
'MapField', 'ReferenceField', 'CachedReferenceField',
|
||||
'GenericReferenceField', 'BinaryField', 'GridFSError', 'GridFSProxy',
|
||||
'FileField', 'ImageGridFsProxy', 'ImproperlyConfigured', 'ImageField',
|
||||
'GeoPointField', 'PointField', 'LineStringField', 'PolygonField',
|
||||
'SequenceField', 'UUIDField', 'MultiPointField', 'MultiLineStringField',
|
||||
'MultiPolygonField', 'GeoJsonBaseField']
|
||||
|
||||
RECURSIVE_REFERENCE_CONSTANT = 'self'
|
||||
|
||||
|
||||
class StringField(BaseField):
|
||||
|
||||
"""A unicode string field.
|
||||
"""
|
||||
|
||||
@@ -107,33 +106,41 @@ class StringField(BaseField):
|
||||
# escape unsafe characters which could lead to a re.error
|
||||
value = re.escape(value)
|
||||
value = re.compile(regex % value, flags)
|
||||
return value
|
||||
return super(StringField, self).prepare_query_value(op, value)
|
||||
|
||||
|
||||
class URLField(StringField):
|
||||
|
||||
"""A field that validates input as an URL.
|
||||
|
||||
.. versionadded:: 0.3
|
||||
"""
|
||||
|
||||
_URL_REGEX = re.compile(
|
||||
r'^(?:http|ftp)s?://' # http:// or https://
|
||||
# domain...
|
||||
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'
|
||||
r'^(?:[a-z0-9\.\-]*)://' # scheme is validated separately
|
||||
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}(?<!-)\.?)|' # domain...
|
||||
r'localhost|' # localhost...
|
||||
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
|
||||
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|' # ...or ipv4
|
||||
r'\[?[A-F0-9]*:[A-F0-9:]+\]?)' # ...or ipv6
|
||||
r'(?::\d+)?' # optional port
|
||||
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
|
||||
_URL_SCHEMES = ['http', 'https', 'ftp', 'ftps']
|
||||
|
||||
def __init__(self, verify_exists=False, url_regex=None, **kwargs):
|
||||
def __init__(self, verify_exists=False, url_regex=None, schemes=None, **kwargs):
|
||||
self.verify_exists = verify_exists
|
||||
self.url_regex = url_regex or self._URL_REGEX
|
||||
self.schemes = schemes or self._URL_SCHEMES
|
||||
super(URLField, self).__init__(**kwargs)
|
||||
|
||||
def validate(self, value):
|
||||
# Check first if the scheme is valid
|
||||
scheme = value.split('://')[0].lower()
|
||||
if scheme not in self.schemes:
|
||||
self.error('Invalid scheme {} in URL: {}'.format(scheme, value))
|
||||
return
|
||||
|
||||
# Then check full URL
|
||||
if not self.url_regex.match(value):
|
||||
self.error('Invalid URL: %s' % value)
|
||||
self.error('Invalid URL: {}'.format(value))
|
||||
return
|
||||
|
||||
if self.verify_exists:
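# A hedged sketch of the new `schemes` argument on a hypothetical Bookmark
# model; the scheme is now checked before the (relaxed) regex.
from mongoengine import Document, URLField

class Bookmark(Document):
    url = URLField(schemes=['http', 'https'])    # reject ftp://, ftps://, ...

Bookmark(url='https://example.com').validate()   # passes
Bookmark(url='ftp://example.com/f').validate()   # raises ValidationError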
|
||||
@@ -149,7 +156,6 @@ class URLField(StringField):
|
||||
|
||||
|
||||
class EmailField(StringField):
|
||||
|
||||
"""A field that validates input as an E-Mail-Address.
|
||||
|
||||
.. versionadded:: 0.4
|
||||
@@ -160,8 +166,8 @@ class EmailField(StringField):
|
||||
r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*"
|
||||
# quoted-string
|
||||
r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-011\013\014\016-\177])*"'
|
||||
# domain
|
||||
r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,253}[A-Z0-9])?\.)+[A-Z]{2,6}$', re.IGNORECASE
|
||||
# domain (max length of an ICAAN TLD is 22 characters)
|
||||
r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}|[A-Z0-9-]{2,}(?<!-))$', re.IGNORECASE
|
||||
)
|
||||
|
||||
def validate(self, value):
|
||||
@@ -171,7 +177,6 @@ class EmailField(StringField):
|
||||
|
||||
|
||||
class IntField(BaseField):
|
||||
|
||||
"""An 32-bit integer field.
|
||||
"""
|
||||
|
||||
@@ -202,11 +207,10 @@ class IntField(BaseField):
|
||||
if value is None:
|
||||
return value
|
||||
|
||||
return int(value)
|
||||
return super(IntField, self).prepare_query_value(op, int(value))
|
||||
|
||||
|
||||
class LongField(BaseField):
|
||||
|
||||
"""An 64-bit integer field.
|
||||
"""
|
||||
|
||||
@@ -237,11 +241,10 @@ class LongField(BaseField):
|
||||
if value is None:
|
||||
return value
|
||||
|
||||
return long(value)
|
||||
return super(LongField, self).prepare_query_value(op, long(value))
|
||||
|
||||
|
||||
class FloatField(BaseField):
|
||||
|
||||
"""An floating point number field.
|
||||
"""
|
||||
|
||||
@@ -272,11 +275,10 @@ class FloatField(BaseField):
|
||||
if value is None:
|
||||
return value
|
||||
|
||||
return float(value)
|
||||
return super(FloatField, self).prepare_query_value(op, float(value))
|
||||
|
||||
|
||||
class DecimalField(BaseField):
|
||||
|
||||
"""A fixed-point decimal number field.
|
||||
|
||||
.. versionchanged:: 0.8
|
||||
@@ -290,7 +292,7 @@ class DecimalField(BaseField):
|
||||
:param max_value: Validation rule for the maximum acceptable value.
|
||||
:param force_string: Store as a string.
|
||||
:param precision: Number of decimal places to store.
|
||||
:param rounding: The rounding rule from the python decimal libary:
|
||||
:param rounding: The rounding rule from the python decimal library:
|
||||
|
||||
- decimal.ROUND_CEILING (towards Infinity)
|
||||
- decimal.ROUND_DOWN (towards zero)
|
||||
@@ -307,7 +309,7 @@ class DecimalField(BaseField):
|
||||
self.min_value = min_value
|
||||
self.max_value = max_value
|
||||
self.force_string = force_string
|
||||
self.precision = decimal.Decimal(".%s" % ("0" * precision))
|
||||
self.precision = precision
|
||||
self.rounding = rounding
|
||||
|
||||
super(DecimalField, self).__init__(**kwargs)
|
||||
@@ -321,7 +323,7 @@ class DecimalField(BaseField):
|
||||
value = decimal.Decimal("%s" % value)
|
||||
except decimal.InvalidOperation:
|
||||
return value
|
||||
return value.quantize(self.precision, rounding=self.rounding)
|
||||
return value.quantize(decimal.Decimal(".%s" % ("0" * self.precision)), rounding=self.rounding)
|
||||
|
||||
def to_mongo(self, value, use_db_field=True):
|
||||
if value is None:
|
||||
@@ -346,11 +348,10 @@ class DecimalField(BaseField):
|
||||
self.error('Decimal value is too large')
|
||||
|
||||
def prepare_query_value(self, op, value):
|
||||
return self.to_mongo(value)
|
||||
return super(DecimalField, self).prepare_query_value(op, self.to_mongo(value))
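# A small sketch of the reworked precision handling: `precision` is stored as a
# plain integer and the quantizer is built on demand, so conversion behaves as
# before (values below are illustrative).
import decimal
from mongoengine.fields import DecimalField

field = DecimalField(precision=2, rounding=decimal.ROUND_HALF_UP)
field.to_python('3.14159')   # Decimal('3.14')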
|
||||
|
||||
|
||||
class BooleanField(BaseField):
|
||||
|
||||
"""A boolean field type.
|
||||
|
||||
.. versionadded:: 0.1.2
|
||||
@@ -369,16 +370,15 @@ class BooleanField(BaseField):
|
||||
|
||||
|
||||
class DateTimeField(BaseField):
|
||||
|
||||
"""A datetime field.
|
||||
|
||||
Uses the python-dateutil library if available alternatively use time.strptime
|
||||
to parse the dates. Note: python-dateutil's parser is fully featured and when
|
||||
installed you can utilise it to convert varing types of date formats into valid
|
||||
installed you can utilise it to convert varying types of date formats into valid
|
||||
python datetime objects.
|
||||
|
||||
Note: Microseconds are rounded to the nearest millisecond.
|
||||
Pre UTC microsecond support is effecively broken.
|
||||
Pre UTC microsecond support is effectively broken.
|
||||
Use :class:`~mongoengine.fields.ComplexDateTimeField` if you
|
||||
need accurate microsecond support.
|
||||
"""
|
||||
@@ -433,11 +433,10 @@ class DateTimeField(BaseField):
|
||||
return None
|
||||
|
||||
def prepare_query_value(self, op, value):
|
||||
return self.to_mongo(value)
|
||||
return super(DateTimeField, self).prepare_query_value(op, self.to_mongo(value))
|
||||
|
||||
|
||||
class ComplexDateTimeField(StringField):
|
||||
|
||||
"""
|
||||
ComplexDateTimeField handles microseconds exactly instead of rounding
|
||||
like DateTimeField does.
|
||||
@@ -457,37 +456,22 @@ class ComplexDateTimeField(StringField):
|
||||
"""
|
||||
|
||||
def __init__(self, separator=',', **kwargs):
|
||||
self.names = ['year', 'month', 'day', 'hour', 'minute', 'second',
|
||||
'microsecond']
|
||||
self.separtor = separator
|
||||
self.names = ['year', 'month', 'day', 'hour', 'minute', 'second', 'microsecond']
|
||||
self.separator = separator
|
||||
self.format = separator.join(['%Y', '%m', '%d', '%H', '%M', '%S', '%f'])
|
||||
super(ComplexDateTimeField, self).__init__(**kwargs)
|
||||
|
||||
def _leading_zero(self, number):
|
||||
"""
|
||||
Converts the given number to a string.
|
||||
|
||||
If it has only one digit, a leading zero is added so that it always has at least
|
||||
two digits.
|
||||
"""
|
||||
if int(number) < 10:
|
||||
return "0%s" % number
|
||||
else:
|
||||
return str(number)
|
||||
|
||||
def _convert_from_datetime(self, val):
|
||||
"""
|
||||
Convert a `datetime` object to a string representation (which will be
|
||||
stored in MongoDB). This is the reverse function of
|
||||
`_convert_from_string`.
|
||||
|
||||
>>> a = datetime(2011, 6, 8, 20, 26, 24, 192284)
|
||||
>>> RealDateTimeField()._convert_from_datetime(a)
|
||||
'2011,06,08,20,26,24,192284'
|
||||
>>> a = datetime(2011, 6, 8, 20, 26, 24, 92284)
|
||||
>>> ComplexDateTimeField()._convert_from_datetime(a)
|
||||
'2011,06,08,20,26,24,092284'
|
||||
"""
|
||||
data = []
|
||||
for name in self.names:
|
||||
data.append(self._leading_zero(getattr(val, name)))
|
||||
return ','.join(data)
|
||||
return val.strftime(self.format)
|
||||
|
||||
def _convert_from_string(self, data):
|
||||
"""
|
||||
@@ -495,21 +479,17 @@ class ComplexDateTimeField(StringField):
|
||||
will manipulate). This is the reverse function of
|
||||
`_convert_from_datetime`.
|
||||
|
||||
>>> a = '2011,06,08,20,26,24,192284'
|
||||
>>> a = '2011,06,08,20,26,24,092284'
|
||||
>>> ComplexDateTimeField()._convert_from_string(a)
|
||||
datetime.datetime(2011, 6, 8, 20, 26, 24, 192284)
|
||||
datetime.datetime(2011, 6, 8, 20, 26, 24, 92284)
|
||||
"""
|
||||
data = data.split(',')
|
||||
data = map(int, data)
|
||||
values = {}
|
||||
for i in range(7):
|
||||
values[self.names[i]] = data[i]
|
||||
return datetime.datetime(**values)
|
||||
values = map(int, data.split(self.separator))
|
||||
return datetime.datetime(*values)
|
||||
|
||||
def __get__(self, instance, owner):
|
||||
data = super(ComplexDateTimeField, self).__get__(instance, owner)
|
||||
if data is None:
|
||||
return datetime.datetime.now()
|
||||
return None if self.null else datetime.datetime.now()
|
||||
if isinstance(data, datetime.datetime):
|
||||
return data
|
||||
return self._convert_from_string(data)
|
||||
@@ -536,11 +516,10 @@ class ComplexDateTimeField(StringField):
|
||||
return self._convert_from_datetime(value)
|
||||
|
||||
def prepare_query_value(self, op, value):
|
||||
return self._convert_from_datetime(value)
|
||||
return super(ComplexDateTimeField, self).prepare_query_value(op, self._convert_from_datetime(value))
|
||||
|
||||
|
||||
class EmbeddedDocumentField(BaseField):
|
||||
|
||||
"""An embedded document field - with a declared document_type.
|
||||
Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`.
|
||||
"""
|
||||
@@ -564,7 +543,7 @@ class EmbeddedDocumentField(BaseField):
|
||||
|
||||
def to_python(self, value):
|
||||
if not isinstance(value, self.document_type):
|
||||
return self.document_type._from_son(value)
|
||||
return self.document_type._from_son(value, _auto_dereference=self._auto_dereference)
|
||||
return value
|
||||
|
||||
def to_mongo(self, value, use_db_field=True, fields=[]):
|
||||
@@ -587,11 +566,13 @@ class EmbeddedDocumentField(BaseField):
|
||||
return self.document_type._fields.get(member_name)
|
||||
|
||||
def prepare_query_value(self, op, value):
|
||||
if not isinstance(value, self.document_type):
|
||||
value = self.document_type._from_son(value)
|
||||
super(EmbeddedDocumentField, self).prepare_query_value(op, value)
|
||||
return self.to_mongo(value)
|
||||
|
||||
|
||||
class GenericEmbeddedDocumentField(BaseField):
|
||||
|
||||
"""A generic embedded document field - allows any
|
||||
:class:`~mongoengine.EmbeddedDocument` to be stored.
|
||||
|
||||
@@ -603,7 +584,7 @@ class GenericEmbeddedDocumentField(BaseField):
|
||||
"""
|
||||
|
||||
def prepare_query_value(self, op, value):
|
||||
return self.to_mongo(value)
|
||||
return super(GenericEmbeddedDocumentField, self).prepare_query_value(op, self.to_mongo(value))
|
||||
|
||||
def to_python(self, value):
|
||||
if isinstance(value, dict):
|
||||
@@ -624,20 +605,19 @@ class GenericEmbeddedDocumentField(BaseField):
|
||||
return None
|
||||
|
||||
data = document.to_mongo(use_db_field)
|
||||
if not '_cls' in data:
|
||||
if '_cls' not in data:
|
||||
data['_cls'] = document._class_name
|
||||
return data
|
||||
|
||||
|
||||
class DynamicField(BaseField):
|
||||
|
||||
"""A truly dynamic field type capable of handling different and varying
|
||||
types of data.
|
||||
|
||||
Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data"""
|
||||
|
||||
def to_mongo(self, value):
|
||||
"""Convert a Python type to a MongoDBcompatible type.
|
||||
"""Convert a Python type to a MongoDB compatible type.
|
||||
"""
|
||||
|
||||
if isinstance(value, basestring):
|
||||
@@ -647,9 +627,9 @@ class DynamicField(BaseField):
|
||||
cls = value.__class__
|
||||
val = value.to_mongo()
|
||||
# If it's a document that's not inherited, add _cls
|
||||
if (isinstance(value, Document)):
|
||||
if isinstance(value, Document):
|
||||
val = {"_ref": value.to_dbref(), "_cls": cls.__name__}
|
||||
if (isinstance(value, EmbeddedDocument)):
|
||||
if isinstance(value, EmbeddedDocument):
|
||||
val['_cls'] = cls.__name__
|
||||
return val
|
||||
|
||||
@@ -684,9 +664,8 @@ class DynamicField(BaseField):
|
||||
|
||||
def prepare_query_value(self, op, value):
|
||||
if isinstance(value, basestring):
|
||||
from mongoengine.fields import StringField
|
||||
return StringField().prepare_query_value(op, value)
|
||||
return self.to_mongo(value)
|
||||
return super(DynamicField, self).prepare_query_value(op, self.to_mongo(value))
|
||||
|
||||
def validate(self, value, clean=True):
|
||||
if hasattr(value, "validate"):
|
||||
@@ -694,7 +673,6 @@ class DynamicField(BaseField):
|
||||
|
||||
|
||||
class ListField(ComplexBaseField):
|
||||
|
||||
"""A list field that wraps a standard field, allowing multiple instances
|
||||
of the field to be used as a list in the database.
|
||||
|
||||
@@ -719,16 +697,40 @@ class ListField(ComplexBaseField):
|
||||
|
||||
def prepare_query_value(self, op, value):
|
||||
if self.field:
|
||||
if op in ('set', 'unset') and (not isinstance(value, basestring)
|
||||
and not isinstance(value, BaseDocument)
|
||||
and hasattr(value, '__iter__')):
|
||||
if op in ('set', 'unset') and (
|
||||
not isinstance(value, basestring) and
|
||||
not isinstance(value, BaseDocument) and
|
||||
hasattr(value, '__iter__')):
|
||||
return [self.field.prepare_query_value(op, v) for v in value]
|
||||
return self.field.prepare_query_value(op, value)
|
||||
return super(ListField, self).prepare_query_value(op, value)
|
||||
|
||||
|
||||
class SortedListField(ListField):
|
||||
class EmbeddedDocumentListField(ListField):
|
||||
"""A :class:`~mongoengine.ListField` designed specially to hold a list of
|
||||
embedded documents to provide additional query helpers.
|
||||
|
||||
.. note::
|
||||
The only valid list values are subclasses of
|
||||
:class:`~mongoengine.EmbeddedDocument`.
|
||||
|
||||
.. versionadded:: 0.9
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, document_type, **kwargs):
|
||||
"""
|
||||
:param document_type: The type of
|
||||
:class:`~mongoengine.EmbeddedDocument` the list will hold.
|
||||
:param kwargs: Keyword arguments passed directly into the parent
|
||||
:class:`~mongoengine.ListField`.
|
||||
"""
|
||||
super(EmbeddedDocumentListField, self).__init__(
|
||||
field=EmbeddedDocumentField(document_type), **kwargs
|
||||
)
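# A hedged usage sketch with hypothetical Post/Comment models; the dedicated
# list type is assumed to expose extra helpers such as filter() on the
# returned EmbeddedDocumentList.
from mongoengine import (Document, EmbeddedDocument, StringField,
                         EmbeddedDocumentListField)

class Comment(EmbeddedDocument):
    author = StringField()
    message = StringField()

class Post(Document):
    comments = EmbeddedDocumentListField(Comment)

post = Post(comments=[Comment(author='ana', message='hi')]).save()
post.comments.filter(author='ana')   # helper provided by EmbeddedDocumentList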
|
||||
|
||||
|
||||
class SortedListField(ListField):
|
||||
"""A ListField that sorts the contents of its list before writing to
|
||||
the database in order to ensure that a sorted list is always
|
||||
retrieved.
|
||||
@@ -780,7 +782,6 @@ def key_has_dot_or_dollar(d):
|
||||
|
||||
|
||||
class DictField(ComplexBaseField):
|
||||
|
||||
"""A dictionary field that wraps a standard Python dictionary. This is
|
||||
similar to an embedded document, but the structure is not defined.
|
||||
|
||||
@@ -826,13 +827,16 @@ class DictField(ComplexBaseField):
|
||||
return StringField().prepare_query_value(op, value)
|
||||
|
||||
if hasattr(self.field, 'field'):
|
||||
if op in ('set', 'unset') and isinstance(value, dict):
|
||||
return dict(
|
||||
(k, self.field.prepare_query_value(op, v))
|
||||
for k, v in value.items())
|
||||
return self.field.prepare_query_value(op, value)
|
||||
|
||||
return super(DictField, self).prepare_query_value(op, value)
|
||||
|
||||
|
||||
class MapField(DictField):
|
||||
|
||||
"""A field that maps a name to a specified field type. Similar to
|
||||
a DictField, except the 'value' of each item must match the specified
|
||||
field type.
|
||||
@@ -848,13 +852,12 @@ class MapField(DictField):
|
||||
|
||||
|
||||
class ReferenceField(BaseField):
|
||||
|
||||
"""A reference to a document that will be automatically dereferenced on
|
||||
access (lazily).
|
||||
|
||||
Use the `reverse_delete_rule` to handle what should happen if the document
|
||||
the field is referencing is deleted. EmbeddedDocuments, DictFields and
|
||||
MapFields do not support reverse_delete_rules and an `InvalidDocumentError`
|
||||
MapFields do not support reverse_delete_rule and an `InvalidDocumentError`
|
||||
will be raised if trying to set on one of these Document / Field types.
|
||||
|
||||
The options are:
|
||||
@@ -878,7 +881,7 @@ class ReferenceField(BaseField):
|
||||
Bar.register_delete_rule(Foo, 'bar', NULLIFY)
|
||||
|
||||
.. note ::
|
||||
`reverse_delete_rules` do not trigger pre / post delete signals to be
|
||||
`reverse_delete_rule` does not trigger pre / post delete signals to be
|
||||
triggered.
|
||||
|
||||
.. versionchanged:: 0.5 added `reverse_delete_rule`
|
||||
@@ -967,6 +970,7 @@ class ReferenceField(BaseField):
|
||||
def prepare_query_value(self, op, value):
|
||||
if value is None:
|
||||
return None
|
||||
super(ReferenceField, self).prepare_query_value(op, value)
|
||||
return self.to_mongo(value)
|
||||
|
||||
def validate(self, value):
|
||||
@@ -983,9 +987,9 @@ class ReferenceField(BaseField):
|
||||
|
||||
|
||||
class CachedReferenceField(BaseField):
|
||||
|
||||
"""
|
||||
A referencefield with cache fields to porpuse pseudo-joins
|
||||
A reference field with cached fields, providing pseudo-joins
|
||||
|
||||
.. versionadded:: 0.9
|
||||
"""
|
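A hedged sketch of the pseudo-join: selected fields of the referenced document are assumed to be copied next to the reference on save (via a ``fields`` keyword), so queries can filter on them without dereferencing; the models are hypothetical.

from mongoengine import Document, StringField
from mongoengine.fields import CachedReferenceField

class Author(Document):
    name = StringField()

class Book(Document):
    title = StringField()
    author = CachedReferenceField(Author, fields=['name'])

Book.objects(author__name='Tolkien')   # filters on the cached copy, no join needed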
||||
|
||||
@@ -997,7 +1001,6 @@ class CachedReferenceField(BaseField):
|
||||
"""
|
||||
if not isinstance(document_type, basestring) and \
|
||||
not issubclass(document_type, (Document, basestring)):
|
||||
|
||||
self.error('Argument to CachedReferenceField constructor must be a'
|
||||
' document class or a string')
|
||||
|
||||
@@ -1008,6 +1011,7 @@ class CachedReferenceField(BaseField):
|
||||
|
||||
def start_listener(self):
|
||||
from mongoengine import signals
|
||||
|
||||
signals.post_save.connect(self.on_document_pre_save,
|
||||
sender=self.document_type)
|
||||
|
||||
@@ -1061,7 +1065,6 @@ class CachedReferenceField(BaseField):
|
||||
def to_mongo(self, document):
|
||||
id_field_name = self.document_type._meta['id_field']
|
||||
id_field = self.document_type._fields[id_field_name]
|
||||
doc_tipe = self.document_type
|
||||
|
||||
if isinstance(document, Document):
|
||||
# We need the id from the saved object to create the DBRef
|
||||
@@ -1071,6 +1074,7 @@ class CachedReferenceField(BaseField):
|
||||
' been saved to the database')
|
||||
else:
|
||||
self.error('Only accept a document object')
|
||||
# TODO: should raise here or will fail next statement
|
||||
|
||||
value = SON((
|
||||
("_id", id_field.to_mongo(id_)),
|
||||
@@ -1093,7 +1097,7 @@ class CachedReferenceField(BaseField):
|
||||
|
||||
def validate(self, value):
|
||||
|
||||
if not isinstance(value, (self.document_type)):
|
||||
if not isinstance(value, self.document_type):
|
||||
self.error("A CachedReferenceField only accepts documents")
|
||||
|
||||
if isinstance(value, Document) and value.id is None:
|
||||
@@ -1122,7 +1126,6 @@ class CachedReferenceField(BaseField):
|
||||
|
||||
|
||||
class GenericReferenceField(BaseField):
|
||||
|
||||
"""A reference to *any* :class:`~mongoengine.document.Document` subclass
|
||||
that will be automatically dereferenced on access (lazily).
|
||||
|
||||
@@ -1204,7 +1207,6 @@ class GenericReferenceField(BaseField):
|
||||
|
||||
|
||||
class BinaryField(BaseField):
|
||||
|
||||
"""A binary data field.
|
||||
"""
|
||||
|
||||
@@ -1236,7 +1238,6 @@ class GridFSError(Exception):
|
||||
|
||||
|
||||
class GridFSProxy(object):
|
||||
|
||||
"""Proxy object to handle writing and reading of files to and from GridFS
|
||||
|
||||
.. versionadded:: 0.4
|
||||
@@ -1250,12 +1251,12 @@ class GridFSProxy(object):
|
||||
instance=None,
|
||||
db_alias=DEFAULT_CONNECTION_NAME,
|
||||
collection_name='fs'):
|
||||
self.grid_id = grid_id # Store GridFS id for file
|
||||
self.grid_id = grid_id # Store GridFS id for file
|
||||
self.key = key
|
||||
self.instance = instance
|
||||
self.db_alias = db_alias
|
||||
self.collection_name = collection_name
|
||||
self.newfile = None # Used for partial writes
|
||||
self.newfile = None # Used for partial writes
|
||||
self.gridout = None
|
||||
|
||||
def __getattr__(self, name):
|
||||
@@ -1326,6 +1327,7 @@ class GridFSProxy(object):
|
||||
def new_file(self, **kwargs):
|
||||
self.newfile = self.fs.new_file(**kwargs)
|
||||
self.grid_id = self.newfile._id
|
||||
self._mark_as_changed()
|
||||
|
||||
def put(self, file_obj, **kwargs):
|
||||
if self.grid_id:
|
||||
@@ -1381,7 +1383,6 @@ class GridFSProxy(object):
|
||||
|
||||
|
||||
class FileField(BaseField):
|
||||
|
||||
"""A GridFS storage field.
|
||||
|
||||
.. versionadded:: 0.4
|
||||
@@ -1415,7 +1416,7 @@ class FileField(BaseField):
|
||||
def __set__(self, instance, value):
|
||||
key = self.name
|
||||
if ((hasattr(value, 'read') and not
|
||||
isinstance(value, GridFSProxy)) or isinstance(value, str_types)):
|
||||
isinstance(value, GridFSProxy)) or isinstance(value, str_types)):
|
||||
# using "FileField() = file/string" notation
|
||||
grid_file = instance._data.get(self.name)
|
||||
# If a file already exists, delete it
|
||||
@@ -1465,7 +1466,6 @@ class FileField(BaseField):
|
||||
|
||||
|
||||
class ImageGridFsProxy(GridFSProxy):
|
||||
|
||||
"""
|
||||
Proxy for ImageField
|
||||
|
||||
@@ -1489,6 +1489,7 @@ class ImageGridFsProxy(GridFSProxy):
|
||||
raise ValidationError('Invalid image: %s' % e)
|
||||
|
||||
# Progressive JPEG
|
||||
# TODO: fixme, at least unused, at worst bad implementation
|
||||
progressive = img.info.get('progressive') or False
|
||||
|
||||
if (kwargs.get('progressive') and
|
||||
@@ -1549,7 +1550,7 @@ class ImageGridFsProxy(GridFSProxy):
|
||||
if out and out.thumbnail_id:
|
||||
self.fs.delete(out.thumbnail_id)
|
||||
|
||||
return super(ImageGridFsProxy, self).delete(*args, **kwargs)
|
||||
return super(ImageGridFsProxy, self).delete()
|
||||
|
||||
def _put_thumbnail(self, thumbnail, format, progressive, **kwargs):
|
||||
w, h = thumbnail.size
|
||||
@@ -1604,7 +1605,6 @@ class ImproperlyConfigured(Exception):
|
||||
|
||||
|
||||
class ImageField(FileField):
|
||||
|
||||
"""
|
||||
A Image File storage field.
|
||||
|
||||
@@ -1643,8 +1643,7 @@ class ImageField(FileField):
|
||||
|
||||
|
||||
class SequenceField(BaseField):
|
||||
|
||||
"""Provides a sequental counter see:
|
||||
"""Provides a sequential counter see:
|
||||
http://www.mongodb.org/display/DOCS/Object+IDs#ObjectIDs-SequenceNumbers
|
||||
|
||||
.. note::
|
||||
@@ -1655,12 +1654,21 @@ class SequenceField(BaseField):
|
||||
cluster of machines, it is easier to create an object ID than have
|
||||
global, uniformly increasing sequence numbers.
|
||||
|
||||
:param collection_name: Name of the counter collection (default 'mongoengine.counters')
|
||||
:param sequence_name: Name of the sequence in the collection (default 'ClassName.counter')
|
||||
:param value_decorator: Any callable to use as a counter (default int)
|
||||
|
||||
Use any callable as `value_decorator` to transform calculated counter into
|
||||
any value suitable for your needs, e.g. string or hexadecimal
|
||||
representation of the default integer counter value.
|
||||
|
||||
|
||||
.. note::
|
||||
|
||||
In case the counter is defined in the abstract document, it will be
|
||||
common to all inherited documents and the default sequence name will
|
||||
be the class name of the abstract document.
|
||||
|
||||
.. versionadded:: 0.5
|
||||
|
||||
.. versionchanged:: 0.8 added `value_decorator`
|
||||
"""
|
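A hedged sketch of ``value_decorator`` on a hypothetical ``Ticket`` model; the counter itself lives in the ``mongoengine.counters`` collection by default.

from mongoengine import Document
from mongoengine.fields import SequenceField

class Ticket(Document):
    number = SequenceField(value_decorator=lambda n: 'T-%04d' % n)

Ticket().save().number   # 'T-0001', 'T-0002', ... as tickets are created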
||||
|
||||
@@ -1675,7 +1683,7 @@ class SequenceField(BaseField):
|
||||
self.sequence_name = sequence_name
|
||||
self.value_decorator = (callable(value_decorator) and
|
||||
value_decorator or self.VALUE_DECORATOR)
|
||||
return super(SequenceField, self).__init__(*args, **kwargs)
|
||||
super(SequenceField, self).__init__(*args, **kwargs)
|
||||
|
||||
def generate(self):
|
||||
"""
|
||||
@@ -1721,7 +1729,7 @@ class SequenceField(BaseField):
|
||||
if self.sequence_name:
|
||||
return self.sequence_name
|
||||
owner = self.owner_document
|
||||
if issubclass(owner, Document):
|
||||
if issubclass(owner, Document) and not owner._meta.get('abstract'):
|
||||
return owner._get_collection_name()
|
||||
else:
|
||||
return ''.join('_%s' % c if c.isupper() else c
|
||||
@@ -1745,7 +1753,7 @@ class SequenceField(BaseField):
|
||||
|
||||
def prepare_query_value(self, op, value):
|
||||
"""
|
||||
This method is overriden in order to convert the query value into to required
|
||||
This method is overridden in order to convert the query value into the required
|
||||
type. We need to do this in order to be able to successfully compare query
|
||||
values passed as string, the base implementation returns the value as is.
|
||||
"""
|
||||
@@ -1758,7 +1766,6 @@ class SequenceField(BaseField):
|
||||
|
||||
|
||||
class UUIDField(BaseField):
|
||||
|
||||
"""A UUID field.
|
||||
|
||||
.. versionadded:: 0.6
|
||||
@@ -1805,13 +1812,12 @@ class UUIDField(BaseField):
|
||||
if not isinstance(value, basestring):
|
||||
value = str(value)
|
||||
try:
|
||||
value = uuid.UUID(value)
|
||||
uuid.UUID(value)
|
||||
except Exception, exc:
|
||||
self.error('Could not convert to UUID: %s' % exc)
|
||||
|
||||
|
||||
class GeoPointField(BaseField):
|
||||
|
||||
"""A list storing a longitude and latitude coordinate.
|
||||
|
||||
.. note:: this represents a generic point in a 2D plane and a legacy way of
|
||||
@@ -1841,7 +1847,6 @@ class GeoPointField(BaseField):
|
||||
|
||||
|
||||
class PointField(GeoJsonBaseField):
|
||||
|
||||
"""A GeoJSON field storing a longitude and latitude coordinate.
|
||||
|
||||
The data is represented as:
|
||||
@@ -1855,13 +1860,13 @@ class PointField(GeoJsonBaseField):
|
||||
to set the value.
|
||||
|
||||
Requires mongodb >= 2.4
|
||||
|
||||
.. versionadded:: 0.8
|
||||
"""
|
||||
_type = "Point"
|
||||
|
||||
|
||||
class LineStringField(GeoJsonBaseField):
|
||||
|
||||
"""A GeoJSON field storing a line of longitude and latitude coordinates.
|
||||
|
||||
The data is represented as:
|
||||
@@ -1874,13 +1879,13 @@ class LineStringField(GeoJsonBaseField):
|
||||
You can either pass a dict with the full information or a list of points.
|
||||
|
||||
Requires mongodb >= 2.4
|
||||
|
||||
.. versionadded:: 0.8
|
||||
"""
|
||||
_type = "LineString"
|
||||
|
||||
|
||||
class PolygonField(GeoJsonBaseField):
|
||||
|
||||
"""A GeoJSON field storing a polygon of longitude and latitude coordinates.
|
||||
|
||||
The data is represented as:
|
||||
@@ -1896,6 +1901,74 @@ class PolygonField(GeoJsonBaseField):
|
||||
holes.
|
||||
|
||||
Requires mongodb >= 2.4
|
||||
|
||||
.. versionadded:: 0.8
|
||||
"""
|
||||
_type = "Polygon"
|
||||
|
||||
|
||||
class MultiPointField(GeoJsonBaseField):
|
||||
"""A GeoJSON field storing a list of Points.
|
||||
|
||||
The data is represented as:
|
||||
|
||||
.. code-block:: js
|
||||
|
||||
{ "type" : "MultiPoint" ,
|
||||
"coordinates" : [[x1, y1], [x2, y2]]}
|
||||
|
||||
You can either pass a dict with the full information or a list
|
||||
to set the value.
|
||||
|
||||
Requires mongodb >= 2.6
|
||||
|
||||
.. versionadded:: 0.9
|
||||
"""
|
||||
_type = "MultiPoint"
|
||||
|
||||
|
||||
class MultiLineStringField(GeoJsonBaseField):
|
||||
"""A GeoJSON field storing a list of LineStrings.
|
||||
|
||||
The data is represented as:
|
||||
|
||||
.. code-block:: js
|
||||
|
||||
{ "type" : "MultiLineString" ,
|
||||
"coordinates" : [[[x1, y1], [x1, y1] ... [xn, yn]],
|
||||
[[x1, y1], [x1, y1] ... [xn, yn]]]}
|
||||
|
||||
You can either pass a dict with the full information or a list of points.
|
||||
|
||||
Requires mongodb >= 2.6
|
||||
|
||||
.. versionadded:: 0.9
|
||||
"""
|
||||
_type = "MultiLineString"
|
||||
|
||||
|
||||
class MultiPolygonField(GeoJsonBaseField):
|
||||
"""A GeoJSON field storing list of Polygons.
|
||||
|
||||
The data is represented as:
|
||||
|
||||
.. code-block:: js
|
||||
|
||||
{ "type" : "MultiPolygon" ,
|
||||
"coordinates" : [[
|
||||
[[x1, y1], [x1, y1] ... [xn, yn]],
|
||||
[[x1, y1], [x1, y1] ... [xn, yn]]
|
||||
], [
|
||||
[[x1, y1], [x1, y1] ... [xn, yn]],
|
||||
[[x1, y1], [x1, y1] ... [xn, yn]]
|
||||
]
|
||||
}
|
||||
|
||||
You can either pass a dict with the full information or a list
|
||||
of Polygons.
|
||||
|
||||
Requires mongodb >= 2.6
|
||||
|
||||
.. versionadded:: 0.9
|
||||
"""
|
||||
_type = "MultiPolygon"
|
||||
|
@@ -1,18 +1,26 @@
|
||||
"""Helper functions and types to aid with Python 2.5 - 3 support."""
|
||||
|
||||
import sys
|
||||
import pymongo
|
||||
|
||||
|
||||
if pymongo.version_tuple[0] < 3:
|
||||
IS_PYMONGO_3 = False
|
||||
else:
|
||||
IS_PYMONGO_3 = True
|
||||
|
||||
PY3 = sys.version_info[0] == 3
|
||||
|
||||
if PY3:
|
||||
import codecs
|
||||
from io import BytesIO as StringIO
|
||||
|
||||
# return s converted to binary. b('test') should be equivalent to b'test'
|
||||
def b(s):
|
||||
return codecs.latin_1_encode(s)[0]
|
||||
|
||||
bin_type = bytes
|
||||
txt_type = str
|
||||
txt_type = str
|
||||
else:
|
||||
try:
|
||||
from cStringIO import StringIO
|
||||
|
@@ -21,10 +21,14 @@ from mongoengine.common import _import_class
|
||||
from mongoengine.base.common import get_document
|
||||
from mongoengine.errors import (OperationError, NotUniqueError,
|
||||
InvalidQueryError, LookUpError)
|
||||
from mongoengine.python_support import IS_PYMONGO_3
|
||||
from mongoengine.queryset import transform
|
||||
from mongoengine.queryset.field_list import QueryFieldList
|
||||
from mongoengine.queryset.visitor import Q, QNode
|
||||
|
||||
if IS_PYMONGO_3:
|
||||
from pymongo.collection import ReturnDocument
|
||||
|
||||
|
||||
__all__ = ('BaseQuerySet', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL')
|
||||
|
||||
@@ -39,7 +43,6 @@ RE_TYPE = type(re.compile(''))
|
||||
|
||||
|
||||
class BaseQuerySet(object):
|
||||
|
||||
"""A set of results returned from a query. Wraps a MongoDB cursor,
|
||||
providing :class:`~mongoengine.Document` objects as the results.
|
||||
"""
|
||||
@@ -66,7 +69,6 @@ class BaseQuerySet(object):
|
||||
self._as_pymongo = False
|
||||
self._as_pymongo_coerce = False
|
||||
self._search_text = None
|
||||
self._include_text_scores = False
|
||||
|
||||
# If inheritance is allowed, only return instances and instances of
|
||||
# subclasses of the class being used
|
||||
@@ -82,9 +84,10 @@ class BaseQuerySet(object):
|
||||
self._skip = None
|
||||
self._hint = -1 # Using -1 as None is a valid value for hint
|
||||
self.only_fields = []
|
||||
self._max_time_ms = None
|
||||
|
||||
def __call__(self, q_obj=None, class_check=True, slave_okay=False,
|
||||
read_preference=None, **query):
|
||||
def __call__(self, q_obj=None, class_check=True, read_preference=None,
|
||||
**query):
|
||||
"""Filter the selected documents by calling the
|
||||
:class:`~mongoengine.queryset.QuerySet` with a query.
|
||||
|
||||
@@ -94,9 +97,7 @@ class BaseQuerySet(object):
|
||||
objects, only the last one will be used
|
||||
:param class_check: If set to False bypass class name check when
|
||||
querying collection
|
||||
:param slave_okay: if True, allows this query to be run against a
|
||||
replica secondary.
|
||||
:params read_preference: if set, overrides connection-level
|
||||
:param read_preference: if set, overrides connection-level
|
||||
read_preference from `ReplicaSetConnection`.
|
||||
:param query: Django-style query keyword arguments
|
||||
"""
|
||||
@@ -158,7 +159,9 @@ class BaseQuerySet(object):
|
||||
if queryset._as_pymongo:
|
||||
return queryset._get_as_pymongo(queryset._cursor[key])
|
||||
return queryset._document._from_son(queryset._cursor[key],
|
||||
_auto_dereference=self._auto_dereference, only_fields=self.only_fields)
|
||||
_auto_dereference=self._auto_dereference,
|
||||
only_fields=self.only_fields)
|
||||
|
||||
raise AttributeError
|
||||
|
||||
def __iter__(self):
|
||||
@@ -191,7 +194,7 @@ class BaseQuerySet(object):
|
||||
"""
|
||||
return self.__call__(*q_objs, **query)
|
||||
|
||||
def search_text(self, text, language=None, include_text_scores=False):
|
||||
def search_text(self, text, language=None):
|
||||
"""
|
||||
Start a text search, using text indexes.
|
||||
Require: MongoDB server version 2.6+.
|
||||
@@ -199,15 +202,13 @@ class BaseQuerySet(object):
|
||||
:param language: The language that determines the list of stop words
|
||||
for the search and the rules for the stemmer and tokenizer.
|
||||
If not specified, the search uses the default language of the index.
|
||||
For supported languages, see `Text Search Languages <http://docs.mongodb.org/manual/reference/text-search-languages/#text-search-languages>`.
|
||||
|
||||
:param include_text_scores: If True, automaticaly add a text_score attribute to Document.
|
||||
|
||||
For supported languages, see
|
||||
`Text Search Languages <http://docs.mongodb.org/manual/reference/text-search-languages/#text-search-languages>`.
|
||||
"""
|
||||
queryset = self.clone()
|
||||
if queryset._search_text:
|
||||
raise OperationError(
|
||||
"Is not possible to use search_text two times.")
|
||||
"It is not possible to use search_text two times.")
|
||||
|
||||
query_kwargs = SON({'$search': text})
|
||||
if language:
|
||||
@@ -217,7 +218,6 @@ class BaseQuerySet(object):
|
||||
queryset._mongo_query = None
|
||||
queryset._cursor_obj = None
|
||||
queryset._search_text = text
|
||||
queryset._include_text_scores = include_text_scores
|
||||
|
||||
return queryset
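# A hedged usage sketch with a hypothetical News model that declares a text
# index in its meta; include_text_scores is gone, so ordering by relevance is
# assumed to be requested explicitly via the $text_score key.
results = News.objects.search_text('mongodb aggregation', language='english')
top = results.order_by('$text_score').first()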
|
||||
|
||||
@@ -256,54 +256,6 @@ class BaseQuerySet(object):
|
||||
"""
|
||||
return self._document(**kwargs).save()
|
||||
|
||||
def get_or_create(self, write_concern=None, auto_save=True,
|
||||
*q_objs, **query):
|
||||
"""Retrieve unique object or create, if it doesn't exist. Returns a
|
||||
tuple of ``(object, created)``, where ``object`` is the retrieved or
|
||||
created object and ``created`` is a boolean specifying whether a new
|
||||
object was created. Raises
|
||||
:class:`~mongoengine.queryset.MultipleObjectsReturned` or
|
||||
`DocumentName.MultipleObjectsReturned` if multiple results are found.
|
||||
A new document will be created if the document doesn't exist; a
|
||||
dictionary of default values for the new document may be provided as a
|
||||
keyword argument called :attr:`defaults`.
|
||||
|
||||
.. note:: This requires two separate operations and therefore a
|
||||
race condition exists. Because there are no transactions in
|
||||
mongoDB other approaches should be investigated, to ensure you
|
||||
don't accidentally duplicate data when using this method. This is
|
||||
now scheduled to be removed before 1.0
|
||||
|
||||
:param write_concern: optional extra keyword arguments used if we
|
||||
have to create a new document.
|
||||
Passes any write_concern onto :meth:`~mongoengine.Document.save`
|
||||
|
||||
:param auto_save: if the object is to be saved automatically if
|
||||
not found.
|
||||
|
||||
.. deprecated:: 0.8
|
||||
.. versionchanged:: 0.6 - added `auto_save`
|
||||
.. versionadded:: 0.3
|
||||
"""
|
||||
msg = ("get_or_create is scheduled to be deprecated. The approach is "
|
||||
"flawed without transactions. Upserts should be preferred.")
|
||||
warnings.warn(msg, DeprecationWarning)
|
||||
|
||||
defaults = query.get('defaults', {})
|
||||
if 'defaults' in query:
|
||||
del query['defaults']
|
||||
|
||||
try:
|
||||
doc = self.get(*q_objs, **query)
|
||||
return doc, False
|
||||
except self._document.DoesNotExist:
|
||||
query.update(defaults)
|
||||
doc = self._document(**query)
|
||||
|
||||
if auto_save:
|
||||
doc.save(write_concern=write_concern)
|
||||
return doc, True
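# Preferred replacement for get_or_create: a single atomic upsert through
# modify(), which avoids the read-then-write race described in the note above.
# ``Person`` and its fields are assumptions used only for illustration.
person = Person.objects(name='Bob').modify(
    upsert=True,              # insert the document if no match exists
    new=True,                 # return the post-update document
    set_on_insert__age=30,    # applied only when the upsert actually inserts
)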
|
||||
|
||||
def first(self):
|
||||
"""Retrieve the first object matching the query.
|
||||
"""
|
||||
@@ -317,7 +269,7 @@ class BaseQuerySet(object):
|
||||
def insert(self, doc_or_docs, load_bulk=True, write_concern=None):
|
||||
"""bulk insert documents
|
||||
|
||||
:param docs_or_doc: a document or list of documents to be inserted
|
||||
:param doc_or_docs: a document or list of documents to be inserted
|
||||
:param load_bulk (optional): If True returns the list of document
|
||||
instances
|
||||
:param write_concern: Extra keyword arguments are passed down to
|
||||
@@ -383,7 +335,7 @@ class BaseQuerySet(object):
|
||||
self._document, documents=results, loaded=True)
|
||||
return return_one and results[0] or results
|
||||
|
||||
def count(self, with_limit_and_skip=True):
|
||||
def count(self, with_limit_and_skip=False):
|
||||
"""Count the selected elements in the query.
|
||||
|
||||
:param with_limit_and_skip (optional): take any :meth:`limit` or
|
||||
@@ -405,6 +357,7 @@ class BaseQuerySet(object):
|
||||
will force an fsync on the primary server.
|
||||
:param _from_doc_delete: True when called from Document.delete(), in which case
|
||||
the delete signals have already been triggered, so don't loop over the documents.
|
||||
|
||||
:returns: the number of deleted documents
|
||||
"""
|
||||
queryset = self.clone()
|
||||
@@ -425,7 +378,7 @@ class BaseQuerySet(object):
|
||||
if call_document_delete:
|
||||
cnt = 0
|
||||
for doc in queryset:
|
||||
doc.delete(write_concern=write_concern)
|
||||
doc.delete(**write_concern)
|
||||
cnt += 1
|
||||
return cnt
|
||||
|
||||
@@ -434,6 +387,8 @@ class BaseQuerySet(object):
|
||||
# references
|
||||
for rule_entry in delete_rules:
|
||||
document_cls, field_name = rule_entry
|
||||
if document_cls._meta.get('abstract'):
|
||||
continue
|
||||
rule = doc._meta['delete_rules'][rule_entry]
|
||||
if rule == DENY and document_cls.objects(
|
||||
**{field_name + '__in': self}).count() > 0:
|
||||
@@ -443,12 +398,14 @@ class BaseQuerySet(object):
|
||||
|
||||
for rule_entry in delete_rules:
|
||||
document_cls, field_name = rule_entry
|
||||
if document_cls._meta.get('abstract'):
|
||||
continue
|
||||
rule = doc._meta['delete_rules'][rule_entry]
|
||||
if rule == CASCADE:
|
||||
ref_q = document_cls.objects(**{field_name + '__in': self})
|
||||
ref_q_count = ref_q.count()
|
||||
if (doc != document_cls and ref_q_count > 0
|
||||
or (doc == document_cls and ref_q_count > 0)):
|
||||
if (doc != document_cls and ref_q_count > 0 or
|
||||
(doc == document_cls and ref_q_count > 0)):
|
||||
ref_q.delete(write_concern=write_concern)
|
||||
elif rule == NULLIFY:
|
||||
document_cls.objects(**{field_name + '__in': self}).update(
|
||||
@@ -458,8 +415,9 @@ class BaseQuerySet(object):
|
||||
write_concern=write_concern,
|
||||
**{'pull_all__%s' % field_name: self})
|
||||
|
||||
result = queryset._collection.remove(queryset._query, write_concern=write_concern)
|
||||
return result["n"]
|
||||
result = queryset._collection.remove(queryset._query, **write_concern)
|
||||
if result:
|
||||
return result.get("n")
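# The DENY / CASCADE / NULLIFY / PULL handling above is driven by the
# reverse_delete_rule declared on ReferenceFields. A minimal sketch (the
# Author/Book documents are assumptions used only for illustration):
from mongoengine import Document, StringField, ReferenceField, CASCADE, NULLIFY

class Author(Document):
    name = StringField()

class Book(Document):
    title = StringField()
    author = ReferenceField(Author, reverse_delete_rule=CASCADE)   # books are deleted with their author
    editor = ReferenceField(Author, reverse_delete_rule=NULLIFY)   # the reference is simply unset

Author.objects(name='retired').delete()  # applies the rules above before removing the authors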
|
||||
|
||||
def update(self, upsert=False, multi=True, write_concern=None,
|
||||
full_result=False, **update):
|
||||
@@ -543,7 +501,7 @@ class BaseQuerySet(object):
|
||||
|
||||
:param upsert: insert if document doesn't exist (default ``False``)
|
||||
:param full_response: return the entire response object from the
|
||||
server (default ``False``)
|
||||
server (default ``False``, not available for PyMongo 3+)
|
||||
:param remove: remove rather than updating (default ``False``)
|
||||
:param new: return updated rather than original document
|
||||
(default ``False``)
|
||||
@@ -561,13 +519,31 @@ class BaseQuerySet(object):
|
||||
|
||||
queryset = self.clone()
|
||||
query = queryset._query
|
||||
update = transform.update(queryset._document, **update)
|
||||
if not IS_PYMONGO_3 or not remove:
|
||||
update = transform.update(queryset._document, **update)
|
||||
sort = queryset._ordering
|
||||
|
||||
try:
|
||||
result = queryset._collection.find_and_modify(
|
||||
query, update, upsert=upsert, sort=sort, remove=remove, new=new,
|
||||
full_response=full_response, **self._cursor_args)
|
||||
if IS_PYMONGO_3:
|
||||
if full_response:
|
||||
msg = "With PyMongo 3+, it is not possible anymore to get the full response."
|
||||
warnings.warn(msg, DeprecationWarning)
|
||||
if remove:
|
||||
result = queryset._collection.find_one_and_delete(
|
||||
query, sort=sort, **self._cursor_args)
|
||||
else:
|
||||
if new:
|
||||
return_doc = ReturnDocument.AFTER
|
||||
else:
|
||||
return_doc = ReturnDocument.BEFORE
|
||||
result = queryset._collection.find_one_and_update(
|
||||
query, update, upsert=upsert, sort=sort, return_document=return_doc,
|
||||
**self._cursor_args)
|
||||
|
||||
else:
|
||||
result = queryset._collection.find_and_modify(
|
||||
query, update, upsert=upsert, sort=sort, remove=remove, new=new,
|
||||
full_response=full_response, **self._cursor_args)
|
||||
except pymongo.errors.DuplicateKeyError, err:
|
||||
raise NotUniqueError(u"Update failed (%s)" % err)
|
||||
except pymongo.errors.OperationFailure, err:
|
||||
@@ -619,7 +595,10 @@ class BaseQuerySet(object):
|
||||
doc_map[doc['_id']] = self._get_as_pymongo(doc)
|
||||
else:
|
||||
for doc in docs:
|
||||
doc_map[doc['_id']] = self._document._from_son(doc, only_fields=self.only_fields)
|
||||
doc_map[doc['_id']] = self._document._from_son(
|
||||
doc,
|
||||
only_fields=self.only_fields,
|
||||
_auto_dereference=self._auto_dereference)
|
||||
|
||||
return doc_map
|
||||
|
||||
@@ -639,7 +618,8 @@ class BaseQuerySet(object):
|
||||
return self
|
||||
|
||||
def using(self, alias):
|
||||
"""This method is for controlling which database the QuerySet will be evaluated against if you are using more than one database.
|
||||
"""This method is for controlling which database the QuerySet will be
|
||||
evaluated against if you are using more than one database.
|
||||
|
||||
:param alias: The database alias
|
||||
|
||||
@@ -672,7 +652,7 @@ class BaseQuerySet(object):
|
||||
'_timeout', '_class_check', '_slave_okay', '_read_preference',
|
||||
'_iter', '_scalar', '_as_pymongo', '_as_pymongo_coerce',
|
||||
'_limit', '_skip', '_hint', '_auto_dereference',
|
||||
'_search_text', '_include_text_scores', 'only_fields')
|
||||
'_search_text', 'only_fields', '_max_time_ms')
|
||||
|
||||
for prop in copy_props:
|
||||
val = getattr(self, prop)
|
||||
@@ -758,14 +738,29 @@ class BaseQuerySet(object):
|
||||
distinct = self._dereference(queryset._cursor.distinct(field), 1,
|
||||
name=field, instance=self._document)
|
||||
|
||||
# We may need to cast to the correct type eg.
|
||||
# ListField(EmbeddedDocumentField)
|
||||
doc_field = getattr(
|
||||
self._document._fields.get(field), "field", None)
|
||||
instance = getattr(doc_field, "document_type", False)
|
||||
doc_field = self._document._fields.get(field.split('.', 1)[0])
|
||||
instance = False
|
||||
# We may need to cast to the correct type eg. ListField(EmbeddedDocumentField)
|
||||
EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
|
||||
GenericEmbeddedDocumentField = _import_class(
|
||||
'GenericEmbeddedDocumentField')
|
||||
ListField = _import_class('ListField')
|
||||
GenericEmbeddedDocumentField = _import_class('GenericEmbeddedDocumentField')
|
||||
if isinstance(doc_field, ListField):
|
||||
doc_field = getattr(doc_field, "field", doc_field)
|
||||
if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)):
|
||||
instance = getattr(doc_field, "document_type", False)
|
||||
# handle distinct on subdocuments
|
||||
if '.' in field:
|
||||
for field_part in field.split('.')[1:]:
|
||||
# if looping on embedded document, get the document type instance
|
||||
if instance and isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)):
|
||||
doc_field = instance
|
||||
# now get the subdocument
|
||||
doc_field = getattr(doc_field, field_part, doc_field)
|
||||
# We may need to cast to the correct type eg. ListField(EmbeddedDocumentField)
|
||||
if isinstance(doc_field, ListField):
|
||||
doc_field = getattr(doc_field, "field", doc_field)
|
||||
if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)):
|
||||
instance = getattr(doc_field, "document_type", False)
|
||||
if instance and isinstance(doc_field, (EmbeddedDocumentField,
|
||||
GenericEmbeddedDocumentField)):
|
||||
distinct = [instance(**doc) for doc in distinct]
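# With the dotted-field handling above, distinct() works directly on embedded
# document fields. A sketch (the Person/Address documents are assumptions):
cities = Person.objects.distinct('address.city')   # plain values from the subdocument
addresses = Person.objects.distinct('address')      # cast back to the embedded document type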
|
||||
@@ -835,7 +830,6 @@ class BaseQuerySet(object):
|
||||
cleaned_fields = []
|
||||
for key, value in kwargs.items():
|
||||
parts = key.split('__')
|
||||
op = None
|
||||
if parts[0] in operators:
|
||||
op = parts.pop(0)
|
||||
value = {'$' + op: value}
|
||||
@@ -888,13 +882,18 @@ class BaseQuerySet(object):
|
||||
plan = pprint.pformat(plan)
|
||||
return plan
|
||||
|
||||
# DEPRECATED. Has no more impact on PyMongo 3+
|
||||
def snapshot(self, enabled):
|
||||
"""Enable or disable snapshot mode when querying.
|
||||
|
||||
:param enabled: whether or not snapshot mode is enabled
|
||||
|
||||
..versionchanged:: 0.5 - made chainable
|
||||
.. deprecated:: Ignored with PyMongo 3+
|
||||
"""
|
||||
if IS_PYMONGO_3:
|
||||
msg = "snapshot is deprecated as it has no impact when using PyMongo 3+."
|
||||
warnings.warn(msg, DeprecationWarning)
|
||||
queryset = self.clone()
|
||||
queryset._snapshot = enabled
|
||||
return queryset
|
||||
@@ -910,11 +909,17 @@ class BaseQuerySet(object):
|
||||
queryset._timeout = enabled
|
||||
return queryset
|
||||
|
||||
# DEPRECATED. Has no more impact on PyMongo 3+
|
||||
def slave_okay(self, enabled):
|
||||
"""Enable or disable the slave_okay when querying.
|
||||
|
||||
:param enabled: whether or not the slave_okay is enabled
|
||||
|
||||
.. deprecated:: Ignored with PyMongo 3+
|
||||
"""
|
||||
if IS_PYMONGO_3:
|
||||
msg = "slave_okay is deprecated as it has no impact when using PyMongo 3+."
|
||||
warnings.warn(msg, DeprecationWarning)
|
||||
queryset = self.clone()
|
||||
queryset._slave_okay = enabled
|
||||
return queryset
|
||||
@@ -961,7 +966,7 @@ class BaseQuerySet(object):
|
||||
"""Instead of returning Document instances, return raw values from
|
||||
pymongo.
|
||||
|
||||
:param coerce_type: Field types (if applicable) would be use to
|
||||
:param coerce_types: Field types (if applicable) will be used to
|
||||
coerce types.
|
||||
"""
|
||||
queryset = self.clone()
|
||||
@@ -969,6 +974,13 @@ class BaseQuerySet(object):
|
||||
queryset._as_pymongo_coerce = coerce_types
|
||||
return queryset
|
||||
|
||||
def max_time_ms(self, ms):
|
||||
"""Wait `ms` milliseconds before killing the query on the server
|
||||
|
||||
:param ms: the number of milliseconds before killing the query on the server
|
||||
"""
|
||||
return self._chainable_method("max_time_ms", ms)
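# Sketch: cap server-side execution of an expensive query at 500 ms
# (``Person`` is an assumed document; the limit is enforced by the server).
adults = Person.objects(age__gte=18).max_time_ms(500)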
|
||||
|
||||
# JSON Helpers
|
||||
|
||||
def to_json(self, *args, **kwargs):
|
||||
@@ -982,8 +994,8 @@ class BaseQuerySet(object):
|
||||
|
||||
def aggregate(self, *pipeline, **kwargs):
|
||||
"""
|
||||
Perform a aggreggate function based in your queryset params
|
||||
:param pipeline: list of agreggation commands,
|
||||
Perform an aggregate function based on your queryset params
|
||||
:param pipeline: list of aggregation commands,\
|
||||
see: http://docs.mongodb.org/manual/core/aggregation-pipeline/
|
||||
|
||||
.. versionadded:: 0.9
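# Sketch of the aggregate() helper: the supplied pipeline is applied on top of
# the queryset's current filter (``Person`` and its fields are assumptions).
pipeline = [{'$group': {'_id': '$country', 'count': {'$sum': 1}}}]
per_country = list(Person.objects(age__gte=18).aggregate(*pipeline))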
|
||||
@@ -1236,6 +1248,27 @@ class BaseQuerySet(object):
|
||||
else:
|
||||
return 0
|
||||
|
||||
def aggregate_sum(self, field):
|
||||
"""Sum over the values of the specified field.
|
||||
|
||||
:param field: the field to sum over; use dot-notation to refer to
|
||||
embedded document fields
|
||||
|
||||
This method is more performant than the regular `sum`, because it uses
|
||||
the aggregation framework instead of map-reduce.
|
||||
"""
|
||||
result = self._document._get_collection().aggregate([
|
||||
{'$match': self._query},
|
||||
{'$group': {'_id': 'sum', 'total': {'$sum': '$' + field}}}
|
||||
])
|
||||
if IS_PYMONGO_3:
|
||||
result = list(result)
|
||||
else:
|
||||
result = result.get('result')
|
||||
if result:
|
||||
return result[0]['total']
|
||||
return 0
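# Sketch comparing the two summing strategies (``Person``/``age`` assumed):
total_age = Person.objects(age__gte=18).aggregate_sum('age')  # aggregation framework
total_age_mr = Person.objects(age__gte=18).sum('age')         # legacy map-reduce path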
|
||||
|
||||
def average(self, field):
|
||||
"""Average over the values of the specified field.
|
||||
|
||||
@@ -1291,6 +1324,27 @@ class BaseQuerySet(object):
|
||||
else:
|
||||
return 0
|
||||
|
||||
def aggregate_average(self, field):
|
||||
"""Average over the values of the specified field.
|
||||
|
||||
:param field: the field to average over; use dot-notation to refer to
|
||||
embedded document fields
|
||||
|
||||
This method is more performant than the regular `average`, because it
|
||||
uses the aggregation framework instead of map-reduce.
|
||||
"""
|
||||
result = self._document._get_collection().aggregate([
|
||||
{'$match': self._query},
|
||||
{'$group': {'_id': 'avg', 'total': {'$avg': '$' + field}}}
|
||||
])
|
||||
if IS_PYMONGO_3:
|
||||
result = list(result)
|
||||
else:
|
||||
result = result.get('result')
|
||||
if result:
|
||||
return result[0]['total']
|
||||
return 0
|
||||
|
||||
def item_frequencies(self, field, normalize=False, map_reduce=True):
|
||||
"""Returns a dictionary of all items present in a field across
|
||||
the whole queried set of documents, and their corresponding frequency.
|
||||
@@ -1331,6 +1385,7 @@ class BaseQuerySet(object):
|
||||
return self._get_as_pymongo(raw_doc)
|
||||
doc = self._document._from_son(raw_doc,
|
||||
_auto_dereference=self._auto_dereference, only_fields=self.only_fields)
|
||||
|
||||
if self._scalar:
|
||||
return self._get_scalar(doc)
|
||||
|
||||
@@ -1339,6 +1394,7 @@ class BaseQuerySet(object):
|
||||
def rewind(self):
|
||||
"""Rewind the cursor to its unevaluated state.
|
||||
|
||||
|
||||
.. versionadded:: 0.3
|
||||
"""
|
||||
self._iter = False
|
||||
@@ -1355,22 +1411,34 @@ class BaseQuerySet(object):
|
||||
|
||||
@property
|
||||
def _cursor_args(self):
|
||||
cursor_args = {
|
||||
'snapshot': self._snapshot,
|
||||
'timeout': self._timeout
|
||||
}
|
||||
if self._read_preference is not None:
|
||||
cursor_args['read_preference'] = self._read_preference
|
||||
if not IS_PYMONGO_3:
|
||||
fields_name = 'fields'
|
||||
cursor_args = {
|
||||
'timeout': self._timeout,
|
||||
'snapshot': self._snapshot
|
||||
}
|
||||
if self._read_preference is not None:
|
||||
cursor_args['read_preference'] = self._read_preference
|
||||
else:
|
||||
cursor_args['slave_okay'] = self._slave_okay
|
||||
else:
|
||||
cursor_args['slave_okay'] = self._slave_okay
|
||||
fields_name = 'projection'
|
||||
# snapshot is not handled at all by PyMongo 3+
|
||||
# TODO: evaluate similar possibilities using modifiers
|
||||
if self._snapshot:
|
||||
msg = "The snapshot option is not anymore available with PyMongo 3+"
|
||||
warnings.warn(msg, DeprecationWarning)
|
||||
cursor_args = {
|
||||
'no_cursor_timeout': self._timeout
|
||||
}
|
||||
if self._loaded_fields:
|
||||
cursor_args['fields'] = self._loaded_fields.as_dict()
|
||||
cursor_args[fields_name] = self._loaded_fields.as_dict()
|
||||
|
||||
if self._include_text_scores:
|
||||
if 'fields' not in cursor_args:
|
||||
cursor_args['fields'] = {}
|
||||
if self._search_text:
|
||||
if fields_name not in cursor_args:
|
||||
cursor_args[fields_name] = {}
|
||||
|
||||
cursor_args['fields']['text_score'] = {'$meta': "textScore"}
|
||||
cursor_args[fields_name]['_text_score'] = {'$meta': "textScore"}
|
||||
|
||||
return cursor_args
|
||||
|
||||
@@ -1413,8 +1481,11 @@ class BaseQuerySet(object):
|
||||
def _query(self):
|
||||
if self._mongo_query is None:
|
||||
self._mongo_query = self._query_obj.to_query(self._document)
|
||||
if self._class_check:
|
||||
self._mongo_query.update(self._initial_query)
|
||||
if self._class_check and self._initial_query:
|
||||
if "_cls" in self._mongo_query:
|
||||
self._mongo_query = {"$and": [self._initial_query, self._mongo_query]}
|
||||
else:
|
||||
self._mongo_query.update(self._initial_query)
|
||||
return self._mongo_query
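# The $and wrapping above keeps an explicit _cls condition from being clobbered
# by the implicit inheritance filter. Illustration (``Person`` with inheritance
# enabled is an assumption; __raw__ is used here to query _cls directly):
qs = Person.objects(__raw__={'_cls': 'Person.Employee'})
# qs._query is then roughly:
# {'$and': [{'_cls': {'$in': ('Person', 'Person.Employee')}},
#           {'_cls': 'Person.Employee'}]}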
|
||||
|
||||
@property
|
||||
@@ -1544,7 +1615,7 @@ class BaseQuerySet(object):
|
||||
|
||||
return frequencies
|
||||
|
||||
def _fields_to_dbfields(self, fields, subdoc=False):
|
||||
def _fields_to_dbfields(self, fields):
|
||||
"""Translate fields paths to its db equivalents"""
|
||||
ret = []
|
||||
subclasses = []
|
||||
@@ -1566,7 +1637,7 @@ class BaseQuerySet(object):
|
||||
ret.append(subfield)
|
||||
found = True
|
||||
break
|
||||
except LookUpError, e:
|
||||
except LookUpError:
|
||||
pass
|
||||
|
||||
if not found:
|
||||
@@ -1582,9 +1653,7 @@ class BaseQuerySet(object):
|
||||
continue
|
||||
|
||||
if key == '$text_score':
|
||||
# automatically set to include text scores
|
||||
self._include_text_scores = True
|
||||
key_list.append(('text_score', {'$meta': "textScore"}))
|
||||
key_list.append(('_text_score', {'$meta': "textScore"}))
|
||||
continue
|
||||
|
||||
direction = pymongo.ASCENDING
|
||||
@@ -1697,6 +1766,13 @@ class BaseQuerySet(object):
|
||||
code)
|
||||
return code
|
||||
|
||||
def _chainable_method(self, method_name, val):
|
||||
queryset = self.clone()
|
||||
method = getattr(queryset._cursor, method_name)
|
||||
method(val)
|
||||
setattr(queryset, "_" + method_name, val)
|
||||
return queryset
|
||||
|
||||
# Deprecated
|
||||
def ensure_index(self, **kwargs):
|
||||
"""Deprecated use :func:`Document.ensure_index`"""
|
||||
|
@@ -1,4 +1,3 @@
|
||||
|
||||
__all__ = ('QueryFieldList',)
|
||||
|
||||
|
||||
|
@@ -61,7 +61,6 @@ class QuerySet(BaseQuerySet):
|
||||
data[-1] = "...(remaining elements truncated)..."
|
||||
return repr(data)
|
||||
|
||||
|
||||
def _iter_results(self):
|
||||
"""A generator for iterating over the result cache.
|
||||
|
||||
@@ -74,7 +73,7 @@ class QuerySet(BaseQuerySet):
|
||||
upper = len(self._result_cache)
|
||||
while pos < upper:
|
||||
yield self._result_cache[pos]
|
||||
pos = pos + 1
|
||||
pos += 1
|
||||
if not self._has_more:
|
||||
raise StopIteration
|
||||
if len(self._result_cache) <= pos:
|
||||
@@ -94,7 +93,7 @@ class QuerySet(BaseQuerySet):
|
||||
except StopIteration:
|
||||
self._has_more = False
|
||||
|
||||
def count(self, with_limit_and_skip=True):
|
||||
def count(self, with_limit_and_skip=False):
|
||||
"""Count the selected elements in the query.
|
||||
|
||||
:param with_limit_and_skip (optional): take any :meth:`limit` or
|
||||
@@ -161,4 +160,4 @@ class QuerySetNoDeRef(QuerySet):
|
||||
"""Special no_dereference QuerySet"""
|
||||
|
||||
def __dereference(items, max_depth=1, instance=None, name=None):
|
||||
return items
|
||||
return items
|
||||
|
@@ -3,18 +3,19 @@ from collections import defaultdict
|
||||
import pymongo
|
||||
from bson import SON
|
||||
|
||||
from mongoengine.base.fields import UPDATE_OPERATORS
|
||||
from mongoengine.connection import get_connection
|
||||
from mongoengine.common import _import_class
|
||||
from mongoengine.errors import InvalidQueryError, LookUpError
|
||||
from mongoengine.errors import InvalidQueryError
|
||||
from mongoengine.python_support import IS_PYMONGO_3
|
||||
|
||||
__all__ = ('query', 'update')
|
||||
|
||||
|
||||
COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
|
||||
'all', 'size', 'exists', 'not', 'elemMatch')
|
||||
'all', 'size', 'exists', 'not', 'elemMatch', 'type')
|
||||
GEO_OPERATORS = ('within_distance', 'within_spherical_distance',
|
||||
'within_box', 'within_polygon', 'near', 'near_sphere',
|
||||
'max_distance', 'geo_within', 'geo_within_box',
|
||||
'max_distance', 'min_distance', 'geo_within', 'geo_within_box',
|
||||
'geo_within_polygon', 'geo_within_center',
|
||||
'geo_within_sphere', 'geo_intersects')
|
||||
STRING_OPERATORS = ('contains', 'icontains', 'startswith',
|
||||
@@ -24,12 +25,8 @@ CUSTOM_OPERATORS = ('match',)
|
||||
MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS +
|
||||
STRING_OPERATORS + CUSTOM_OPERATORS)
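# The newly whitelisted ``type`` operator maps to MongoDB's $type. Sketch
# (``Person.age`` is an assumption; 16 is the BSON code for a 32-bit integer):
int32_aged = Person.objects(age__type=16)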
|
||||
|
||||
UPDATE_OPERATORS = ('set', 'unset', 'inc', 'dec', 'pop', 'push',
|
||||
'push_all', 'pull', 'pull_all', 'add_to_set',
|
||||
'set_on_insert')
|
||||
|
||||
|
||||
def query(_doc_cls=None, _field_operation=False, **query):
|
||||
def query(_doc_cls=None, **query):
|
||||
"""Transform a query from Django-style format to Mongo format.
|
||||
"""
|
||||
mongo_query = {}
|
||||
@@ -47,6 +44,10 @@ def query(_doc_cls=None, _field_operation=False, **query):
|
||||
if len(parts) > 1 and parts[-1] in MATCH_OPERATORS:
|
||||
op = parts.pop()
|
||||
|
||||
# Allow escaping an operator-like field name with a trailing __
|
||||
if len(parts) > 1 and parts[-1] == "":
|
||||
parts.pop()
|
||||
|
||||
negate = False
|
||||
if len(parts) > 1 and parts[-1] == 'not':
|
||||
parts.pop()
|
||||
@@ -125,29 +126,34 @@ def query(_doc_cls=None, _field_operation=False, **query):
|
||||
elif key in mongo_query:
|
||||
if key in mongo_query and isinstance(mongo_query[key], dict):
|
||||
mongo_query[key].update(value)
|
||||
# $maxDistance needs to come last - convert to SON
|
||||
# $max/minDistance needs to come last - convert to SON
|
||||
value_dict = mongo_query[key]
|
||||
if ('$maxDistance' in value_dict and '$near' in value_dict):
|
||||
if ('$maxDistance' in value_dict or '$minDistance' in value_dict) and \
|
||||
('$near' in value_dict or '$nearSphere' in value_dict):
|
||||
value_son = SON()
|
||||
if isinstance(value_dict['$near'], dict):
|
||||
for k, v in value_dict.iteritems():
|
||||
if k == '$maxDistance':
|
||||
continue
|
||||
value_son[k] = v
|
||||
if (get_connection().max_wire_version <= 1):
|
||||
value_son['$maxDistance'] = value_dict[
|
||||
'$maxDistance']
|
||||
else:
|
||||
value_son['$near'] = SON(value_son['$near'])
|
||||
value_son['$near'][
|
||||
'$maxDistance'] = value_dict['$maxDistance']
|
||||
else:
|
||||
for k, v in value_dict.iteritems():
|
||||
if k == '$maxDistance':
|
||||
continue
|
||||
value_son[k] = v
|
||||
value_son['$maxDistance'] = value_dict['$maxDistance']
|
||||
|
||||
for k, v in value_dict.iteritems():
|
||||
if k == '$maxDistance' or k == '$minDistance':
|
||||
continue
|
||||
value_son[k] = v
|
||||
# Required for MongoDB >= 2.6, may fail when combining
|
||||
# PyMongo 3+ and MongoDB < 2.6
|
||||
near_embedded = False
|
||||
for near_op in ('$near', '$nearSphere'):
|
||||
if isinstance(value_dict.get(near_op), dict) and (
|
||||
IS_PYMONGO_3 or get_connection().max_wire_version > 1):
|
||||
value_son[near_op] = SON(value_son[near_op])
|
||||
if '$maxDistance' in value_dict:
|
||||
value_son[near_op][
|
||||
'$maxDistance'] = value_dict['$maxDistance']
|
||||
if '$minDistance' in value_dict:
|
||||
value_son[near_op][
|
||||
'$minDistance'] = value_dict['$minDistance']
|
||||
near_embedded = True
|
||||
if not near_embedded:
|
||||
if '$maxDistance' in value_dict:
|
||||
value_son['$maxDistance'] = value_dict['$maxDistance']
|
||||
if '$minDistance' in value_dict:
|
||||
value_son['$minDistance'] = value_dict['$minDistance']
|
||||
mongo_query[key] = value_son
|
||||
else:
|
||||
# Store for manually merging later
|
||||
@@ -160,7 +166,7 @@ def query(_doc_cls=None, _field_operation=False, **query):
|
||||
if isinstance(v, list):
|
||||
value = [{k: val} for val in v]
|
||||
if '$and' in mongo_query.keys():
|
||||
mongo_query['$and'].append(value)
|
||||
mongo_query['$and'].extend(value)
|
||||
else:
|
||||
mongo_query['$and'] = value
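# The $maxDistance/$minDistance reordering above is what queries like the
# following rely on (sketch; ``Event`` with a PointField ``location`` is an
# assumption; distances are in meters for a 2dsphere index):
nearby = Event.objects(location__near=[-122.42, 37.77],
                       location__min_distance=100,
                       location__max_distance=5000)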
|
||||
|
||||
@@ -301,7 +307,11 @@ def update(_doc_cls=None, **update):
|
||||
|
||||
def _geo_operator(field, op, value):
|
||||
"""Helper to return the query for a given geo query"""
|
||||
if field._geo_index == pymongo.GEO2D:
|
||||
if op == "max_distance":
|
||||
value = {'$maxDistance': value}
|
||||
elif op == "min_distance":
|
||||
value = {'$minDistance': value}
|
||||
elif field._geo_index == pymongo.GEO2D:
|
||||
if op == "within_distance":
|
||||
value = {'$within': {'$center': value}}
|
||||
elif op == "within_spherical_distance":
|
||||
@@ -314,8 +324,6 @@ def _geo_operator(field, op, value):
|
||||
value = {'$nearSphere': value}
|
||||
elif op == 'within_box':
|
||||
value = {'$within': {'$box': value}}
|
||||
elif op == "max_distance":
|
||||
value = {'$maxDistance': value}
|
||||
else:
|
||||
raise NotImplementedError("Geo method '%s' has not "
|
||||
"been implemented for a GeoPointField" % op)
|
||||
@@ -334,8 +342,6 @@ def _geo_operator(field, op, value):
|
||||
value = {"$geoIntersects": _infer_geometry(value)}
|
||||
elif op == "near":
|
||||
value = {'$near': _infer_geometry(value)}
|
||||
elif op == "max_distance":
|
||||
value = {'$maxDistance': value}
|
||||
else:
|
||||
raise NotImplementedError("Geo method '%s' has not "
|
||||
"been implemented for a %s " % (op, field._name))
|
||||
@@ -352,6 +358,7 @@ def _infer_geometry(value):
|
||||
raise InvalidQueryError("Invalid $geometry dictionary should have "
|
||||
"type and coordinates keys")
|
||||
elif isinstance(value, (list, set)):
|
||||
# TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon?
|
||||
try:
|
||||
value[0][0][0]
|
||||
return {"$geometry": {"type": "Polygon", "coordinates": value}}
|
||||
|
@@ -1,8 +1,5 @@
|
||||
import copy
|
||||
|
||||
from itertools import product
|
||||
from functools import reduce
|
||||
|
||||
from mongoengine.errors import InvalidQueryError
|
||||
from mongoengine.queryset import transform
|
||||
|
||||
@@ -29,7 +26,7 @@ class DuplicateQueryConditionsError(InvalidQueryError):
|
||||
|
||||
|
||||
class SimplificationVisitor(QNodeVisitor):
|
||||
"""Simplifies query trees by combinging unnecessary 'and' connection nodes
|
||||
"""Simplifies query trees by combining unnecessary 'and' connection nodes
|
||||
into a single Q-object.
|
||||
"""
|
||||
|
||||
|
@@ -6,6 +6,7 @@ __all__ = ['pre_init', 'post_init', 'pre_save', 'pre_save_post_validation',
|
||||
signals_available = False
|
||||
try:
|
||||
from blinker import Namespace
|
||||
|
||||
signals_available = True
|
||||
except ImportError:
|
||||
class Namespace(object):
|
||||
@@ -27,6 +28,7 @@ except ImportError:
|
||||
raise RuntimeError('signalling support is unavailable '
|
||||
'because the blinker library is '
|
||||
'not installed.')
|
||||
|
||||
send = lambda *a, **kw: None
|
||||
connect = disconnect = has_receivers_for = receivers_for = \
|
||||
temporarily_connected_to = _fail
|
||||
|
@@ -1 +1,2 @@
|
||||
pymongo>=2.7.1
|
||||
nose
|
||||
|
setup.cfg (15 changed lines)
@@ -1,11 +1,8 @@
|
||||
[nosetests]
|
||||
verbosity = 3
|
||||
rednose = 1
|
||||
verbosity = 2
|
||||
detailed-errors = 1
|
||||
#with-coverage = 1
|
||||
#cover-erase = 1
|
||||
#cover-html = 1
|
||||
#cover-html-dir = ../htmlcov
|
||||
#cover-package = mongoengine
|
||||
py3where = build
|
||||
where = tests
|
||||
#tests = document/__init__.py
|
||||
cover-erase = 1
|
||||
cover-branches = 1
|
||||
cover-package = mongoengine
|
||||
tests = tests
|
||||
|
setup.py (9 changed lines)
@@ -29,7 +29,6 @@ init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py')
|
||||
version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0]
|
||||
|
||||
VERSION = get_version(eval(version_line.split('=')[-1]))
|
||||
print(VERSION)
|
||||
|
||||
CLASSIFIERS = [
|
||||
'Development Status :: 4 - Beta',
|
||||
@@ -38,7 +37,7 @@ CLASSIFIERS = [
|
||||
'Operating System :: OS Independent',
|
||||
'Programming Language :: Python',
|
||||
"Programming Language :: Python :: 2",
|
||||
"Programming Language :: Python :: 2.6.6",
|
||||
"Programming Language :: Python :: 2.6",
|
||||
"Programming Language :: Python :: 2.7",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.2",
|
||||
@@ -53,12 +52,13 @@ CLASSIFIERS = [
|
||||
extra_opts = {"packages": find_packages(exclude=["tests", "tests.*"])}
|
||||
if sys.version_info[0] == 3:
|
||||
extra_opts['use_2to3'] = True
|
||||
extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2==2.6', 'Pillow>=2.0.0', 'django>=1.5.1']
|
||||
extra_opts['tests_require'] = ['nose', 'coverage==3.7.1', 'blinker', 'Pillow>=2.0.0']
|
||||
if "test" in sys.argv or "nosetests" in sys.argv:
|
||||
extra_opts['packages'] = find_packages()
|
||||
extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]}
|
||||
else:
|
||||
extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'Pillow>=2.0.0', 'jinja2>=2.6', 'python-dateutil']
|
||||
# coverage 4 does not support Python 3.2 anymore
|
||||
extra_opts['tests_require'] = ['nose', 'coverage==3.7.1', 'blinker', 'Pillow>=2.0.0', 'python-dateutil']
|
||||
|
||||
if sys.version_info[0] == 2 and sys.version_info[1] == 6:
|
||||
extra_opts['tests_require'].append('unittest2')
|
||||
@@ -79,5 +79,6 @@ setup(name='mongoengine',
|
||||
classifiers=CLASSIFIERS,
|
||||
install_requires=['pymongo>=2.7.1'],
|
||||
test_suite='nose.collector',
|
||||
setup_requires=['nose', 'rednose'], # Allow proper nose usage with setuptols and tox
|
||||
**extra_opts
|
||||
)
|
||||
|
@@ -36,9 +36,9 @@ class ClassMethodsTest(unittest.TestCase):
|
||||
def test_definition(self):
|
||||
"""Ensure that document may be defined using fields.
|
||||
"""
|
||||
self.assertEqual(['age', 'id', 'name'],
|
||||
self.assertEqual(['_cls', 'age', 'id', 'name'],
|
||||
sorted(self.Person._fields.keys()))
|
||||
self.assertEqual(["IntField", "ObjectIdField", "StringField"],
|
||||
self.assertEqual(["IntField", "ObjectIdField", "StringField", "StringField"],
|
||||
sorted([x.__class__.__name__ for x in
|
||||
self.Person._fields.values()]))
|
||||
|
||||
|
@@ -93,6 +93,7 @@ class DeltaTest(unittest.TestCase):
|
||||
def delta_recursive(self, DocClass, EmbeddedClass):
|
||||
|
||||
class Embedded(EmbeddedClass):
|
||||
id = StringField()
|
||||
string_field = StringField()
|
||||
int_field = IntField()
|
||||
dict_field = DictField()
|
||||
@@ -114,6 +115,7 @@ class DeltaTest(unittest.TestCase):
|
||||
self.assertEqual(doc._delta(), ({}, {}))
|
||||
|
||||
embedded_1 = Embedded()
|
||||
embedded_1.id = "010101"
|
||||
embedded_1.string_field = 'hello'
|
||||
embedded_1.int_field = 1
|
||||
embedded_1.dict_field = {'hello': 'world'}
|
||||
@@ -123,6 +125,7 @@ class DeltaTest(unittest.TestCase):
|
||||
self.assertEqual(doc._get_changed_fields(), ['embedded_field'])
|
||||
|
||||
embedded_delta = {
|
||||
'id': "010101",
|
||||
'string_field': 'hello',
|
||||
'int_field': 1,
|
||||
'dict_field': {'hello': 'world'},
|
||||
@@ -250,13 +253,13 @@ class DeltaTest(unittest.TestCase):
|
||||
self.assertEqual(doc.embedded_field.list_field[2].list_field,
|
||||
[1, 2, {'hello': 'world'}])
|
||||
|
||||
del(doc.embedded_field.list_field[2].list_field[2]['hello'])
|
||||
del doc.embedded_field.list_field[2].list_field[2]['hello']
|
||||
self.assertEqual(doc._delta(),
|
||||
({}, {'embedded_field.list_field.2.list_field.2.hello': 1}))
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
del(doc.embedded_field.list_field[2].list_field)
|
||||
del doc.embedded_field.list_field[2].list_field
|
||||
self.assertEqual(doc._delta(),
|
||||
({}, {'embedded_field.list_field.2.list_field': 1}))
|
||||
|
||||
@@ -590,13 +593,13 @@ class DeltaTest(unittest.TestCase):
|
||||
self.assertEqual(doc.embedded_field.list_field[2].list_field,
|
||||
[1, 2, {'hello': 'world'}])
|
||||
|
||||
del(doc.embedded_field.list_field[2].list_field[2]['hello'])
|
||||
del doc.embedded_field.list_field[2].list_field[2]['hello']
|
||||
self.assertEqual(doc._delta(),
|
||||
({}, {'db_embedded_field.db_list_field.2.db_list_field.2.hello': 1}))
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
del(doc.embedded_field.list_field[2].list_field)
|
||||
del doc.embedded_field.list_field[2].list_field
|
||||
self.assertEqual(doc._delta(), ({},
|
||||
{'db_embedded_field.db_list_field.2.db_list_field': 1}))
|
||||
|
||||
@@ -612,7 +615,7 @@ class DeltaTest(unittest.TestCase):
|
||||
SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))
|
||||
|
||||
p.doc = 123
|
||||
del(p.doc)
|
||||
del p.doc
|
||||
self.assertEqual(p._delta(), (
|
||||
SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))
|
||||
|
||||
@@ -732,6 +735,56 @@ class DeltaTest(unittest.TestCase):
|
||||
mydoc._clear_changed_fields()
|
||||
self.assertEqual([], mydoc._get_changed_fields())
|
||||
|
||||
def test_lower_level_mark_as_changed(self):
|
||||
class EmbeddedDoc(EmbeddedDocument):
|
||||
name = StringField()
|
||||
|
||||
class MyDoc(Document):
|
||||
subs = MapField(EmbeddedDocumentField(EmbeddedDoc))
|
||||
|
||||
MyDoc.drop_collection()
|
||||
|
||||
MyDoc().save()
|
||||
|
||||
mydoc = MyDoc.objects.first()
|
||||
mydoc.subs['a'] = EmbeddedDoc()
|
||||
self.assertEqual(["subs.a"], mydoc._get_changed_fields())
|
||||
|
||||
subdoc = mydoc.subs['a']
|
||||
subdoc.name = 'bar'
|
||||
|
||||
self.assertEqual(["name"], subdoc._get_changed_fields())
|
||||
self.assertEqual(["subs.a"], mydoc._get_changed_fields())
|
||||
mydoc.save()
|
||||
|
||||
mydoc._clear_changed_fields()
|
||||
self.assertEqual([], mydoc._get_changed_fields())
|
||||
|
||||
def test_upper_level_mark_as_changed(self):
|
||||
class EmbeddedDoc(EmbeddedDocument):
|
||||
name = StringField()
|
||||
|
||||
class MyDoc(Document):
|
||||
subs = MapField(EmbeddedDocumentField(EmbeddedDoc))
|
||||
|
||||
MyDoc.drop_collection()
|
||||
|
||||
MyDoc(subs={'a': EmbeddedDoc(name='foo')}).save()
|
||||
|
||||
mydoc = MyDoc.objects.first()
|
||||
subdoc = mydoc.subs['a']
|
||||
subdoc.name = 'bar'
|
||||
|
||||
self.assertEqual(["name"], subdoc._get_changed_fields())
|
||||
self.assertEqual(["subs.a.name"], mydoc._get_changed_fields())
|
||||
|
||||
mydoc.subs['a'] = EmbeddedDoc()
|
||||
self.assertEqual(["subs.a"], mydoc._get_changed_fields())
|
||||
mydoc.save()
|
||||
|
||||
mydoc._clear_changed_fields()
|
||||
self.assertEqual([], mydoc._get_changed_fields())
|
||||
|
||||
def test_referenced_object_changed_attributes(self):
|
||||
"""Ensures that when you save a new reference to a field, the referenced object isn't altered"""
|
||||
|
||||
@@ -774,5 +827,43 @@ class DeltaTest(unittest.TestCase):
|
||||
org2.reload()
|
||||
self.assertEqual(org2.name, 'New Org 2')
|
||||
|
||||
def test_delta_for_nested_map_fields(self):
|
||||
class UInfoDocument(Document):
|
||||
phone = StringField()
|
||||
|
||||
class EmbeddedRole(EmbeddedDocument):
|
||||
type = StringField()
|
||||
|
||||
class EmbeddedUser(EmbeddedDocument):
|
||||
name = StringField()
|
||||
roles = MapField(field=EmbeddedDocumentField(EmbeddedRole))
|
||||
rolist = ListField(field=EmbeddedDocumentField(EmbeddedRole))
|
||||
info = ReferenceField(UInfoDocument)
|
||||
|
||||
class Doc(Document):
|
||||
users = MapField(field=EmbeddedDocumentField(EmbeddedUser))
|
||||
num = IntField(default=-1)
|
||||
|
||||
Doc.drop_collection()
|
||||
|
||||
doc = Doc(num=1)
|
||||
doc.users["007"] = EmbeddedUser(name="Agent007")
|
||||
doc.save()
|
||||
|
||||
uinfo = UInfoDocument(phone="79089269066")
|
||||
uinfo.save()
|
||||
|
||||
d = Doc.objects(num=1).first()
|
||||
d.users["007"]["roles"]["666"] = EmbeddedRole(type="superadmin")
|
||||
d.users["007"]["rolist"].append(EmbeddedRole(type="oops"))
|
||||
d.users["007"]["info"] = uinfo
|
||||
delta = d._delta()
|
||||
self.assertEqual(True, "users.007.roles.666" in delta[0])
|
||||
self.assertEqual(True, "users.007.rolist" in delta[0])
|
||||
self.assertEqual(True, "users.007.info" in delta[0])
|
||||
self.assertEqual('superadmin', delta[0]["users.007.roles.666"]["type"])
|
||||
self.assertEqual('oops', delta[0]["users.007.rolist"][0]["type"])
|
||||
self.assertEqual(uinfo.id, delta[0]["users.007.info"])
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
@@ -72,7 +72,7 @@ class DynamicTest(unittest.TestCase):
|
||||
obj = collection.find_one()
|
||||
self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name'])
|
||||
|
||||
del(p.misc)
|
||||
del p.misc
|
||||
p.save()
|
||||
|
||||
p = self.Person.objects.get()
|
||||
@@ -81,6 +81,13 @@ class DynamicTest(unittest.TestCase):
|
||||
obj = collection.find_one()
|
||||
self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'name'])
|
||||
|
||||
def test_reload_after_unsetting(self):
|
||||
p = self.Person()
|
||||
p.misc = 22
|
||||
p.save()
|
||||
p.update(unset__misc=1)
|
||||
p.reload()
|
||||
|
||||
def test_dynamic_document_queries(self):
|
||||
"""Ensure we can query dynamic fields"""
|
||||
p = self.Person()
|
||||
@@ -122,6 +129,15 @@ class DynamicTest(unittest.TestCase):
|
||||
|
||||
self.assertEqual(1, self.Person.objects(misc__hello='world').count())
|
||||
|
||||
def test_three_level_complex_data_lookups(self):
|
||||
"""Ensure you can query three level document dynamic fields"""
|
||||
p = self.Person()
|
||||
p.misc = {'hello': {'hello2': 'world'}}
|
||||
p.save()
|
||||
# from pprint import pprint as pp; import pdb; pdb.set_trace();
|
||||
print self.Person.objects(misc__hello__hello2='world')
|
||||
self.assertEqual(1, self.Person.objects(misc__hello__hello2='world').count())
|
||||
|
||||
def test_complex_embedded_document_validation(self):
|
||||
"""Ensure embedded dynamic documents may be validated"""
|
||||
class Embedded(DynamicEmbeddedDocument):
|
||||
@@ -324,7 +340,7 @@ class DynamicTest(unittest.TestCase):
|
||||
person = Person.objects.first()
|
||||
person.attrval = "This works"
|
||||
|
||||
person["phone"] = "555-1212" # but this should too
|
||||
person["phone"] = "555-1212" # but this should too
|
||||
|
||||
# Same thing two levels deep
|
||||
person["address"]["city"] = "Lundenne"
|
||||
@@ -340,7 +356,6 @@ class DynamicTest(unittest.TestCase):
|
||||
|
||||
self.assertEqual(Person.objects.first().address.city, "Londinium")
|
||||
|
||||
|
||||
person = Person.objects.first()
|
||||
person["age"] = 35
|
||||
person.save()
|
||||
|
@@ -1,9 +1,9 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import unittest
|
||||
import sys
|
||||
|
||||
sys.path[0:0] = [""]
|
||||
|
||||
import os
|
||||
import pymongo
|
||||
|
||||
from nose.plugins.skip import SkipTest
|
||||
@@ -18,7 +18,7 @@ __all__ = ("IndexesTest", )
|
||||
class IndexesTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
connect(db='mongoenginetest')
|
||||
self.connection = connect(db='mongoenginetest')
|
||||
self.db = get_db()
|
||||
|
||||
class Person(Document):
|
||||
@@ -143,7 +143,7 @@ class IndexesTest(unittest.TestCase):
|
||||
meta = {
|
||||
'indexes': [
|
||||
{
|
||||
'fields': ('title',),
|
||||
'fields': ('title',),
|
||||
},
|
||||
],
|
||||
'allow_inheritance': True,
|
||||
@@ -175,6 +175,16 @@ class IndexesTest(unittest.TestCase):
|
||||
info = A._get_collection().index_information()
|
||||
self.assertEqual(len(info.keys()), 2)
|
||||
|
||||
class B(A):
|
||||
c = StringField()
|
||||
d = StringField()
|
||||
meta = {
|
||||
'indexes': [{'fields': ['c']}, {'fields': ['d'], 'cls': True}],
|
||||
'allow_inheritance': True
|
||||
}
|
||||
self.assertEqual([('c', 1)], B._meta['index_specs'][1]['fields'])
|
||||
self.assertEqual([('_cls', 1), ('d', 1)], B._meta['index_specs'][2]['fields'])
|
||||
|
||||
def test_build_index_spec_is_not_destructive(self):
|
||||
|
||||
class MyDoc(Document):
|
||||
@@ -265,6 +275,60 @@ class IndexesTest(unittest.TestCase):
|
||||
info = [value['key'] for key, value in info.iteritems()]
|
||||
self.assertTrue([('current.location.point', '2d')] in info)
|
||||
|
||||
def test_explicit_geosphere_index(self):
|
||||
"""Ensure that geosphere indexes work when created via meta[indexes]
|
||||
"""
|
||||
class Place(Document):
|
||||
location = DictField()
|
||||
meta = {
|
||||
'allow_inheritance': True,
|
||||
'indexes': [
|
||||
'(location.point',
|
||||
]
|
||||
}
|
||||
|
||||
self.assertEqual([{'fields': [('location.point', '2dsphere')]}],
|
||||
Place._meta['index_specs'])
|
||||
|
||||
Place.ensure_indexes()
|
||||
info = Place._get_collection().index_information()
|
||||
info = [value['key'] for key, value in info.iteritems()]
|
||||
self.assertTrue([('location.point', '2dsphere')] in info)
|
||||
|
||||
def test_explicit_geohaystack_index(self):
|
||||
"""Ensure that geohaystack indexes work when created via meta[indexes]
|
||||
"""
|
||||
raise SkipTest('GeoHaystack index creation is not supported for now '
|
||||
'from meta, as it requires a bucketSize parameter.')
|
||||
|
||||
class Place(Document):
|
||||
location = DictField()
|
||||
name = StringField()
|
||||
meta = {
|
||||
'indexes': [
|
||||
(')location.point', 'name')
|
||||
]
|
||||
}
|
||||
self.assertEqual([{'fields': [('location.point', 'geoHaystack'), ('name', 1)]}],
|
||||
Place._meta['index_specs'])
|
||||
|
||||
Place.ensure_indexes()
|
||||
info = Place._get_collection().index_information()
|
||||
info = [value['key'] for key, value in info.iteritems()]
|
||||
self.assertTrue([('location.point', 'geoHaystack')] in info)
|
||||
|
||||
def test_create_geohaystack_index(self):
|
||||
"""Ensure that geohaystack indexes can be created
|
||||
"""
|
||||
class Place(Document):
|
||||
location = DictField()
|
||||
name = StringField()
|
||||
|
||||
Place.create_index({'fields': (')location.point', 'name')}, bucketSize=10)
|
||||
info = Place._get_collection().index_information()
|
||||
info = [value['key'] for key, value in info.iteritems()]
|
||||
self.assertTrue([('location.point', 'geoHaystack'), ('name', 1)] in info)
|
||||
|
||||
def test_dictionary_indexes(self):
|
||||
"""Ensure that indexes are used when meta[indexes] contains
|
||||
dictionaries instead of lists.
|
||||
@@ -422,6 +486,7 @@ class IndexesTest(unittest.TestCase):
|
||||
|
||||
class Test(Document):
|
||||
a = IntField()
|
||||
b = IntField()
|
||||
|
||||
meta = {
|
||||
'indexes': ['a'],
|
||||
@@ -433,16 +498,36 @@ class IndexesTest(unittest.TestCase):
|
||||
obj = Test(a=1)
|
||||
obj.save()
|
||||
|
||||
connection = get_connection()
|
||||
IS_MONGODB_3 = connection.server_info()['versionArray'][0] >= 3
|
||||
|
||||
# Need to be explicit about covered indexes as mongoDB doesn't know if
|
||||
# the documents returned might have more keys than those requested here.
|
||||
query_plan = Test.objects(id=obj.id).exclude('a').explain()
|
||||
self.assertFalse(query_plan['indexOnly'])
|
||||
if not IS_MONGODB_3:
|
||||
self.assertFalse(query_plan['indexOnly'])
|
||||
else:
|
||||
self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), 'IDHACK')
|
||||
|
||||
query_plan = Test.objects(id=obj.id).only('id').explain()
|
||||
self.assertTrue(query_plan['indexOnly'])
|
||||
if not IS_MONGODB_3:
|
||||
self.assertTrue(query_plan['indexOnly'])
|
||||
else:
|
||||
self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), 'IDHACK')
|
||||
|
||||
query_plan = Test.objects(a=1).only('a').exclude('id').explain()
|
||||
self.assertTrue(query_plan['indexOnly'])
|
||||
if not IS_MONGODB_3:
|
||||
self.assertTrue(query_plan['indexOnly'])
|
||||
else:
|
||||
self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), 'IXSCAN')
|
||||
self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('stage'), 'PROJECTION')
|
||||
|
||||
query_plan = Test.objects(a=1).explain()
|
||||
if not IS_MONGODB_3:
|
||||
self.assertFalse(query_plan['indexOnly'])
|
||||
else:
|
||||
self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), 'IXSCAN')
|
||||
self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('stage'), 'FETCH')
|
||||
|
||||
def test_index_on_id(self):
|
||||
|
||||
@@ -481,13 +566,19 @@ class IndexesTest(unittest.TestCase):
|
||||
|
||||
self.assertEqual(BlogPost.objects.count(), 10)
|
||||
self.assertEqual(BlogPost.objects.hint().count(), 10)
|
||||
self.assertEqual(BlogPost.objects.hint([('tags', 1)]).count(), 10)
|
||||
|
||||
self.assertEqual(BlogPost.objects.hint([('ZZ', 1)]).count(), 10)
|
||||
# PyMongo 3.0 bug only, works correctly with 2.X and 3.0.1+ versions
|
||||
if pymongo.version != '3.0':
|
||||
self.assertEqual(BlogPost.objects.hint([('tags', 1)]).count(), 10)
|
||||
|
||||
def invalid_index():
|
||||
BlogPost.objects.hint('tags')
|
||||
self.assertRaises(TypeError, invalid_index)
|
||||
self.assertEqual(BlogPost.objects.hint([('ZZ', 1)]).count(), 10)
|
||||
|
||||
if pymongo.version >= '2.8':
|
||||
self.assertEqual(BlogPost.objects.hint('tags').count(), 10)
|
||||
else:
|
||||
def invalid_index():
|
||||
BlogPost.objects.hint('tags')
|
||||
self.assertRaises(TypeError, invalid_index)
|
||||
|
||||
def invalid_index_2():
|
||||
return BlogPost.objects.hint(('tags', 1))
|
||||
@@ -567,6 +658,38 @@ class IndexesTest(unittest.TestCase):
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
def test_unique_embedded_document_in_list(self):
|
||||
"""
|
||||
Ensure that the uniqueness constraints are applied to fields in
|
||||
embedded documents, even when the embedded documents are in a
|
||||
list field.
|
||||
"""
|
||||
class SubDocument(EmbeddedDocument):
|
||||
year = IntField(db_field='yr')
|
||||
slug = StringField(unique=True)
|
||||
|
||||
class BlogPost(Document):
|
||||
title = StringField()
|
||||
subs = ListField(EmbeddedDocumentField(SubDocument))
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
post1 = BlogPost(
|
||||
title='test1', subs=[
|
||||
SubDocument(year=2009, slug='conflict'),
|
||||
SubDocument(year=2009, slug='conflict')
|
||||
]
|
||||
)
|
||||
post1.save()
|
||||
|
||||
post2 = BlogPost(
|
||||
title='test2', subs=[SubDocument(year=2014, slug='conflict')]
|
||||
)
|
||||
|
||||
self.assertRaises(NotUniqueError, post2.save)
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
def test_unique_with_embedded_document_and_embedded_unique(self):
|
||||
"""Ensure that uniqueness constraints are applied to fields on
|
||||
embedded documents. And work with unique_with as well.
|
||||
@@ -753,6 +876,129 @@ class IndexesTest(unittest.TestCase):
|
||||
key = indexes["title_text"]["key"]
|
||||
self.assertTrue(('_fts', 'text') in key)
|
||||
|
||||
def test_hashed_indexes(self):
|
||||
|
||||
class Book(Document):
|
||||
ref_id = StringField()
|
||||
meta = {
|
||||
"indexes": ["#ref_id"],
|
||||
}
|
||||
|
||||
indexes = Book.objects._collection.index_information()
|
||||
self.assertTrue("ref_id_hashed" in indexes)
|
||||
self.assertTrue(('ref_id', 'hashed') in indexes["ref_id_hashed"]["key"])
|
||||
|
||||
def test_indexes_after_database_drop(self):
|
||||
"""
|
||||
Test to ensure that indexes are re-created on a collection even
|
||||
after the database has been dropped.
|
||||
|
||||
Issue #812
|
||||
"""
|
||||
class BlogPost(Document):
|
||||
title = StringField()
|
||||
slug = StringField(unique=True)
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
# Create Post #1
|
||||
post1 = BlogPost(title='test1', slug='test')
|
||||
post1.save()
|
||||
|
||||
# Drop the Database
|
||||
self.connection.drop_database(BlogPost._get_db().name)
|
||||
|
||||
# Re-create Post #1
|
||||
post1 = BlogPost(title='test1', slug='test')
|
||||
post1.save()
|
||||
|
||||
# Create Post #2
|
||||
post2 = BlogPost(title='test2', slug='test')
|
||||
self.assertRaises(NotUniqueError, post2.save)
|
||||
|
||||
def test_index_dont_send_cls_option(self):
|
||||
"""
|
||||
Ensure that 'cls' option is not sent through ensureIndex. We shouldn't
|
||||
send internal MongoEngine arguments that are not a part of the index
|
||||
spec.
|
||||
|
||||
This is directly related to the fact that MongoDB doesn't validate the
|
||||
options that are passed to ensureIndex. For more details, see:
|
||||
https://jira.mongodb.org/browse/SERVER-769
|
||||
"""
|
||||
class TestDoc(Document):
|
||||
txt = StringField()
|
||||
|
||||
meta = {
|
||||
'allow_inheritance': True,
|
||||
'indexes': [
|
||||
{'fields': ('txt',), 'cls': False}
|
||||
]
|
||||
}
|
||||
|
||||
class TestChildDoc(TestDoc):
|
||||
txt2 = StringField()
|
||||
|
||||
meta = {
|
||||
'indexes': [
|
||||
{'fields': ('txt2',), 'cls': False}
|
||||
]
|
||||
}
|
||||
|
||||
TestDoc.drop_collection()
|
||||
TestDoc.ensure_indexes()
|
||||
TestChildDoc.ensure_indexes()
|
||||
|
||||
index_info = TestDoc._get_collection().index_information()
|
||||
for key in index_info:
|
||||
del index_info[key]['v'] # drop the index version - we don't care about that here
|
||||
if 'ns' in index_info[key]:
|
||||
del index_info[key]['ns'] # drop the index namespace - we don't care about that here, MongoDB 3+
|
||||
if 'dropDups' in index_info[key]:
|
||||
del index_info[key]['dropDups'] # drop the index dropDups - it is deprecated in MongoDB 3+
|
||||
|
||||
self.assertEqual(index_info, {
|
||||
'txt_1': {
|
||||
'key': [('txt', 1)],
|
||||
'background': False
|
||||
},
|
||||
'_id_': {
|
||||
'key': [('_id', 1)],
|
||||
},
|
||||
'txt2_1': {
|
||||
'key': [('txt2', 1)],
|
||||
'background': False
|
||||
},
|
||||
'_cls_1': {
|
||||
'key': [('_cls', 1)],
|
||||
'background': False,
|
||||
}
|
||||
})
|
||||
|
||||
def test_compound_index_underscore_cls_not_overwritten(self):
|
||||
"""
|
||||
Test that the compound index doesn't get another _cls when it is specified
|
||||
"""
|
||||
class TestDoc(Document):
|
||||
shard_1 = StringField()
|
||||
txt_1 = StringField()
|
||||
|
||||
meta = {
|
||||
'collection': 'test',
|
||||
'allow_inheritance': True,
|
||||
'sparse': True,
|
||||
'shard_key': 'shard_1',
|
||||
'indexes': [
|
||||
('shard_1', '_cls', 'txt_1'),
|
||||
]
|
||||
}
|
||||
|
||||
TestDoc.drop_collection()
|
||||
TestDoc.ensure_indexes()
|
||||
|
||||
index_info = TestDoc._get_collection().index_information()
|
||||
self.assertTrue('shard_1_1__cls_1_txt_1_1' in index_info)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
@@ -163,7 +163,7 @@ class InheritanceTest(unittest.TestCase):
|
||||
class Employee(Person):
|
||||
salary = IntField()
|
||||
|
||||
self.assertEqual(['age', 'id', 'name', 'salary'],
|
||||
self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'],
|
||||
sorted(Employee._fields.keys()))
|
||||
self.assertEqual(Employee._get_collection_name(),
|
||||
Person._get_collection_name())
|
||||
@@ -180,7 +180,7 @@ class InheritanceTest(unittest.TestCase):
|
||||
class Employee(Person):
|
||||
salary = IntField()
|
||||
|
||||
self.assertEqual(['age', 'id', 'name', 'salary'],
|
||||
self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'],
|
||||
sorted(Employee._fields.keys()))
|
||||
self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(),
|
||||
['_cls', 'name', 'age'])
|
||||
@@ -307,6 +307,69 @@ class InheritanceTest(unittest.TestCase):
|
||||
doc = Animal(name='dog')
|
||||
self.assertFalse('_cls' in doc.to_mongo())
|
||||
|
||||
def test_abstract_handle_ids_in_metaclass_properly(self):
|
||||
|
||||
class City(Document):
|
||||
continent = StringField()
|
||||
meta = {'abstract': True,
|
||||
'allow_inheritance': False}
|
||||
|
||||
class EuropeanCity(City):
|
||||
name = StringField()
|
||||
|
||||
berlin = EuropeanCity(name='Berlin', continent='Europe')
|
||||
self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
|
||||
self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
|
||||
self.assertEqual(len(berlin._fields_ordered), 3)
|
||||
self.assertEqual(berlin._fields_ordered[0], 'id')
|
||||
|
||||
def test_auto_id_not_set_if_specific_in_parent_class(self):
|
||||
|
||||
class City(Document):
|
||||
continent = StringField()
|
||||
city_id = IntField(primary_key=True)
|
||||
meta = {'abstract': True,
|
||||
'allow_inheritance': False}
|
||||
|
||||
class EuropeanCity(City):
|
||||
name = StringField()
|
||||
|
||||
berlin = EuropeanCity(name='Berlin', continent='Europe')
|
||||
self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
|
||||
self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
|
||||
self.assertEqual(len(berlin._fields_ordered), 3)
|
||||
self.assertEqual(berlin._fields_ordered[0], 'city_id')
|
||||
|
||||
def test_auto_id_vs_non_pk_id_field(self):
|
||||
|
||||
class City(Document):
|
||||
continent = StringField()
|
||||
id = IntField()
|
||||
meta = {'abstract': True,
|
||||
'allow_inheritance': False}
|
||||
|
||||
class EuropeanCity(City):
|
||||
name = StringField()
|
||||
|
||||
berlin = EuropeanCity(name='Berlin', continent='Europe')
|
||||
self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
|
||||
self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
|
||||
self.assertEqual(len(berlin._fields_ordered), 4)
|
||||
self.assertEqual(berlin._fields_ordered[0], 'auto_id_0')
|
||||
berlin.save()
|
||||
self.assertEqual(berlin.pk, berlin.auto_id_0)
|
||||
|
||||
def test_abstract_document_creation_does_not_fail(self):
|
||||
|
||||
class City(Document):
|
||||
continent = StringField()
|
||||
meta = {'abstract': True,
|
||||
'allow_inheritance': False}
|
||||
bkk = City(continent='asia')
|
||||
self.assertEqual(None, bkk.pk)
|
||||
# TODO: expected error? Shouldn't we create a new error type?
|
||||
self.assertRaises(KeyError, lambda: setattr(bkk, 'pk', 1))
|
||||
|
||||
def test_allow_inheritance_embedded_document(self):
|
||||
"""Ensure embedded documents respect inheritance
|
||||
"""
|
||||
@@ -397,6 +460,16 @@ class InheritanceTest(unittest.TestCase):
|
||||
meta = {'abstract': True}
|
||||
self.assertRaises(ValueError, create_bad_abstract)
|
||||
|
||||
def test_abstract_embedded_documents(self):
|
||||
# 789: EmbeddedDocument shouldn't inherit abstract
|
||||
class A(EmbeddedDocument):
|
||||
meta = {"abstract": True}
|
||||
|
||||
class B(A):
|
||||
pass
|
||||
|
||||
self.assertFalse(B._meta["abstract"])
|
||||
|
||||
def test_inherited_collections(self):
|
||||
"""Ensure that subclassed documents don't override parents'
|
||||
collections
|
||||
|
@@ -9,13 +9,15 @@ import unittest
|
||||
import uuid
|
||||
|
||||
from datetime import datetime
|
||||
from bson import DBRef
|
||||
from bson import DBRef, ObjectId
|
||||
from tests import fixtures
|
||||
from tests.fixtures import (PickleEmbedded, PickleTest, PickleSignalsTest,
|
||||
PickleDyanmicEmbedded, PickleDynamicTest)
|
||||
|
||||
from mongoengine import *
|
||||
from mongoengine.errors import (NotRegistered, InvalidDocumentError,
|
||||
InvalidQueryError, NotUniqueError)
|
||||
InvalidQueryError, NotUniqueError,
|
||||
FieldDoesNotExist)
|
||||
from mongoengine.queryset import NULLIFY, Q
|
||||
from mongoengine.connection import get_db
|
||||
from mongoengine.base import get_document
|
||||
@@ -34,15 +36,21 @@ class InstanceTest(unittest.TestCase):
|
||||
connect(db='mongoenginetest')
|
||||
self.db = get_db()
|
||||
|
||||
class Job(EmbeddedDocument):
|
||||
name = StringField()
|
||||
years = IntField()
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
job = EmbeddedDocumentField(Job)
|
||||
|
||||
non_field = True
|
||||
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
self.Person = Person
|
||||
self.Job = Job
|
||||
|
||||
def tearDown(self):
|
||||
for collection in self.db.collection_names():
|
||||
@@ -50,6 +58,11 @@ class InstanceTest(unittest.TestCase):
|
||||
continue
|
||||
self.db.drop_collection(collection)
|
||||
|
||||
def assertDbEqual(self, docs):
|
||||
self.assertEqual(
|
||||
list(self.Person._get_collection().find().sort("id")),
|
||||
sorted(docs, key=lambda doc: doc["_id"]))
|
||||
|
||||
def test_capped_collection(self):
|
||||
"""Ensure that capped collections work properly.
|
||||
"""
|
||||
@@ -75,7 +88,7 @@ class InstanceTest(unittest.TestCase):
|
||||
options = Log.objects._collection.options()
|
||||
self.assertEqual(options['capped'], True)
|
||||
self.assertEqual(options['max'], 10)
|
||||
self.assertTrue(options['size'] >= 4096)
|
||||
self.assertEqual(options['size'], 4096)
|
||||
|
||||
# Check that the document cannot be redefined with different options
|
||||
def recreate_log_document():
|
||||
@@ -90,6 +103,69 @@ class InstanceTest(unittest.TestCase):
|
||||
|
||||
Log.drop_collection()
|
||||
|
||||
def test_capped_collection_default(self):
|
||||
"""Ensure that capped collections defaults work properly.
|
||||
"""
|
||||
class Log(Document):
|
||||
date = DateTimeField(default=datetime.now)
|
||||
meta = {
|
||||
'max_documents': 10,
|
||||
}
|
||||
|
||||
Log.drop_collection()
|
||||
|
||||
# Create a doc to create the collection
|
||||
Log().save()
|
||||
|
||||
options = Log.objects._collection.options()
|
||||
self.assertEqual(options['capped'], True)
|
||||
self.assertEqual(options['max'], 10)
|
||||
self.assertEqual(options['size'], 10 * 2**20)
|
||||
|
||||
# Check that the document with default value can be recreated
|
||||
def recreate_log_document():
|
||||
class Log(Document):
|
||||
date = DateTimeField(default=datetime.now)
|
||||
meta = {
|
||||
'max_documents': 10,
|
||||
}
|
||||
# Create the collection by accessing Document.objects
|
||||
Log.objects
|
||||
recreate_log_document()
|
||||
Log.drop_collection()
|
||||
|
||||
    def test_capped_collection_no_max_size_problems(self):
        """Ensure that capped collections with odd max_size work properly.
        MongoDB rounds up max_size to next multiple of 256, recreating a doc
        with the same spec failed in mongoengine <0.10
        """
        class Log(Document):
            date = DateTimeField(default=datetime.now)
            meta = {
                'max_size': 10000,
            }

        Log.drop_collection()

        # Create a doc to create the collection
        Log().save()

        options = Log.objects._collection.options()
        self.assertEqual(options['capped'], True)
        self.assertTrue(options['size'] >= 10000)

        # Check that the document with odd max_size value can be recreated
        def recreate_log_document():
            class Log(Document):
                date = DateTimeField(default=datetime.now)
                meta = {
                    'max_size': 10000,
                }
            # Create the collection by accessing Document.objects
            Log.objects
        recreate_log_document()
        Log.drop_collection()
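For context, a small runnable sketch of the size rounding described in the docstring above; the helper name is illustrative and not part of mongoengine. MongoDB rounds a capped collection's max_size up to the next multiple of 256 bytes, so a requested 10000 is stored as 10240, and comparing an existing collection against a re-declared Document has to tolerate the rounded value.

def round_up_max_size(max_size):
    # Round a requested max_size up to the next multiple of 256 bytes,
    # mirroring what the server stores for a capped collection.
    if max_size % 256:
        max_size = (max_size // 256 + 1) * 256
    return max_size

assert round_up_max_size(10000) == 10240
assert round_up_max_size(4096) == 4096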
def test_repr(self):
|
||||
"""Ensure that unicode representation works
|
||||
"""
|
||||
@@ -103,6 +179,19 @@ class InstanceTest(unittest.TestCase):
|
||||
|
||||
self.assertEqual('<Article: привет мир>', repr(doc))
|
||||
|
||||
def test_repr_none(self):
|
||||
"""Ensure None values handled correctly
|
||||
"""
|
||||
class Article(Document):
|
||||
title = StringField()
|
||||
|
||||
def __str__(self):
|
||||
return None
|
||||
|
||||
doc = Article(title=u'привет мир')
|
||||
|
||||
self.assertEqual('<Article: None>', repr(doc))
|
||||
|
||||
def test_queryset_resurrects_dropped_collection(self):
|
||||
self.Person.drop_collection()
|
||||
|
||||
@@ -122,10 +211,18 @@ class InstanceTest(unittest.TestCase):
|
||||
"""
|
||||
class Animal(Document):
|
||||
meta = {'allow_inheritance': True}
|
||||
class Fish(Animal): pass
|
||||
class Mammal(Animal): pass
|
||||
class Dog(Mammal): pass
|
||||
class Human(Mammal): pass
|
||||
|
||||
class Fish(Animal):
|
||||
pass
|
||||
|
||||
class Mammal(Animal):
|
||||
pass
|
||||
|
||||
class Dog(Mammal):
|
||||
pass
|
||||
|
||||
class Human(Mammal):
|
||||
pass
|
||||
|
||||
class Zoo(Document):
|
||||
animals = ListField(ReferenceField(Animal))
|
||||
@@ -437,7 +534,7 @@ class InstanceTest(unittest.TestCase):
|
||||
f.reload()
|
||||
except Foo.DoesNotExist:
|
||||
pass
|
||||
except Exception as ex:
|
||||
except Exception:
|
||||
self.assertFalse("Threw wrong exception")
|
||||
|
||||
f.save()
|
||||
@@ -446,13 +543,13 @@ class InstanceTest(unittest.TestCase):
|
||||
f.reload()
|
||||
except Foo.DoesNotExist:
|
||||
pass
|
||||
except Exception as ex:
|
||||
except Exception:
|
||||
self.assertFalse("Threw wrong exception")
|
||||
|
||||
def test_dictionary_access(self):
|
||||
"""Ensure that dictionary-style field access works properly.
|
||||
"""
|
||||
person = self.Person(name='Test User', age=30)
|
||||
person = self.Person(name='Test User', age=30, job=self.Job())
|
||||
self.assertEqual(person['name'], 'Test User')
|
||||
|
||||
self.assertRaises(KeyError, person.__getitem__, 'salary')
|
||||
@@ -462,7 +559,7 @@ class InstanceTest(unittest.TestCase):
|
||||
self.assertEqual(person['name'], 'Another User')
|
||||
|
||||
# Length = length(assigned fields + id)
|
||||
self.assertEqual(len(person), 3)
|
||||
self.assertEqual(len(person), 5)
|
||||
|
||||
self.assertTrue('age' in person)
|
||||
person.age = None
|
||||
@@ -481,8 +578,9 @@ class InstanceTest(unittest.TestCase):
|
||||
|
||||
self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(),
|
||||
['_cls', 'name', 'age'])
|
||||
self.assertEqual(Employee(name="Bob", age=35, salary=0).to_mongo().keys(),
|
||||
['_cls', 'name', 'age', 'salary'])
|
||||
self.assertEqual(
|
||||
Employee(name="Bob", age=35, salary=0).to_mongo().keys(),
|
||||
['_cls', 'name', 'age', 'salary'])
|
||||
|
||||
def test_embedded_document_to_mongo_id(self):
|
||||
class SubDoc(EmbeddedDocument):
|
||||
@@ -617,6 +715,64 @@ class InstanceTest(unittest.TestCase):
|
||||
t = TestDocument(doc=TestEmbeddedDocument(x=15, y=35, z=5))
|
||||
t.save(clean=False)
|
||||
|
||||
def test_modify_empty(self):
|
||||
doc = self.Person(name="bob", age=10).save()
|
||||
self.assertRaises(
|
||||
InvalidDocumentError, lambda: self.Person().modify(set__age=10))
|
||||
self.assertDbEqual([dict(doc.to_mongo())])
|
||||
|
||||
def test_modify_invalid_query(self):
|
||||
doc1 = self.Person(name="bob", age=10).save()
|
||||
doc2 = self.Person(name="jim", age=20).save()
|
||||
docs = [dict(doc1.to_mongo()), dict(doc2.to_mongo())]
|
||||
|
||||
self.assertRaises(
|
||||
InvalidQueryError,
|
||||
lambda: doc1.modify(dict(id=doc2.id), set__value=20))
|
||||
|
||||
self.assertDbEqual(docs)
|
||||
|
||||
def test_modify_match_another_document(self):
|
||||
doc1 = self.Person(name="bob", age=10).save()
|
||||
doc2 = self.Person(name="jim", age=20).save()
|
||||
docs = [dict(doc1.to_mongo()), dict(doc2.to_mongo())]
|
||||
|
||||
assert not doc1.modify(dict(name=doc2.name), set__age=100)
|
||||
|
||||
self.assertDbEqual(docs)
|
||||
|
||||
def test_modify_not_exists(self):
|
||||
doc1 = self.Person(name="bob", age=10).save()
|
||||
doc2 = self.Person(id=ObjectId(), name="jim", age=20)
|
||||
docs = [dict(doc1.to_mongo())]
|
||||
|
||||
assert not doc2.modify(dict(name=doc2.name), set__age=100)
|
||||
|
||||
self.assertDbEqual(docs)
|
||||
|
||||
    def test_modify_update(self):
        other_doc = self.Person(name="bob", age=10).save()
        doc = self.Person(
            name="jim", age=20, job=self.Job(name="10gen", years=3)).save()

        doc_copy = doc._from_son(doc.to_mongo())

        # these changes must go away
        doc.name = "liza"
        doc.job.name = "Google"
        doc.job.years = 3

        assert doc.modify(
            set__age=21, set__job__name="MongoDB", unset__job__years=True)
        doc_copy.age = 21
        doc_copy.job.name = "MongoDB"
        del doc_copy.job.years

        assert doc.to_json() == doc_copy.to_json()
        assert doc._get_changed_fields() == []

        self.assertDbEqual([dict(other_doc.to_mongo()), dict(doc.to_mongo())])
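As a hedged summary of the behaviour the modify() tests above depend on (assumed semantics, not a copy of mongoengine's implementation): the update is restricted to this document's _id plus any extra query, nothing happens and False is returned when no record matches, and the instance is refreshed from the post-update state otherwise. A toy in-memory version:

def modify_sketch(docs, pk, query, changes):
    # Apply `changes` to the document whose _id equals `pk` *and* which
    # matches the extra `query`; report whether anything matched.
    for doc in docs:
        if doc['_id'] == pk and all(doc.get(k) == v for k, v in query.items()):
            doc.update(changes)
            return True
    return False

people = [{'_id': 1, 'name': 'bob', 'age': 10},
          {'_id': 2, 'name': 'jim', 'age': 20}]
assert modify_sketch(people, 1, {'name': 'jim'}, {'age': 100}) is False
assert modify_sketch(people, 1, {'name': 'bob'}, {'age': 100}) is True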
def test_save(self):
|
||||
"""Ensure that a document may be saved in the database.
|
||||
"""
|
||||
@@ -855,17 +1011,18 @@ class InstanceTest(unittest.TestCase):
|
||||
w1 = Widget(toggle=False, save_id=UUID(1))
|
||||
|
||||
# ignore save_condition on new record creation
|
||||
w1.save(save_condition={'save_id':UUID(42)})
|
||||
w1.save(save_condition={'save_id': UUID(42)})
|
||||
w1.reload()
|
||||
self.assertFalse(w1.toggle)
|
||||
self.assertEqual(w1.save_id, UUID(1))
|
||||
self.assertEqual(w1.count, 0)
|
||||
|
||||
# mismatch in save_condition prevents save
|
||||
# mismatch in save_condition prevents save and raise exception
|
||||
flip(w1)
|
||||
self.assertTrue(w1.toggle)
|
||||
self.assertEqual(w1.count, 1)
|
||||
w1.save(save_condition={'save_id':UUID(42)})
|
||||
self.assertRaises(OperationError,
|
||||
w1.save, save_condition={'save_id': UUID(42)})
|
||||
w1.reload()
|
||||
self.assertFalse(w1.toggle)
|
||||
self.assertEqual(w1.count, 0)
|
||||
@@ -874,7 +1031,7 @@ class InstanceTest(unittest.TestCase):
|
||||
flip(w1)
|
||||
self.assertTrue(w1.toggle)
|
||||
self.assertEqual(w1.count, 1)
|
||||
w1.save(save_condition={'save_id':UUID(1)})
|
||||
w1.save(save_condition={'save_id': UUID(1)})
|
||||
w1.reload()
|
||||
self.assertTrue(w1.toggle)
|
||||
self.assertEqual(w1.count, 1)
|
||||
@@ -887,25 +1044,27 @@ class InstanceTest(unittest.TestCase):
|
||||
|
||||
flip(w1)
|
||||
w1.save_id = UUID(2)
|
||||
w1.save(save_condition={'save_id':old_id})
|
||||
w1.save(save_condition={'save_id': old_id})
|
||||
w1.reload()
|
||||
self.assertFalse(w1.toggle)
|
||||
self.assertEqual(w1.count, 2)
|
||||
flip(w2)
|
||||
flip(w2)
|
||||
w2.save(save_condition={'save_id':old_id})
|
||||
self.assertRaises(OperationError,
|
||||
w2.save, save_condition={'save_id': old_id})
|
||||
w2.reload()
|
||||
self.assertFalse(w2.toggle)
|
||||
self.assertEqual(w2.count, 2)
|
||||
|
||||
# save_condition uses mongoengine-style operator syntax
|
||||
flip(w1)
|
||||
w1.save(save_condition={'count__lt':w1.count})
|
||||
w1.save(save_condition={'count__lt': w1.count})
|
||||
w1.reload()
|
||||
self.assertTrue(w1.toggle)
|
||||
self.assertEqual(w1.count, 3)
|
||||
flip(w1)
|
||||
w1.save(save_condition={'count__gte':w1.count})
|
||||
self.assertRaises(OperationError,
|
||||
w1.save, save_condition={'count__gte': w1.count})
|
||||
w1.reload()
|
||||
self.assertTrue(w1.toggle)
|
||||
self.assertEqual(w1.count, 3)
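A small, assumed sketch of what save_condition adds to the write issued by save() (the helper below is illustrative, not mongoengine API): the condition is merged into the _id filter, operator suffixes such as count__lt become their MongoDB $-operators, and a write that matches no document raises OperationError instead of silently succeeding.

def build_save_filter(pk, save_condition):
    # Merge the user-supplied save_condition into the _id filter used on save.
    query = {'_id': pk}
    query.update(save_condition or {})
    return query

assert build_save_filter(1, {'save_id': 42}) == {'_id': 1, 'save_id': 42}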
@@ -1351,7 +1510,8 @@ class InstanceTest(unittest.TestCase):
|
||||
self.assertEqual(str(person_obj['_id']), '497ce96f395f2f052a494fd4')
|
||||
|
||||
def test_save_custom_pk(self):
|
||||
"""Ensure that a document may be saved with a custom _id using pk alias.
|
||||
"""
|
||||
Ensure that a document may be saved with a custom _id using pk alias.
|
||||
"""
|
||||
# Create person object and save it to the database
|
||||
person = self.Person(name='Test User', age=30,
|
||||
@@ -1437,9 +1597,15 @@ class InstanceTest(unittest.TestCase):
|
||||
p4 = Page(comments=[Comment(user=u2, comment="Heavy Metal song")])
|
||||
p4.save()
|
||||
|
||||
self.assertEqual([p1, p2], list(Page.objects.filter(comments__user=u1)))
|
||||
self.assertEqual([p1, p2, p4], list(Page.objects.filter(comments__user=u2)))
|
||||
self.assertEqual([p1, p3], list(Page.objects.filter(comments__user=u3)))
|
||||
self.assertEqual(
|
||||
[p1, p2],
|
||||
list(Page.objects.filter(comments__user=u1)))
|
||||
self.assertEqual(
|
||||
[p1, p2, p4],
|
||||
list(Page.objects.filter(comments__user=u2)))
|
||||
self.assertEqual(
|
||||
[p1, p3],
|
||||
list(Page.objects.filter(comments__user=u3)))
|
||||
|
||||
def test_save_embedded_document(self):
|
||||
"""Ensure that a document with an embedded document field may be
|
||||
@@ -1514,7 +1680,8 @@ class InstanceTest(unittest.TestCase):
|
||||
self.assertEqual(promoted_employee.age, 50)
|
||||
|
||||
# Ensure that the 'details' embedded object saved correctly
|
||||
self.assertEqual(promoted_employee.details.position, 'Senior Developer')
|
||||
self.assertEqual(
|
||||
promoted_employee.details.position, 'Senior Developer')
|
||||
|
||||
# Test removal
|
||||
promoted_employee.details = None
|
||||
@@ -1650,7 +1817,8 @@ class InstanceTest(unittest.TestCase):
|
||||
post.save()
|
||||
|
||||
reviewer.delete()
|
||||
self.assertEqual(BlogPost.objects.count(), 1) # No effect on the BlogPost
|
||||
# No effect on the BlogPost
|
||||
self.assertEqual(BlogPost.objects.count(), 1)
|
||||
self.assertEqual(BlogPost.objects.get().reviewer, None)
|
||||
|
||||
# Delete the Person, which should lead to deletion of the BlogPost, too
|
||||
@@ -1699,8 +1867,10 @@ class InstanceTest(unittest.TestCase):
|
||||
|
||||
class BlogPost(Document):
|
||||
content = StringField()
|
||||
authors = ListField(ReferenceField(self.Person, reverse_delete_rule=CASCADE))
|
||||
reviewers = ListField(ReferenceField(self.Person, reverse_delete_rule=NULLIFY))
|
||||
authors = ListField(ReferenceField(
|
||||
self.Person, reverse_delete_rule=CASCADE))
|
||||
reviewers = ListField(ReferenceField(
|
||||
self.Person, reverse_delete_rule=NULLIFY))
|
||||
|
||||
self.Person.drop_collection()
|
||||
|
||||
@@ -1727,11 +1897,11 @@ class InstanceTest(unittest.TestCase):
|
||||
self.assertEqual(BlogPost.objects.count(), 0)
|
||||
|
||||
def test_reverse_delete_rule_cascade_triggers_pre_delete_signal(self):
|
||||
''' ensure the pre_delete signal is triggered upon a cascading deletion
|
||||
""" ensure the pre_delete signal is triggered upon a cascading deletion
|
||||
setup a blog post with content, an author and editor
|
||||
delete the author which triggers deletion of blogpost via cascade
|
||||
blog post's pre_delete signal alters an editor attribute
|
||||
'''
|
||||
"""
|
||||
class Editor(self.Person):
|
||||
review_queue = IntField(default=0)
|
||||
|
||||
@@ -1795,13 +1965,17 @@ class InstanceTest(unittest.TestCase):
|
||||
self.assertEqual(Bar.objects.count(), 1) # No effect on the BlogPost
|
||||
self.assertEqual(Bar.objects.get().foo, None)
|
||||
|
||||
def test_invalid_reverse_delete_rules_raise_errors(self):
|
||||
def test_invalid_reverse_delete_rule_raise_errors(self):
|
||||
|
||||
def throw_invalid_document_error():
|
||||
class Blog(Document):
|
||||
content = StringField()
|
||||
authors = MapField(ReferenceField(self.Person, reverse_delete_rule=CASCADE))
|
||||
reviewers = DictField(field=ReferenceField(self.Person, reverse_delete_rule=NULLIFY))
|
||||
authors = MapField(ReferenceField(
|
||||
self.Person, reverse_delete_rule=CASCADE))
|
||||
reviewers = DictField(
|
||||
field=ReferenceField(
|
||||
self.Person,
|
||||
reverse_delete_rule=NULLIFY))
|
||||
|
||||
self.assertRaises(InvalidDocumentError, throw_invalid_document_error)
|
||||
|
||||
@@ -1810,7 +1984,8 @@ class InstanceTest(unittest.TestCase):
|
||||
father = ReferenceField('Person', reverse_delete_rule=DENY)
|
||||
mother = ReferenceField('Person', reverse_delete_rule=DENY)
|
||||
|
||||
self.assertRaises(InvalidDocumentError, throw_invalid_document_error_embedded)
|
||||
self.assertRaises(
|
||||
InvalidDocumentError, throw_invalid_document_error_embedded)
|
||||
|
||||
def test_reverse_delete_rule_cascade_recurs(self):
|
||||
"""Ensure that a chain of documents is also deleted upon cascaded
|
||||
@@ -1832,16 +2007,16 @@ class InstanceTest(unittest.TestCase):
|
||||
author = self.Person(name='Test User')
|
||||
author.save()
|
||||
|
||||
post = BlogPost(content = 'Watched some TV')
|
||||
post = BlogPost(content='Watched some TV')
|
||||
post.author = author
|
||||
post.save()
|
||||
|
||||
comment = Comment(text = 'Kudos.')
|
||||
comment = Comment(text='Kudos.')
|
||||
comment.post = post
|
||||
comment.save()
|
||||
|
||||
# Delete the Person, which should lead to deletion of the BlogPost, and,
|
||||
# recursively to the Comment, too
|
||||
# Delete the Person, which should lead to deletion of the BlogPost,
|
||||
# and, recursively to the Comment, too
|
||||
author.delete()
|
||||
self.assertEqual(Comment.objects.count(), 0)
|
||||
|
||||
@@ -1864,7 +2039,7 @@ class InstanceTest(unittest.TestCase):
|
||||
author = self.Person(name='Test User')
|
||||
author.save()
|
||||
|
||||
post = BlogPost(content = 'Watched some TV')
|
||||
post = BlogPost(content='Watched some TV')
|
||||
post.author = author
|
||||
post.save()
|
||||
|
||||
@@ -1978,9 +2153,33 @@ class InstanceTest(unittest.TestCase):
|
||||
self.assertEqual(pickle_doc.string, "Two")
|
||||
self.assertEqual(pickle_doc.lists, ["1", "2", "3"])
|
||||
|
||||
def test_regular_document_pickle(self):
|
||||
|
||||
pickle_doc = PickleTest(number=1, string="One", lists=['1', '2'])
|
||||
pickled_doc = pickle.dumps(pickle_doc) # make sure pickling works even before the doc is saved
|
||||
pickle_doc.save()
|
||||
|
||||
pickled_doc = pickle.dumps(pickle_doc)
|
||||
|
||||
# Test that when a document's definition changes the new
|
||||
# definition is used
|
||||
fixtures.PickleTest = fixtures.NewDocumentPickleTest
|
||||
|
||||
resurrected = pickle.loads(pickled_doc)
|
||||
self.assertEqual(resurrected.__class__,
|
||||
fixtures.NewDocumentPickleTest)
|
||||
self.assertEqual(resurrected._fields_ordered,
|
||||
fixtures.NewDocumentPickleTest._fields_ordered)
|
||||
self.assertNotEqual(resurrected._fields_ordered,
|
||||
pickle_doc._fields_ordered)
|
||||
|
||||
# The local PickleTest is still a ref to the original
|
||||
fixtures.PickleTest = PickleTest
|
||||
|
||||
def test_dynamic_document_pickle(self):
|
||||
|
||||
pickle_doc = PickleDynamicTest(name="test", number=1, string="One", lists=['1', '2'])
|
||||
pickle_doc = PickleDynamicTest(
|
||||
name="test", number=1, string="One", lists=['1', '2'])
|
||||
pickle_doc.embedded = PickleDyanmicEmbedded(foo="Bar")
|
||||
pickled_doc = pickle.dumps(pickle_doc) # make sure pickling works even before the doc is saved
|
||||
|
||||
@@ -2002,7 +2201,8 @@ class InstanceTest(unittest.TestCase):
|
||||
pickle_doc.embedded._dynamic_fields.keys())
|
||||
|
||||
def test_picklable_on_signals(self):
|
||||
pickle_doc = PickleSignalsTest(number=1, string="One", lists=['1', '2'])
|
||||
pickle_doc = PickleSignalsTest(
|
||||
number=1, string="One", lists=['1', '2'])
|
||||
pickle_doc.embedded = PickleEmbedded()
|
||||
pickle_doc.save()
|
||||
pickle_doc.delete()
|
||||
@@ -2157,9 +2357,15 @@ class InstanceTest(unittest.TestCase):
|
||||
self.assertEqual(AuthorBooks._get_db(), get_db("testdb-3"))
|
||||
|
||||
# Collections
|
||||
self.assertEqual(User._get_collection(), get_db("testdb-1")[User._get_collection_name()])
|
||||
self.assertEqual(Book._get_collection(), get_db("testdb-2")[Book._get_collection_name()])
|
||||
self.assertEqual(AuthorBooks._get_collection(), get_db("testdb-3")[AuthorBooks._get_collection_name()])
|
||||
self.assertEqual(
|
||||
User._get_collection(),
|
||||
get_db("testdb-1")[User._get_collection_name()])
|
||||
self.assertEqual(
|
||||
Book._get_collection(),
|
||||
get_db("testdb-2")[Book._get_collection_name()])
|
||||
self.assertEqual(
|
||||
AuthorBooks._get_collection(),
|
||||
get_db("testdb-3")[AuthorBooks._get_collection_name()])
|
||||
|
||||
def test_db_alias_overrides(self):
|
||||
"""db_alias can be overriden
|
||||
@@ -2328,29 +2534,113 @@ class InstanceTest(unittest.TestCase):
|
||||
group = Group.objects.first()
|
||||
self.assertEqual("hello - default", group.name)
|
||||
|
||||
def test_no_overwritting_no_data_loss(self):
|
||||
|
||||
def test_load_undefined_fields(self):
|
||||
class User(Document):
|
||||
username = StringField(primary_key=True)
|
||||
name = StringField()
|
||||
|
||||
@property
|
||||
def foo(self):
|
||||
return True
|
||||
|
||||
User.drop_collection()
|
||||
|
||||
user = User(username="Ross", foo="bar")
|
||||
self.assertTrue(user.foo)
|
||||
User._get_collection().save({
|
||||
'name': 'John',
|
||||
'foo': 'Bar',
|
||||
'data': [1, 2, 3]
|
||||
})
|
||||
|
||||
User._get_collection().save({"_id": "Ross", "foo": "Bar",
|
||||
"data": [1, 2, 3]})
|
||||
self.assertRaises(FieldDoesNotExist, User.objects.first)
|
||||
|
||||
def test_load_undefined_fields_with_strict_false(self):
|
||||
class User(Document):
|
||||
name = StringField()
|
||||
|
||||
meta = {'strict': False}
|
||||
|
||||
User.drop_collection()
|
||||
|
||||
User._get_collection().save({
|
||||
'name': 'John',
|
||||
'foo': 'Bar',
|
||||
'data': [1, 2, 3]
|
||||
})
|
||||
|
||||
user = User.objects.first()
|
||||
self.assertEqual("Ross", user.username)
|
||||
self.assertEqual(True, user.foo)
|
||||
self.assertEqual("Bar", user._data["foo"])
|
||||
self.assertEqual([1, 2, 3], user._data["data"])
|
||||
self.assertEqual(user.name, 'John')
|
||||
self.assertFalse(hasattr(user, 'foo'))
|
||||
self.assertEqual(user._data['foo'], 'Bar')
|
||||
self.assertFalse(hasattr(user, 'data'))
|
||||
self.assertEqual(user._data['data'], [1, 2, 3])
|
||||
|
||||
def test_load_undefined_fields_on_embedded_document(self):
|
||||
class Thing(EmbeddedDocument):
|
||||
name = StringField()
|
||||
|
||||
class User(Document):
|
||||
name = StringField()
|
||||
thing = EmbeddedDocumentField(Thing)
|
||||
|
||||
User.drop_collection()
|
||||
|
||||
User._get_collection().save({
|
||||
'name': 'John',
|
||||
'thing': {
|
||||
'name': 'My thing',
|
||||
'foo': 'Bar',
|
||||
'data': [1, 2, 3]
|
||||
}
|
||||
})
|
||||
|
||||
self.assertRaises(FieldDoesNotExist, User.objects.first)
|
||||
|
||||
def test_load_undefined_fields_on_embedded_document_with_strict_false_on_doc(self):
|
||||
class Thing(EmbeddedDocument):
|
||||
name = StringField()
|
||||
|
||||
class User(Document):
|
||||
name = StringField()
|
||||
thing = EmbeddedDocumentField(Thing)
|
||||
|
||||
meta = {'strict': False}
|
||||
|
||||
User.drop_collection()
|
||||
|
||||
User._get_collection().save({
|
||||
'name': 'John',
|
||||
'thing': {
|
||||
'name': 'My thing',
|
||||
'foo': 'Bar',
|
||||
'data': [1, 2, 3]
|
||||
}
|
||||
})
|
||||
|
||||
self.assertRaises(FieldDoesNotExist, User.objects.first)
|
||||
|
||||
    def test_load_undefined_fields_on_embedded_document_with_strict_false(self):
        class Thing(EmbeddedDocument):
            name = StringField()

            meta = {'strict': False}

        class User(Document):
            name = StringField()
            thing = EmbeddedDocumentField(Thing)

        User.drop_collection()

        User._get_collection().save({
            'name': 'John',
            'thing': {
                'name': 'My thing',
                'foo': 'Bar',
                'data': [1, 2, 3]
            }
        })

        user = User.objects.first()
        self.assertEqual(user.name, 'John')
        self.assertEqual(user.thing.name, 'My thing')
        self.assertFalse(hasattr(user.thing, 'foo'))
        self.assertEqual(user.thing._data['foo'], 'Bar')
        self.assertFalse(hasattr(user.thing, 'data'))
        self.assertEqual(user.thing._data['data'], [1, 2, 3])
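A hedged one-function summary of the strict-loading rule these tests exercise (the helper and its exact behaviour are a sketch, not mongoengine internals): undeclared keys in the raw data raise FieldDoesNotExist by default, while meta = {'strict': False} keeps them in _data without exposing them as attributes.

def check_raw_keys(raw, declared_fields, strict=True):
    # Reject undeclared keys when strict, otherwise accept the raw data as-is.
    unknown = set(raw) - set(declared_fields) - {'_id', '_cls'}
    if strict and unknown:
        raise FieldDoesNotExist('undeclared fields: %s' % sorted(unknown))
    return raw

check_raw_keys({'name': 'John'}, {'name', 'thing'})                      # passes
check_raw_keys({'name': 'John', 'foo': 'Bar'}, {'name'}, strict=False)   # tolerated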
def test_spaces_in_keys(self):
|
||||
|
||||
@@ -2427,6 +2717,10 @@ class InstanceTest(unittest.TestCase):
|
||||
doc_name = StringField()
|
||||
doc = EmbeddedDocumentField(Embedded)
|
||||
|
||||
def __eq__(self, other):
|
||||
return (self.doc_name == other.doc_name and
|
||||
self.doc == other.doc)
|
||||
|
||||
classic_doc = Doc(doc_name="my doc", doc=Embedded(name="embedded doc"))
|
||||
dict_doc = Doc(**{"doc_name": "my doc",
|
||||
"doc": {"name": "embedded doc"}})
|
||||
@@ -2443,6 +2737,10 @@ class InstanceTest(unittest.TestCase):
|
||||
doc_name = StringField()
|
||||
docs = ListField(EmbeddedDocumentField(Embedded))
|
||||
|
||||
def __eq__(self, other):
|
||||
return (self.doc_name == other.doc_name and
|
||||
self.docs == other.docs)
|
||||
|
||||
classic_doc = Doc(doc_name="my doc", docs=[
|
||||
Embedded(name="embedded doc1"),
|
||||
Embedded(name="embedded doc2")])
|
||||
@@ -2537,7 +2835,9 @@ class InstanceTest(unittest.TestCase):
|
||||
system.save()
|
||||
|
||||
system = NodesSystem.objects.first()
|
||||
self.assertEqual("UNDEFINED", system.nodes["node"].parameters["param"].macros["test"].value)
|
||||
self.assertEqual(
|
||||
"UNDEFINED",
|
||||
system.nodes["node"].parameters["param"].macros["test"].value)
|
||||
|
||||
def test_embedded_document_equality(self):
|
||||
|
||||
@@ -2643,5 +2943,76 @@ class InstanceTest(unittest.TestCase):
|
||||
self.assertEquals(p4.height, 189)
|
||||
self.assertEquals(Person.objects(height=189).count(), 1)
|
||||
|
||||
def test_from_son(self):
|
||||
# 771
|
||||
class MyPerson(self.Person):
|
||||
meta = dict(shard_key=["id"])
|
||||
p = MyPerson.from_json('{"name": "name", "age": 27}', created=True)
|
||||
self.assertEquals(p.id, None)
|
||||
p.id = "12345" # in case it is not working: "OperationError: Shard Keys are immutable..." will be raised here
|
||||
p = MyPerson._from_son({"name": "name", "age": 27}, created=True)
|
||||
self.assertEquals(p.id, None)
|
||||
p.id = "12345" # in case it is not working: "OperationError: Shard Keys are immutable..." will be raised here
|
||||
|
||||
def test_null_field(self):
|
||||
# 734
|
||||
class User(Document):
|
||||
name = StringField()
|
||||
height = IntField(default=184, null=True)
|
||||
str_fld = StringField(null=True)
|
||||
int_fld = IntField(null=True)
|
||||
flt_fld = FloatField(null=True)
|
||||
dt_fld = DateTimeField(null=True)
|
||||
cdt_fld = ComplexDateTimeField(null=True)
|
||||
|
||||
User.objects.delete()
|
||||
u = User(name='user')
|
||||
u.save()
|
||||
u_from_db = User.objects.get(name='user')
|
||||
u_from_db.height = None
|
||||
u_from_db.save()
|
||||
self.assertEquals(u_from_db.height, None)
|
||||
# 864
|
||||
self.assertEqual(u_from_db.str_fld, None)
|
||||
self.assertEqual(u_from_db.int_fld, None)
|
||||
self.assertEqual(u_from_db.flt_fld, None)
|
||||
self.assertEqual(u_from_db.dt_fld, None)
|
||||
self.assertEqual(u_from_db.cdt_fld, None)
|
||||
|
||||
# 735
|
||||
User.objects.delete()
|
||||
u = User(name='user')
|
||||
u.save()
|
||||
User.objects(name='user').update_one(set__height=None, upsert=True)
|
||||
u_from_db = User.objects.get(name='user')
|
||||
self.assertEquals(u_from_db.height, None)
|
||||
|
||||
def test_not_saved_eq(self):
|
||||
"""Ensure we can compare documents not saved.
|
||||
"""
|
||||
class Person(Document):
|
||||
pass
|
||||
|
||||
p = Person()
|
||||
p1 = Person()
|
||||
self.assertNotEqual(p, p1)
|
||||
self.assertEqual(p, p)
|
||||
|
||||
def test_list_iter(self):
|
||||
# 914
|
||||
class B(EmbeddedDocument):
|
||||
v = StringField()
|
||||
|
||||
class A(Document):
|
||||
l = ListField(EmbeddedDocumentField(B))
|
||||
|
||||
A.objects.delete()
|
||||
A(l=[B(v='1'), B(v='2'), B(v='3')]).save()
|
||||
a = A.objects.get()
|
||||
self.assertEqual(a.l._instance, a)
|
||||
for idx, b in enumerate(a.l):
|
||||
self.assertEqual(b._instance, a)
|
||||
self.assertEqual(idx, 2)
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
@@ -51,6 +51,10 @@ class TestJson(unittest.TestCase):
|
||||
string = StringField()
|
||||
embedded_field = EmbeddedDocumentField(Embedded)
|
||||
|
||||
def __eq__(self, other):
|
||||
return (self.string == other.string and
|
||||
self.embedded_field == other.embedded_field)
|
||||
|
||||
doc = Doc(string="Hi", embedded_field=Embedded(string="Hi"))
|
||||
|
||||
doc_json = doc.to_json(sort_keys=True, separators=(',', ':'))
|
||||
@@ -99,6 +103,10 @@ class TestJson(unittest.TestCase):
|
||||
generic_embedded_document_field = GenericEmbeddedDocumentField(
|
||||
default=lambda: EmbeddedDoc())
|
||||
|
||||
def __eq__(self, other):
|
||||
import json
|
||||
return json.loads(self.to_json()) == json.loads(other.to_json())
|
||||
|
||||
doc = Doc()
|
||||
self.assertEqual(doc, Doc.from_json(doc.to_json()))
|
||||
|
||||
|
@@ -165,6 +165,53 @@ class ValidatorErrorTest(unittest.TestCase):
|
||||
|
||||
self.assertRaises(ValidationError, lambda: d2.validate())
|
||||
|
||||
def test_parent_reference_in_child_document(self):
|
||||
"""
|
||||
Test to ensure a ReferenceField can store a reference to a parent
|
||||
class when inherited. Issue #954.
|
||||
"""
|
||||
class Parent(Document):
|
||||
meta = {'allow_inheritance': True}
|
||||
reference = ReferenceField('self')
|
||||
|
||||
class Child(Parent):
|
||||
pass
|
||||
|
||||
parent = Parent()
|
||||
parent.save()
|
||||
|
||||
child = Child(reference=parent)
|
||||
|
||||
# Saving child should not raise a ValidationError
|
||||
try:
|
||||
child.save()
|
||||
except ValidationError as e:
|
||||
self.fail("ValidationError raised: %s" % e.message)
|
||||
|
||||
def test_parent_reference_set_as_attribute_in_child_document(self):
|
||||
"""
|
||||
Test to ensure a ReferenceField can store a reference to a parent
|
||||
class when inherited and when set via attribute. Issue #954.
|
||||
"""
|
||||
class Parent(Document):
|
||||
meta = {'allow_inheritance': True}
|
||||
reference = ReferenceField('self')
|
||||
|
||||
class Child(Parent):
|
||||
pass
|
||||
|
||||
parent = Parent()
|
||||
parent.save()
|
||||
|
||||
child = Child()
|
||||
child.reference = parent
|
||||
|
||||
# Saving the child should not raise a ValidationError
|
||||
try:
|
||||
child.save()
|
||||
except ValidationError as e:
|
||||
self.fail("ValidationError raised: %s" % e.message)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
@@ -1,3 +1,3 @@
|
||||
from fields import *
|
||||
from file_tests import *
|
||||
from geo import *
|
||||
from geo import *
|
||||
|
File diff suppressed because it is too large
@@ -12,7 +12,7 @@ import gridfs
|
||||
from nose.plugins.skip import SkipTest
|
||||
from mongoengine import *
|
||||
from mongoengine.connection import get_db
|
||||
from mongoengine.python_support import PY3, b, StringIO
|
||||
from mongoengine.python_support import b, StringIO
|
||||
|
||||
try:
|
||||
from PIL import Image
|
||||
@@ -112,7 +112,43 @@ class FileTest(unittest.TestCase):
|
||||
result.the_file.delete()
|
||||
|
||||
# Ensure deleted file returns None
|
||||
self.assertTrue(result.the_file.read() == None)
|
||||
self.assertTrue(result.the_file.read() is None)
|
||||
|
||||
def test_file_fields_stream_after_none(self):
|
||||
"""Ensure that a file field can be written to after it has been saved as
|
||||
None
|
||||
"""
|
||||
class StreamFile(Document):
|
||||
the_file = FileField()
|
||||
|
||||
StreamFile.drop_collection()
|
||||
|
||||
text = b('Hello, World!')
|
||||
more_text = b('Foo Bar')
|
||||
content_type = 'text/plain'
|
||||
|
||||
streamfile = StreamFile()
|
||||
streamfile.save()
|
||||
streamfile.the_file.new_file()
|
||||
streamfile.the_file.write(text)
|
||||
streamfile.the_file.write(more_text)
|
||||
streamfile.the_file.close()
|
||||
streamfile.save()
|
||||
|
||||
result = StreamFile.objects.first()
|
||||
self.assertTrue(streamfile == result)
|
||||
self.assertEqual(result.the_file.read(), text + more_text)
|
||||
# self.assertEqual(result.the_file.content_type, content_type)
|
||||
result.the_file.seek(0)
|
||||
self.assertEqual(result.the_file.tell(), 0)
|
||||
self.assertEqual(result.the_file.read(len(text)), text)
|
||||
self.assertEqual(result.the_file.tell(), len(text))
|
||||
self.assertEqual(result.the_file.read(len(more_text)), more_text)
|
||||
self.assertEqual(result.the_file.tell(), len(text + more_text))
|
||||
result.the_file.delete()
|
||||
|
||||
# Ensure deleted file returns None
|
||||
self.assertTrue(result.the_file.read() is None)
|
||||
|
||||
def test_file_fields_set(self):
|
||||
|
||||
@@ -261,6 +297,71 @@ class FileTest(unittest.TestCase):
|
||||
test_file = TestFile()
|
||||
self.assertFalse(test_file.the_file in [{"test": 1}])
|
||||
|
||||
def test_file_disk_space(self):
|
||||
""" Test disk space usage when we delete/replace a file """
|
||||
class TestFile(Document):
|
||||
the_file = FileField()
|
||||
|
||||
text = b('Hello, World!')
|
||||
content_type = 'text/plain'
|
||||
|
||||
testfile = TestFile()
|
||||
testfile.the_file.put(text, content_type=content_type, filename="hello")
|
||||
testfile.save()
|
||||
|
||||
# Now check fs.files and fs.chunks
|
||||
db = TestFile._get_db()
|
||||
|
||||
files = db.fs.files.find()
|
||||
chunks = db.fs.chunks.find()
|
||||
self.assertEquals(len(list(files)), 1)
|
||||
self.assertEquals(len(list(chunks)), 1)
|
||||
|
||||
# Deleting the document should delete the files
|
||||
testfile.delete()
|
||||
|
||||
files = db.fs.files.find()
|
||||
chunks = db.fs.chunks.find()
|
||||
self.assertEquals(len(list(files)), 0)
|
||||
self.assertEquals(len(list(chunks)), 0)
|
||||
|
||||
# Test case where we don't store a file in the first place
|
||||
testfile = TestFile()
|
||||
testfile.save()
|
||||
|
||||
files = db.fs.files.find()
|
||||
chunks = db.fs.chunks.find()
|
||||
self.assertEquals(len(list(files)), 0)
|
||||
self.assertEquals(len(list(chunks)), 0)
|
||||
|
||||
testfile.delete()
|
||||
|
||||
files = db.fs.files.find()
|
||||
chunks = db.fs.chunks.find()
|
||||
self.assertEquals(len(list(files)), 0)
|
||||
self.assertEquals(len(list(chunks)), 0)
|
||||
|
||||
# Test case where we overwrite the file
|
||||
testfile = TestFile()
|
||||
testfile.the_file.put(text, content_type=content_type, filename="hello")
|
||||
testfile.save()
|
||||
|
||||
text = b('Bonjour, World!')
|
||||
testfile.the_file.replace(text, content_type=content_type, filename="hello")
|
||||
testfile.save()
|
||||
|
||||
files = db.fs.files.find()
|
||||
chunks = db.fs.chunks.find()
|
||||
self.assertEquals(len(list(files)), 1)
|
||||
self.assertEquals(len(list(chunks)), 1)
|
||||
|
||||
testfile.delete()
|
||||
|
||||
files = db.fs.files.find()
|
||||
chunks = db.fs.chunks.find()
|
||||
self.assertEquals(len(list(files)), 0)
|
||||
self.assertEquals(len(list(chunks)), 0)
|
||||
|
||||
def test_image_field(self):
|
||||
if not HAS_PIL:
|
||||
raise SkipTest('PIL not installed')
|
||||
|
@@ -19,8 +19,8 @@ class GeoFieldTest(unittest.TestCase):
|
||||
def _test_for_expected_error(self, Cls, loc, expected):
|
||||
try:
|
||||
Cls(loc=loc).validate()
|
||||
self.fail()
|
||||
except ValidationError, e:
|
||||
self.fail('Should not validate the location {0}'.format(loc))
|
||||
except ValidationError as e:
|
||||
self.assertEqual(expected, e.to_dict()['loc'])
|
||||
|
||||
def test_geopoint_validation(self):
|
||||
@@ -115,7 +115,7 @@ class GeoFieldTest(unittest.TestCase):
|
||||
expected = "Invalid LineString:\nBoth values (%s) in point must be float or int" % repr(coord[0])
|
||||
self._test_for_expected_error(Location, coord, expected)
|
||||
|
||||
Location(loc=[[1, 2], [3, 4], [5, 6], [1,2]]).validate()
|
||||
Location(loc=[[1, 2], [3, 4], [5, 6], [1, 2]]).validate()
|
||||
|
||||
def test_polygon_validation(self):
|
||||
class Location(Document):
|
||||
@@ -155,6 +155,117 @@ class GeoFieldTest(unittest.TestCase):
|
||||
|
||||
Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate()
|
||||
|
||||
def test_multipoint_validation(self):
|
||||
class Location(Document):
|
||||
loc = MultiPointField()
|
||||
|
||||
invalid_coords = {"x": 1, "y": 2}
|
||||
expected = 'MultiPointField can only accept a valid GeoJson dictionary or lists of (x, y)'
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
|
||||
expected = 'MultiPointField type must be "MultiPoint"'
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = {"type": "MultiPoint", "coordinates": [[1, 2, 3]]}
|
||||
expected = "Value ([1, 2, 3]) must be a two-dimensional point"
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[]]
|
||||
expected = "Invalid MultiPoint must contain at least one valid point"
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[[1]], [[1, 2, 3]]]
|
||||
for coord in invalid_coords:
|
||||
expected = "Value (%s) must be a two-dimensional point" % repr(coord[0])
|
||||
self._test_for_expected_error(Location, coord, expected)
|
||||
|
||||
invalid_coords = [[[{}, {}]], [("a", "b")]]
|
||||
for coord in invalid_coords:
|
||||
expected = "Both values (%s) in point must be float or int" % repr(coord[0])
|
||||
self._test_for_expected_error(Location, coord, expected)
|
||||
|
||||
Location(loc=[[1, 2]]).validate()
|
||||
Location(loc={
|
||||
"type": "MultiPoint",
|
||||
"coordinates": [
|
||||
[1, 2],
|
||||
[81.4471435546875, 23.61432859499169]
|
||||
]}).validate()
|
||||
|
||||
def test_multilinestring_validation(self):
|
||||
class Location(Document):
|
||||
loc = MultiLineStringField()
|
||||
|
||||
invalid_coords = {"x": 1, "y": 2}
|
||||
expected = 'MultiLineStringField can only accept a valid GeoJson dictionary or lists of (x, y)'
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
|
||||
expected = 'MultiLineStringField type must be "MultiLineString"'
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = {"type": "MultiLineString", "coordinates": [[[1, 2, 3]]]}
|
||||
expected = "Invalid MultiLineString:\nValue ([1, 2, 3]) must be a two-dimensional point"
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [5, "a"]
|
||||
expected = "Invalid MultiLineString must contain at least one valid linestring"
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[[1]]]
|
||||
expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0])
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[[1, 2, 3]]]
|
||||
expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0])
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[[[{}, {}]]], [[("a", "b")]]]
|
||||
for coord in invalid_coords:
|
||||
expected = "Invalid MultiLineString:\nBoth values (%s) in point must be float or int" % repr(coord[0][0])
|
||||
self._test_for_expected_error(Location, coord, expected)
|
||||
|
||||
Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate()
|
||||
|
||||
def test_multipolygon_validation(self):
|
||||
class Location(Document):
|
||||
loc = MultiPolygonField()
|
||||
|
||||
invalid_coords = {"x": 1, "y": 2}
|
||||
expected = 'MultiPolygonField can only accept a valid GeoJson dictionary or lists of (x, y)'
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
|
||||
expected = 'MultiPolygonField type must be "MultiPolygon"'
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = {"type": "MultiPolygon", "coordinates": [[[[1, 2, 3]]]]}
|
||||
expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[[[5, "a"]]]]
|
||||
expected = "Invalid MultiPolygon:\nBoth values ([5, 'a']) in point must be float or int"
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[[[]]]]
|
||||
expected = "Invalid MultiPolygon must contain at least one valid Polygon"
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[[[1, 2, 3]]]]
|
||||
expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[[[{}, {}]]], [[("a", "b")]]]
|
||||
expected = "Invalid MultiPolygon:\nBoth values ([{}, {}]) in point must be float or int, Both values (('a', 'b')) in point must be float or int"
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[[[1, 2], [3, 4]]]]
|
||||
expected = "Invalid MultiPolygon:\nLineStrings must start and end at the same point"
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
Location(loc=[[[[1, 2], [3, 4], [5, 6], [1, 2]]]]).validate()
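For reference while reading the validation messages above, these are the GeoJSON nesting depths involved (standard GeoJSON structure, not anything mongoengine-specific):

point = [1, 2]                                         # [x, y]
line_string = [[1, 2], [3, 4]]                         # list of points
polygon = [[[1, 2], [3, 4], [5, 6], [1, 2]]]           # list of closed rings
multi_polygon = {
    "type": "MultiPolygon",
    "coordinates": [[[[1, 2], [3, 4], [5, 6], [1, 2]]]],  # list of polygons
}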
def test_indexes_geopoint(self):
|
||||
"""Ensure that indexes are created automatically for GeoPointFields.
|
||||
"""
|
||||
@@ -225,12 +336,11 @@ class GeoFieldTest(unittest.TestCase):
|
||||
Location.drop_collection()
|
||||
Parent.drop_collection()
|
||||
|
||||
list(Parent.objects)
|
||||
|
||||
collection = Parent._get_collection()
|
||||
info = collection.index_information()
|
||||
|
||||
Parent(name='Berlin').save()
|
||||
info = Parent._get_collection().index_information()
|
||||
self.assertFalse('location_2d' in info)
|
||||
info = Location._get_collection().index_information()
|
||||
self.assertTrue('location_2d' in info)
|
||||
|
||||
self.assertEqual(len(Parent._geo_indices()), 0)
|
||||
self.assertEqual(len(Location._geo_indices()), 1)
|
||||
|
@@ -17,6 +17,15 @@ class PickleTest(Document):
|
||||
photo = FileField()
|
||||
|
||||
|
||||
class NewDocumentPickleTest(Document):
|
||||
number = IntField()
|
||||
string = StringField(choices=(('One', '1'), ('Two', '2')))
|
||||
embedded = EmbeddedDocumentField(PickleEmbedded)
|
||||
lists = ListField(StringField())
|
||||
photo = FileField()
|
||||
new_field = StringField()
|
||||
|
||||
|
||||
class PickleDyanmicEmbedded(DynamicEmbeddedDocument):
|
||||
date = DateTimeField(default=datetime.now)
|
||||
|
||||
|
@@ -1,12 +1,16 @@
|
||||
import sys
|
||||
|
||||
sys.path[0:0] = [""]
|
||||
|
||||
import unittest
|
||||
from datetime import datetime, timedelta
|
||||
from mongoengine import *
|
||||
|
||||
from pymongo.errors import OperationFailure
|
||||
from mongoengine import *
|
||||
from mongoengine.connection import get_connection
|
||||
from nose.plugins.skip import SkipTest
|
||||
|
||||
|
||||
__all__ = ("GeoQueriesTest",)
|
||||
|
||||
|
||||
@@ -66,6 +70,16 @@ class GeoQueriesTest(unittest.TestCase):
|
||||
self.assertEqual(events.count(), 1)
|
||||
self.assertEqual(events[0], event2)
|
||||
|
||||
# find events at least 10 degrees away of san francisco
|
||||
point = [-122.415579, 37.7566023]
|
||||
events = Event.objects(location__near=point, location__min_distance=10)
|
||||
# The following real test passes on MongoDB 3 but minDistance seems
|
||||
# buggy on older MongoDB versions
|
||||
if get_connection().server_info()['versionArray'][0] > 2:
|
||||
self.assertEqual(events.count(), 2)
|
||||
else:
|
||||
self.assertTrue(events.count() >= 2)
|
||||
|
||||
# find events within 10 degrees of san francisco
|
||||
point_and_distance = [[-122.415579, 37.7566023], 10]
|
||||
events = Event.objects(location__within_distance=point_and_distance)
|
||||
@@ -141,7 +155,13 @@ class GeoQueriesTest(unittest.TestCase):
|
||||
def test_spherical_geospatial_operators(self):
|
||||
"""Ensure that spherical geospatial queries are working
|
||||
"""
|
||||
raise SkipTest("https://jira.mongodb.org/browse/SERVER-14039")
|
||||
# Needs MongoDB > 2.6.4 https://jira.mongodb.org/browse/SERVER-14039
|
||||
connection = get_connection()
|
||||
info = connection.test.command('buildInfo')
|
||||
mongodb_version = tuple([int(i) for i in info['version'].split('.')])
|
||||
if mongodb_version < (2, 6, 4):
|
||||
raise SkipTest("Need MongoDB version 2.6.4+")
|
||||
|
||||
class Point(Document):
|
||||
location = GeoPointField()
|
||||
|
||||
@@ -161,7 +181,7 @@ class GeoQueriesTest(unittest.TestCase):
|
||||
|
||||
# Same behavior for _within_spherical_distance
|
||||
points = Point.objects(
|
||||
location__within_spherical_distance=[[-122, 37.5], 60/earth_radius]
|
||||
location__within_spherical_distance=[[-122, 37.5], 60 / earth_radius]
|
||||
)
|
||||
self.assertEqual(points.count(), 2)
|
||||
|
||||
@@ -169,6 +189,24 @@ class GeoQueriesTest(unittest.TestCase):
|
||||
location__max_distance=60 / earth_radius)
|
||||
self.assertEqual(points.count(), 2)
|
||||
|
||||
# Test query works with max_distance, being farther from one point
|
||||
points = Point.objects(location__near_sphere=[-122, 37.8],
|
||||
location__max_distance=60 / earth_radius)
|
||||
close_point = points.first()
|
||||
self.assertEqual(points.count(), 1)
|
||||
|
||||
# Test query works with min_distance, being farther from one point
|
||||
points = Point.objects(location__near_sphere=[-122, 37.8],
|
||||
location__min_distance=60 / earth_radius)
|
||||
# The following real test passes on MongoDB 3 but minDistance seems
|
||||
# buggy on older MongoDB versions
|
||||
if get_connection().server_info()['versionArray'][0] > 2:
|
||||
self.assertEqual(points.count(), 1)
|
||||
far_point = points.first()
|
||||
self.assertNotEqual(close_point, far_point)
|
||||
else:
|
||||
self.assertTrue(points.count() >= 1)
|
||||
|
||||
# Finds both points, but orders the north point first because it's
|
||||
# closer to the reference point to the north.
|
||||
points = Point.objects(location__near_sphere=[-122, 38.5])
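A short note on the units used by the spherical queries above: near_sphere distances on legacy coordinate pairs are expressed in radians, that is, the distance divided by the Earth's radius (roughly 6371 km), which is where the 60 / earth_radius expressions in this test come from.

earth_radius = 6371.0                 # km, approximate mean radius
max_distance = 60 / earth_radius      # a 60 km search radius, in radians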
@@ -251,6 +289,20 @@ class GeoQueriesTest(unittest.TestCase):
|
||||
self.assertEqual(events.count(), 2)
|
||||
self.assertEqual(events[0], event3)
|
||||
|
||||
# ensure min_distance and max_distance combine well
|
||||
events = Event.objects(location__near=[-87.67892, 41.9120459],
|
||||
location__min_distance=1000,
|
||||
location__max_distance=10000).order_by("-date")
|
||||
self.assertEqual(events.count(), 1)
|
||||
self.assertEqual(events[0], event3)
|
||||
|
||||
# ensure ordering is respected by "near"
|
||||
events = Event.objects(location__near=[-87.67892, 41.9120459],
|
||||
# location__min_distance=10000
|
||||
location__min_distance=10000).order_by("-date")
|
||||
self.assertEqual(events.count(), 1)
|
||||
self.assertEqual(events[0], event2)
|
||||
|
||||
# check that within_box works
|
||||
box = [(-125.0, 35.0), (-100.0, 40.0)]
|
||||
events = Event.objects(location__geo_within_box=box)
|
||||
|
@@ -13,11 +13,11 @@ import pymongo
|
||||
from pymongo.errors import ConfigurationError
|
||||
from pymongo.read_preferences import ReadPreference
|
||||
|
||||
from bson import ObjectId
|
||||
from bson import ObjectId, DBRef
|
||||
|
||||
from mongoengine import *
|
||||
from mongoengine.connection import get_connection, get_db
|
||||
from mongoengine.python_support import PY3
|
||||
from mongoengine.python_support import PY3, IS_PYMONGO_3
|
||||
from mongoengine.context_managers import query_counter, switch_db
|
||||
from mongoengine.queryset import (QuerySet, QuerySetManager,
|
||||
MultipleObjectsReturned, DoesNotExist,
|
||||
@@ -51,6 +51,20 @@ def skip_older_mongodb(f):
|
||||
return _inner
|
||||
|
||||
|
||||
def skip_pymongo3(f):
|
||||
def _inner(*args, **kwargs):
|
||||
|
||||
if IS_PYMONGO_3:
|
||||
raise SkipTest("Useless with PyMongo 3+")
|
||||
|
||||
return f(*args, **kwargs)
|
||||
|
||||
_inner.__name__ = f.__name__
|
||||
_inner.__doc__ = f.__doc__
|
||||
|
||||
return _inner
|
||||
|
||||
|
||||
class QuerySetTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
@@ -326,8 +340,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
|
||||
write_concern = {"fsync": True}
|
||||
|
||||
author, created = self.Person.objects.get_or_create(
|
||||
name='Test User', write_concern=write_concern)
|
||||
author = self.Person.objects.create(name='Test User')
|
||||
author.save(write_concern=write_concern)
|
||||
|
||||
result = self.Person.objects.update(
|
||||
@@ -510,18 +523,29 @@ class QuerySetTest(unittest.TestCase):
|
||||
self.assertEqual(post.comments[0].by, 'joe')
|
||||
self.assertEqual(post.comments[0].votes.score, 4)
|
||||
|
||||
def test_updates_can_have_match_operators(self):
|
||||
def test_update_min_max(self):
|
||||
class Scores(Document):
|
||||
high_score = IntField()
|
||||
low_score = IntField()
|
||||
scores = Scores(high_score=800, low_score=200)
|
||||
scores.save()
|
||||
Scores.objects(id=scores.id).update(min__low_score=150)
|
||||
self.assertEqual(Scores.objects(id=scores.id).get().low_score, 150)
|
||||
Scores.objects(id=scores.id).update(min__low_score=250)
|
||||
self.assertEqual(Scores.objects(id=scores.id).get().low_score, 150)
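A hedged sketch of the keyword translation behind update(min__low_score=150) in the new test above (toy code, not the real transform): the min and max prefixes map onto MongoDB's $min and $max update operators, which only write the value when it is lower or higher, respectively, than the stored one.

def translate_min_max(**kwargs):
    # Turn min__<field>=value / max__<field>=value into a $min/$max update doc.
    update = {}
    for key, value in kwargs.items():
        op, _, field = key.partition('__')
        if op in ('min', 'max'):
            update.setdefault('$' + op, {})[field] = value
    return update

assert translate_min_max(min__low_score=150) == {'$min': {'low_score': 150}}
assert translate_min_max(max__high_score=900) == {'$max': {'high_score': 900}}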
class Post(Document):
|
||||
title = StringField(required=True)
|
||||
tags = ListField(StringField())
|
||||
comments = ListField(EmbeddedDocumentField("Comment"))
|
||||
def test_updates_can_have_match_operators(self):
|
||||
|
||||
class Comment(EmbeddedDocument):
|
||||
content = StringField()
|
||||
name = StringField(max_length=120)
|
||||
vote = IntField()
|
||||
|
||||
class Post(Document):
|
||||
title = StringField(required=True)
|
||||
tags = ListField(StringField())
|
||||
comments = ListField(EmbeddedDocumentField("Comment"))
|
||||
|
||||
Post.drop_collection()
|
||||
|
||||
comm1 = Comment(content="very funny indeed", name="John S", vote=1)
|
||||
@@ -591,6 +615,54 @@ class QuerySetTest(unittest.TestCase):
|
||||
set__name="bobby", multi=True)
|
||||
self.assertEqual(result, 2)
|
||||
|
||||
def test_update_validate(self):
|
||||
class EmDoc(EmbeddedDocument):
|
||||
str_f = StringField()
|
||||
|
||||
class Doc(Document):
|
||||
str_f = StringField()
|
||||
dt_f = DateTimeField()
|
||||
cdt_f = ComplexDateTimeField()
|
||||
ed_f = EmbeddedDocumentField(EmDoc)
|
||||
|
||||
self.assertRaises(ValidationError, Doc.objects().update, str_f=1, upsert=True)
|
||||
self.assertRaises(ValidationError, Doc.objects().update, dt_f="datetime", upsert=True)
|
||||
self.assertRaises(ValidationError, Doc.objects().update, ed_f__str_f=1, upsert=True)
|
||||
|
||||
def test_update_related_models( self ):
|
||||
class TestPerson( Document ):
|
||||
name = StringField()
|
||||
|
||||
class TestOrganization( Document ):
|
||||
name = StringField()
|
||||
owner = ReferenceField( TestPerson )
|
||||
|
||||
TestPerson.drop_collection()
|
||||
TestOrganization.drop_collection()
|
||||
|
||||
p = TestPerson( name='p1' )
|
||||
p.save()
|
||||
o = TestOrganization( name='o1' )
|
||||
o.save()
|
||||
|
||||
o.owner = p
|
||||
p.name = 'p2'
|
||||
|
||||
self.assertEqual( o._get_changed_fields(), [ 'owner' ] )
|
||||
self.assertEqual( p._get_changed_fields(), [ 'name' ] )
|
||||
|
||||
o.save()
|
||||
|
||||
self.assertEqual( o._get_changed_fields(), [] )
|
||||
self.assertEqual( p._get_changed_fields(), [ 'name' ] ) # Fails; it's empty
|
||||
|
||||
# This will do NOTHING at all, even though we changed the name
|
||||
p.save()
|
||||
|
||||
p.reload()
|
||||
|
||||
self.assertEqual( p.name, 'p2' ) # Fails; it's still `p1`
|
||||
|
||||
def test_upsert(self):
|
||||
self.Person.drop_collection()
|
||||
|
||||
@@ -620,37 +692,42 @@ class QuerySetTest(unittest.TestCase):
|
||||
self.assertEqual("Bob", bob.name)
|
||||
self.assertEqual(30, bob.age)
|
||||
|
||||
def test_get_or_create(self):
|
||||
"""Ensure that ``get_or_create`` returns one result or creates a new
|
||||
document.
|
||||
"""
|
||||
person1 = self.Person(name="User A", age=20)
|
||||
person1.save()
|
||||
person2 = self.Person(name="User B", age=30)
|
||||
person2.save()
|
||||
def test_save_and_only_on_fields_with_default(self):
|
||||
class Embed(EmbeddedDocument):
|
||||
field = IntField()
|
||||
|
||||
# Retrieve the first person from the database
|
||||
self.assertRaises(MultipleObjectsReturned,
|
||||
self.Person.objects.get_or_create)
|
||||
self.assertRaises(self.Person.MultipleObjectsReturned,
|
||||
self.Person.objects.get_or_create)
|
||||
class B(Document):
|
||||
meta = {'collection': 'b'}
|
||||
|
||||
# Use a query to filter the people found to just person2
|
||||
person, created = self.Person.objects.get_or_create(age=30)
|
||||
self.assertEqual(person.name, "User B")
|
||||
self.assertEqual(created, False)
|
||||
field = IntField(default=1)
|
||||
embed = EmbeddedDocumentField(Embed, default=Embed)
|
||||
embed_no_default = EmbeddedDocumentField(Embed)
|
||||
|
||||
person, created = self.Person.objects.get_or_create(age__lt=30)
|
||||
self.assertEqual(person.name, "User A")
|
||||
self.assertEqual(created, False)
|
||||
# Creating {field : 2, embed : {field: 2}, embed_no_default: {field: 2}}
|
||||
val = 2
|
||||
embed = Embed()
|
||||
embed.field = val
|
||||
record = B()
|
||||
record.field = val
|
||||
record.embed = embed
|
||||
record.embed_no_default = embed
|
||||
record.save()
|
||||
|
||||
# Try retrieving when no objects exists - new doc should be created
|
||||
kwargs = dict(age=50, defaults={'name': 'User C'})
|
||||
person, created = self.Person.objects.get_or_create(**kwargs)
|
||||
self.assertEqual(created, True)
|
||||
# Checking it was saved correctly
|
||||
record.reload()
|
||||
self.assertEqual(record.field, 2)
|
||||
self.assertEqual(record.embed_no_default.field, 2)
|
||||
self.assertEqual(record.embed.field, 2)
|
||||
|
||||
person = self.Person.objects.get(age=50)
|
||||
self.assertEqual(person.name, "User C")
|
||||
# Request only the _id field and save
|
||||
clone = B.objects().only('id').first()
|
||||
clone.save()
|
||||
|
||||
# Reload the record and see that the embed data is not lost
|
||||
record.reload()
|
||||
self.assertEqual(record.field, 2)
|
||||
self.assertEqual(record.embed_no_default.field, 2)
|
||||
self.assertEqual(record.embed.field, 2)
|
||||
|
||||
def test_bulk_insert(self):
|
||||
"""Ensure that bulk insert works
|
||||
@@ -669,6 +746,11 @@ class QuerySetTest(unittest.TestCase):
|
||||
|
||||
Blog.drop_collection()
|
||||
|
||||
# get MongoDB version info
|
||||
connection = get_connection()
|
||||
info = connection.test.command('buildInfo')
|
||||
mongodb_version = tuple([int(i) for i in info['version'].split('.')])
|
||||
|
||||
# Recreates the collection
|
||||
self.assertEqual(0, Blog.objects.count())
|
||||
|
||||
@@ -685,7 +767,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
blogs.append(Blog(title="post %s" % i, posts=[post1, post2]))
|
||||
|
||||
Blog.objects.insert(blogs, load_bulk=False)
|
||||
if (get_connection().max_wire_version <= 1):
|
||||
if mongodb_version < (2, 6):
|
||||
self.assertEqual(q, 1)
|
||||
else:
|
||||
# profiling logs each doc now in the bulk op
|
||||
@@ -698,7 +780,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
self.assertEqual(q, 0)
|
||||
|
||||
Blog.objects.insert(blogs)
|
||||
if (get_connection().max_wire_version <= 1):
|
||||
if mongodb_version < (2, 6):
|
||||
self.assertEqual(q, 2) # 1 for insert, and 1 for in bulk fetch
|
||||
else:
|
||||
# 99 for insert, and 1 for in bulk fetch
|
||||
@@ -830,8 +912,10 @@ class QuerySetTest(unittest.TestCase):
|
||||
|
||||
self.assertEqual(q, 3)
|
||||
|
||||
@skip_pymongo3
|
||||
def test_slave_okay(self):
|
||||
"""Ensures that a query can take slave_okay syntax
|
||||
"""Ensures that a query can take slave_okay syntax.
|
||||
Useless with PyMongo 3+ as well as with MongoDB 3+.
|
||||
"""
|
||||
person1 = self.Person(name="User A", age=20)
|
||||
person1.save()
|
||||
@@ -844,6 +928,8 @@ class QuerySetTest(unittest.TestCase):
|
||||
self.assertEqual(person.name, "User A")
|
||||
self.assertEqual(person.age, 20)
|
||||
|
||||
@skip_older_mongodb
|
||||
@skip_pymongo3
|
||||
def test_cursor_args(self):
|
||||
"""Ensures the cursor args can be set as expected
|
||||
"""
|
||||
@@ -914,7 +1000,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
docs = docs[1:4]
|
||||
self.assertEqual('[<Doc: 1>, <Doc: 2>, <Doc: 3>]', "%s" % docs)
|
||||
|
||||
self.assertEqual(docs.count(), 3)
|
||||
self.assertEqual(docs.count(with_limit_and_skip=True), 3)
|
||||
for doc in docs:
|
||||
self.assertEqual('.. queryset mid-iteration ..', repr(docs))
|
||||
|
||||
@@ -1302,6 +1388,31 @@ class QuerySetTest(unittest.TestCase):
|
||||
self.Person.objects(name='Test User').delete()
|
||||
self.assertEqual(1, BlogPost.objects.count())
|
||||
|
||||
def test_reverse_delete_rule_cascade_on_abstract_document(self):
|
||||
"""Ensure cascading deletion of referring documents from the database
|
||||
does not fail on abstract document.
|
||||
"""
|
||||
class AbstractBlogPost(Document):
|
||||
meta = {'abstract': True}
|
||||
author = ReferenceField(self.Person, reverse_delete_rule=CASCADE)
|
||||
|
||||
class BlogPost(AbstractBlogPost):
|
||||
content = StringField()
|
||||
BlogPost.drop_collection()
|
||||
|
||||
me = self.Person(name='Test User')
|
||||
me.save()
|
||||
someoneelse = self.Person(name='Some-one Else')
|
||||
someoneelse.save()
|
||||
|
||||
BlogPost(content='Watching TV', author=me).save()
|
||||
BlogPost(content='Chilling out', author=me).save()
|
||||
BlogPost(content='Pro Testing', author=someoneelse).save()
|
||||
|
||||
self.assertEqual(3, BlogPost.objects.count())
|
||||
self.Person.objects(name='Test User').delete()
|
||||
self.assertEqual(1, BlogPost.objects.count())
|
||||
|
||||
def test_reverse_delete_rule_cascade_self_referencing(self):
|
||||
"""Ensure self-referencing CASCADE deletes do not result in infinite
|
||||
loop
|
||||
@@ -1361,6 +1472,31 @@ class QuerySetTest(unittest.TestCase):
|
||||
self.assertEqual(1, BlogPost.objects.count())
|
||||
self.assertEqual(None, BlogPost.objects.first().category)
|
||||
|
||||
def test_reverse_delete_rule_nullify_on_abstract_document(self):
|
||||
"""Ensure nullification of references to deleted documents when
|
||||
reference is on an abstract document.
|
||||
"""
|
||||
class AbstractBlogPost(Document):
|
||||
meta = {'abstract': True}
|
||||
author = ReferenceField(self.Person, reverse_delete_rule=NULLIFY)
|
||||
|
||||
class BlogPost(AbstractBlogPost):
|
||||
content = StringField()
|
||||
BlogPost.drop_collection()
|
||||
|
||||
me = self.Person(name='Test User')
|
||||
me.save()
|
||||
someoneelse = self.Person(name='Some-one Else')
|
||||
someoneelse.save()
|
||||
|
||||
BlogPost(content='Watching TV', author=me).save()
|
||||
|
||||
self.assertEqual(1, BlogPost.objects.count())
|
||||
self.assertEqual(me, BlogPost.objects.first().author)
|
||||
self.Person.objects(name='Test User').delete()
|
||||
self.assertEqual(1, BlogPost.objects.count())
|
||||
self.assertEqual(None, BlogPost.objects.first().author)
|
||||
|
||||
def test_reverse_delete_rule_deny(self):
|
||||
"""Ensure deletion gets denied on documents that still have references
|
||||
to them.
|
||||
@@ -1380,6 +1516,26 @@ class QuerySetTest(unittest.TestCase):
|
||||
|
||||
self.assertRaises(OperationError, self.Person.objects.delete)
|
||||
|
||||
def test_reverse_delete_rule_deny_on_abstract_document(self):
|
||||
"""Ensure deletion gets denied on documents that still have references
|
||||
to them, when reference is on an abstract document.
|
||||
"""
|
||||
class AbstractBlogPost(Document):
|
||||
meta = {'abstract': True}
|
||||
author = ReferenceField(self.Person, reverse_delete_rule=DENY)
|
||||
|
||||
class BlogPost(AbstractBlogPost):
|
||||
content = StringField()
|
||||
BlogPost.drop_collection()
|
||||
|
||||
me = self.Person(name='Test User')
|
||||
me.save()
|
||||
|
||||
BlogPost(content='Watching TV', author=me).save()
|
||||
|
||||
self.assertEqual(1, BlogPost.objects.count())
|
||||
self.assertRaises(OperationError, self.Person.objects.delete)
|
||||
|
||||
def test_reverse_delete_rule_pull(self):
|
||||
"""Ensure pulling of references to deleted documents.
|
||||
"""
|
||||
@@ -1410,6 +1566,40 @@ class QuerySetTest(unittest.TestCase):
|
||||
self.assertEqual(post.authors, [me])
|
||||
self.assertEqual(another.authors, [])
|
||||
|
||||
def test_reverse_delete_rule_pull_on_abstract_documents(self):
|
||||
"""Ensure pulling of references to deleted documents when reference
|
||||
is defined on an abstract document.
|
||||
"""
|
||||
class AbstractBlogPost(Document):
|
||||
meta = {'abstract': True}
|
||||
authors = ListField(ReferenceField(self.Person,
|
||||
reverse_delete_rule=PULL))
|
||||
|
||||
class BlogPost(AbstractBlogPost):
|
||||
content = StringField()
|
||||
|
||||
BlogPost.drop_collection()
|
||||
self.Person.drop_collection()
|
||||
|
||||
me = self.Person(name='Test User')
|
||||
me.save()
|
||||
|
||||
someoneelse = self.Person(name='Some-one Else')
|
||||
someoneelse.save()
|
||||
|
||||
post = BlogPost(content='Watching TV', authors=[me, someoneelse])
|
||||
post.save()
|
||||
|
||||
another = BlogPost(content='Chilling Out', authors=[someoneelse])
|
||||
another.save()
|
||||
|
||||
someoneelse.delete()
|
||||
post.reload()
|
||||
another.reload()
|
||||
|
||||
self.assertEqual(post.authors, [me])
|
||||
self.assertEqual(another.authors, [])
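The four abstract-document tests above cover the same new behaviour: a reverse_delete_rule declared on a ReferenceField of an abstract base class is inherited by its concrete subclasses. A minimal sketch of the CASCADE case outside the test harness (the database name is an assumption, and a local mongod is assumed to be running):

from mongoengine import (CASCADE, Document, ReferenceField, StringField,
                         connect)

connect('reverse_delete_rule_demo')  # assumes a local mongod


class Person(Document):
    name = StringField()


class AbstractPost(Document):
    # The rule is declared once on the abstract base ...
    meta = {'abstract': True}
    author = ReferenceField(Person, reverse_delete_rule=CASCADE)


class BlogPost(AbstractPost):
    # ... and is inherited by every concrete subclass.
    content = StringField()


Person.drop_collection()
BlogPost.drop_collection()

author = Person(name='Author').save()
BlogPost(content='First post', author=author).save()

Person.objects(name='Author').delete()
assert BlogPost.objects.count() == 0  # cascaded via the inherited rule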
|
||||
|
||||
def test_delete_with_limits(self):
|
||||
|
||||
class Log(Document):
|
||||
@@ -1444,6 +1634,12 @@ class QuerySetTest(unittest.TestCase):
|
||||
self.Person.objects()[:1].delete()
|
||||
self.assertEqual(1, BlogPost.objects.count())
|
||||
|
||||
def test_limit_with_write_concern_0(self):
|
||||
|
||||
p1 = self.Person(name="User Z", age=20).save()
|
||||
del_result = p1.delete(w=0)
|
||||
self.assertEqual(None, del_result)
|
||||
|
||||
def test_reference_field_find(self):
|
||||
"""Ensure cascading deletion of referring documents from the database.
|
||||
"""
|
||||
@@ -1492,6 +1688,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
"""Ensure that atomic updates work properly.
|
||||
"""
|
||||
class BlogPost(Document):
|
||||
name = StringField()
|
||||
title = StringField()
|
||||
hits = IntField()
|
||||
tags = ListField(StringField())
|
||||
@@ -2515,26 +2712,58 @@ class QuerySetTest(unittest.TestCase):
|
||||
|
||||
avg = float(sum(ages)) / (len(ages) + 1) # take into account the 0
|
||||
self.assertAlmostEqual(int(self.Person.objects.average('age')), avg)
|
||||
self.assertAlmostEqual(
|
||||
int(self.Person.objects.aggregate_average('age')), avg
|
||||
)
|
||||
|
||||
self.Person(name='ageless person').save()
|
||||
self.assertEqual(int(self.Person.objects.average('age')), avg)
|
||||
self.assertEqual(
|
||||
int(self.Person.objects.aggregate_average('age')), avg
|
||||
)
|
||||
|
||||
# dot notation
|
||||
self.Person(
|
||||
name='person meta', person_meta=self.PersonMeta(weight=0)).save()
|
||||
self.assertAlmostEqual(
|
||||
int(self.Person.objects.average('person_meta.weight')), 0)
|
||||
self.assertAlmostEqual(
|
||||
int(self.Person.objects.aggregate_average('person_meta.weight')),
|
||||
0
|
||||
)
|
||||
|
||||
for i, weight in enumerate(ages):
|
||||
self.Person(
|
||||
name='test meta%s' % i, person_meta=self.PersonMeta(weight=weight)).save()
|
||||
|
||||
self.assertAlmostEqual(
|
||||
int(self.Person.objects.average('person_meta.weight')), avg)
|
||||
int(self.Person.objects.average('person_meta.weight')), avg
|
||||
)
|
||||
self.assertAlmostEqual(
|
||||
int(self.Person.objects.aggregate_average('person_meta.weight')),
|
||||
avg
|
||||
)
|
||||
|
||||
self.Person(name='test meta none').save()
|
||||
self.assertEqual(
|
||||
int(self.Person.objects.average('person_meta.weight')), avg)
|
||||
int(self.Person.objects.average('person_meta.weight')), avg
|
||||
)
|
||||
self.assertEqual(
|
||||
int(self.Person.objects.aggregate_average('person_meta.weight')),
|
||||
avg
|
||||
)
|
||||
|
||||
# test summing over a filtered queryset
|
||||
over_50 = [a for a in ages if a >= 50]
|
||||
avg = float(sum(over_50)) / len(over_50)
|
||||
self.assertEqual(
|
||||
self.Person.objects.filter(age__gte=50).average('age'),
|
||||
avg
|
||||
)
|
||||
self.assertEqual(
|
||||
self.Person.objects.filter(age__gte=50).aggregate_average('age'),
|
||||
avg
|
||||
)
|
||||
|
||||
def test_sum(self):
|
||||
"""Ensure that field can be summed over correctly.
|
||||
@@ -2543,20 +2772,44 @@ class QuerySetTest(unittest.TestCase):
|
||||
for i, age in enumerate(ages):
|
||||
self.Person(name='test%s' % i, age=age).save()
|
||||
|
||||
self.assertEqual(int(self.Person.objects.sum('age')), sum(ages))
|
||||
self.assertEqual(self.Person.objects.sum('age'), sum(ages))
|
||||
self.assertEqual(
|
||||
self.Person.objects.aggregate_sum('age'), sum(ages)
|
||||
)
|
||||
|
||||
self.Person(name='ageless person').save()
|
||||
self.assertEqual(int(self.Person.objects.sum('age')), sum(ages))
|
||||
self.assertEqual(self.Person.objects.sum('age'), sum(ages))
|
||||
self.assertEqual(
|
||||
self.Person.objects.aggregate_sum('age'), sum(ages)
|
||||
)
|
||||
|
||||
for i, age in enumerate(ages):
|
||||
self.Person(name='test meta%s' %
|
||||
i, person_meta=self.PersonMeta(weight=age)).save()
|
||||
|
||||
self.assertEqual(
|
||||
int(self.Person.objects.sum('person_meta.weight')), sum(ages))
|
||||
self.Person.objects.sum('person_meta.weight'), sum(ages)
|
||||
)
|
||||
self.assertEqual(
|
||||
self.Person.objects.aggregate_sum('person_meta.weight'),
|
||||
sum(ages)
|
||||
)
|
||||
|
||||
self.Person(name='weightless person').save()
|
||||
self.assertEqual(int(self.Person.objects.sum('age')), sum(ages))
|
||||
self.assertEqual(self.Person.objects.sum('age'), sum(ages))
|
||||
self.assertEqual(
|
||||
self.Person.objects.aggregate_sum('age'), sum(ages)
|
||||
)
|
||||
|
||||
# test summing over a filtered queryset
|
||||
self.assertEqual(
|
||||
self.Person.objects.filter(age__gte=50).sum('age'),
|
||||
sum([a for a in ages if a >= 50])
|
||||
)
|
||||
self.assertEqual(
|
||||
self.Person.objects.filter(age__gte=50).aggregate_sum('age'),
|
||||
sum([a for a in ages if a >= 50])
|
||||
)
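The assertions above pair each sum/average call with its new aggregate_sum/aggregate_average counterpart, which computes the same figure through the aggregation framework. A short usage sketch (the database name is an assumption; a local mongod is assumed):

from mongoengine import Document, IntField, StringField, connect

connect('aggregate_demo')  # assumes a local mongod


class Person(Document):
    name = StringField()
    age = IntField()


Person.drop_collection()
for i, age in enumerate([23, 54, 12, 94, 27]):
    Person(name='person %s' % i, age=age).save()

# The aggregate_* variants compute the same figures via the
# aggregation framework; compare them to the classic methods.
assert Person.objects.sum('age') == Person.objects.aggregate_sum('age')
assert int(Person.objects.average('age')) == \
    int(Person.objects.aggregate_average('age'))

# Both respect filters applied to the queryset.
print(Person.objects.filter(age__gte=50).aggregate_average('age'))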
|
||||
|
||||
def test_embedded_average(self):
|
||||
class Pay(EmbeddedDocument):
|
||||
@@ -2783,25 +3036,27 @@ class QuerySetTest(unittest.TestCase):
|
||||
self.assertTrue('dilma' in new.content)
|
||||
self.assertTrue('planejamento' in new.title)
|
||||
|
||||
query = News.objects.search_text(
|
||||
"candidata", include_text_scores=True)
|
||||
|
||||
self.assertTrue(query._include_text_scores)
|
||||
query = News.objects.search_text("candidata")
|
||||
self.assertEqual(query._search_text, "candidata")
|
||||
new = query.first()
|
||||
|
||||
self.assertTrue(isinstance(new.text_score, float))
|
||||
self.assertTrue(isinstance(new.get_text_score(), float))
|
||||
|
||||
# count
|
||||
query = News.objects.search_text('brasil').order_by('$text_score')
|
||||
self.assertTrue(query._include_text_scores)
|
||||
self.assertEqual(query._search_text, "brasil")
|
||||
|
||||
self.assertEqual(query.count(), 3)
|
||||
self.assertEqual(query._query, {'$text': {'$search': 'brasil'}})
|
||||
cursor_args = query._cursor_args
|
||||
if not IS_PYMONGO_3:
|
||||
cursor_args_fields = cursor_args['fields']
|
||||
else:
|
||||
cursor_args_fields = cursor_args['projection']
|
||||
self.assertEqual(
|
||||
cursor_args['fields'], {'text_score': {'$meta': 'textScore'}})
|
||||
cursor_args_fields, {'_text_score': {'$meta': 'textScore'}})
|
||||
|
||||
text_scores = [i.text_score for i in query]
|
||||
text_scores = [i.get_text_score() for i in query]
|
||||
self.assertEqual(len(text_scores), 3)
|
||||
|
||||
self.assertTrue(text_scores[0] > text_scores[1])
|
||||
@@ -2811,7 +3066,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
# get item
|
||||
item = News.objects.search_text(
|
||||
'brasil').order_by('$text_score').first()
|
||||
self.assertEqual(item.text_score, max_text_score)
|
||||
self.assertEqual(item.get_text_score(), max_text_score)
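The text-search changes in this hunk rename the score accessor to get_text_score() and project the score into _text_score via {'$meta': 'textScore'}, so a user-defined text_score field no longer collides. A minimal sketch of the API, assuming MongoDB 2.6+ and a local connection (the demo document names are assumptions):

from mongoengine import Document, StringField, connect

connect('text_search_demo')  # assumes a local mongod running MongoDB 2.6+


class News(Document):
    title = StringField()
    content = StringField()
    meta = {'indexes': [
        {'fields': ['$title', '$content']}  # text index on both fields
    ]}


News.drop_collection()
News(title='A jornada', content='dilma e o planejamento').save()
News(title='Eleicoes', content='a candidata era dilma').save()

qs = News.objects.search_text('dilma').order_by('$text_score')
best = qs.first()
# get_text_score() reads the _text_score value projected by $meta.
print(best.get_text_score())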
|
||||
|
||||
@skip_older_mongodb
|
||||
def test_distinct_handles_references_to_alias(self):
|
||||
@@ -2878,13 +3133,55 @@ class QuerySetTest(unittest.TestCase):
|
||||
|
||||
self.assertEqual(authors, [mark_twain, john_tolkien])
|
||||
|
||||
def test_distinct_ListField_EmbeddedDocumentField_EmbeddedDocumentField(self):
|
||||
class Continent(EmbeddedDocument):
|
||||
continent_name = StringField()
|
||||
|
||||
class Country(EmbeddedDocument):
|
||||
country_name = StringField()
|
||||
continent = EmbeddedDocumentField(Continent)
|
||||
|
||||
class Author(EmbeddedDocument):
|
||||
name = StringField()
|
||||
country = EmbeddedDocumentField(Country)
|
||||
|
||||
class Book(Document):
|
||||
title = StringField()
|
||||
authors = ListField(EmbeddedDocumentField(Author))
|
||||
|
||||
Book.drop_collection()
|
||||
|
||||
europe = Continent(continent_name='europe')
|
||||
asia = Continent(continent_name='asia')
|
||||
|
||||
scotland = Country(country_name="Scotland", continent=europe)
|
||||
tibet = Country(country_name="Tibet", continent=asia)
|
||||
|
||||
mark_twain = Author(name="Mark Twain", country=scotland)
|
||||
john_tolkien = Author(name="John Ronald Reuel Tolkien", country=tibet)
|
||||
|
||||
book = Book(title="Tom Sawyer", authors=[mark_twain]).save()
|
||||
book = Book(
|
||||
title="The Lord of the Rings", authors=[john_tolkien]).save()
|
||||
book = Book(
|
||||
title="The Stories", authors=[mark_twain, john_tolkien]).save()
|
||||
country_list = Book.objects.distinct("authors.country")
|
||||
|
||||
self.assertEqual(country_list, [scotland, tibet])
|
||||
|
||||
continent_list = Book.objects.distinct("authors.country.continent")
|
||||
|
||||
self.assertEqual(continent_list, [europe, asia])
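distinct() now resolves dotted paths through ListField(EmbeddedDocumentField) chains and returns the embedded documents themselves, as the europe/asia assertions show. A compact sketch of the same idea (database name is an assumption):

from mongoengine import (Document, EmbeddedDocument, EmbeddedDocumentField,
                         ListField, StringField, connect)

connect('distinct_demo')  # assumes a local mongod


class Country(EmbeddedDocument):
    name = StringField()


class Author(EmbeddedDocument):
    name = StringField()
    country = EmbeddedDocumentField(Country)


class Book(Document):
    title = StringField()
    authors = ListField(EmbeddedDocumentField(Author))


Book.drop_collection()
scotland = Country(name='Scotland')
Book(title='Tom Sawyer',
     authors=[Author(name='Mark Twain', country=scotland)]).save()

# Returns deduplicated Country instances, not raw dictionaries.
print(Book.objects.distinct('authors.country'))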
|
||||
|
||||
def test_distinct_ListField_ReferenceField(self):
|
||||
class Foo(Document):
|
||||
bar_lst = ListField(ReferenceField('Bar'))
|
||||
|
||||
class Bar(Document):
|
||||
text = StringField()
|
||||
|
||||
class Foo(Document):
|
||||
bar = ReferenceField('Bar')
|
||||
bar_lst = ListField(ReferenceField('Bar'))
|
||||
|
||||
Bar.drop_collection()
|
||||
Foo.drop_collection()
|
||||
|
||||
@@ -3244,7 +3541,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
for i in xrange(10):
|
||||
Post(title="Post %s" % i).save()
|
||||
|
||||
self.assertEqual(5, Post.objects.limit(5).skip(5).count())
|
||||
self.assertEqual(5, Post.objects.limit(5).skip(5).count(with_limit_and_skip=True))
|
||||
|
||||
self.assertEqual(
|
||||
10, Post.objects.limit(5).skip(5).count(with_limit_and_skip=False))
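The count() calls in this hunk gain an explicit with_limit_and_skip=True: the clipped count now has to be requested, while the default counts the whole filter regardless of limit()/skip(). A small sketch of the difference (database name is an assumption):

from mongoengine import Document, StringField, connect

connect('count_demo')  # assumes a local mongod


class Post(Document):
    title = StringField()


Post.drop_collection()
for i in range(10):
    Post(title='Post %s' % i).save()

qs = Post.objects.limit(5).skip(5)
print(qs.count(with_limit_and_skip=True))   # 5: respects limit/skip
print(qs.count(with_limit_and_skip=False))  # 10: counts the whole filter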
|
||||
@@ -3458,11 +3755,9 @@ class QuerySetTest(unittest.TestCase):
|
||||
def test_scalar(self):
|
||||
|
||||
class Organization(Document):
|
||||
id = ObjectIdField('_id')
|
||||
name = StringField()
|
||||
|
||||
class User(Document):
|
||||
id = ObjectIdField('_id')
|
||||
name = StringField()
|
||||
organization = ObjectIdField()
|
||||
|
||||
@@ -3822,8 +4117,11 @@ class QuerySetTest(unittest.TestCase):
|
||||
bars = list(Bar.objects(read_preference=ReadPreference.PRIMARY))
|
||||
self.assertEqual([], bars)
|
||||
|
||||
self.assertRaises(ConfigurationError, Bar.objects,
|
||||
read_preference='Primary')
|
||||
if not IS_PYMONGO_3:
|
||||
error_class = ConfigurationError
|
||||
else:
|
||||
error_class = TypeError
|
||||
self.assertRaises(error_class, Bar.objects, read_preference='Primary')
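A string read_preference fails with a different exception depending on the driver: ConfigurationError under PyMongo 2 and TypeError under PyMongo 3, hence the error_class switch above. The guard in isolation (a sketch only; Bar stands in for any Document class):

from pymongo import ReadPreference
from pymongo.errors import ConfigurationError

from mongoengine.python_support import IS_PYMONGO_3

# The exception raised for read_preference='Primary' depends on the driver.
expected_error = TypeError if IS_PYMONGO_3 else ConfigurationError

# The supported spelling is the ReadPreference constant, e.g.:
preferred = ReadPreference.SECONDARY_PREFERRED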
|
||||
|
||||
bars = Bar.objects(read_preference=ReadPreference.SECONDARY_PREFERRED)
|
||||
self.assertEqual(
|
||||
@@ -3985,6 +4283,41 @@ class QuerySetTest(unittest.TestCase):
|
||||
Organization))
|
||||
self.assertTrue(isinstance(qs.first().organization, Organization))
|
||||
|
||||
def test_no_dereference_embedded_doc(self):
|
||||
|
||||
class User(Document):
|
||||
name = StringField()
|
||||
|
||||
class Member(EmbeddedDocument):
|
||||
name = StringField()
|
||||
user = ReferenceField(User)
|
||||
|
||||
class Organization(Document):
|
||||
name = StringField()
|
||||
members = ListField(EmbeddedDocumentField(Member))
|
||||
ceo = ReferenceField(User)
|
||||
member = EmbeddedDocumentField(Member)
|
||||
admin = ListField(ReferenceField(User))
|
||||
|
||||
Organization.drop_collection()
|
||||
User.drop_collection()
|
||||
|
||||
user = User(name="Flash")
|
||||
user.save()
|
||||
|
||||
member = Member(name="Flash", user=user)
|
||||
|
||||
company = Organization(name="Mongo Inc", ceo=user, member=member)
|
||||
company.admin.append(user)
|
||||
company.members.append(member)
|
||||
company.save()
|
||||
|
||||
result = Organization.objects().no_dereference().first()
|
||||
|
||||
self.assertTrue(isinstance(result.admin[0], (DBRef, ObjectId)))
|
||||
self.assertTrue(isinstance(result.member.user, (DBRef, ObjectId)))
|
||||
self.assertTrue(isinstance(result.members[0].user, (DBRef, ObjectId)))
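no_dereference() disables automatic dereferencing for the whole queryset, so references come back as raw DBRef/ObjectId values even when they sit inside embedded documents, which is what the three isinstance checks assert. A reduced sketch (database name is an assumption):

from bson import DBRef, ObjectId
from mongoengine import (Document, EmbeddedDocument, EmbeddedDocumentField,
                         ReferenceField, StringField, connect)

connect('no_dereference_demo')  # assumes a local mongod


class User(Document):
    name = StringField()


class Member(EmbeddedDocument):
    user = ReferenceField(User)


class Organization(Document):
    name = StringField()
    member = EmbeddedDocumentField(Member)


User.drop_collection()
Organization.drop_collection()

user = User(name='Flash').save()
Organization(name='Mongo Inc', member=Member(user=user)).save()

org = Organization.objects.no_dereference().first()
# The nested reference stays raw instead of being fetched.
assert isinstance(org.member.user, (DBRef, ObjectId))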
|
||||
|
||||
def test_cached_queryset(self):
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
@@ -4013,7 +4346,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
self.assertEqual(100, people._len) # Caused by list calling len
|
||||
self.assertEqual(q, 1)
|
||||
|
||||
people.count() # count is cached
|
||||
people.count(with_limit_and_skip=True) # count is cached
|
||||
self.assertEqual(q, 1)
|
||||
|
||||
def test_no_cached_queryset(self):
|
||||
@@ -4109,7 +4442,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
with query_counter() as q:
|
||||
self.assertEqual(q, 0)
|
||||
|
||||
self.assertEqual(users.count(), 7)
|
||||
self.assertEqual(users.count(with_limit_and_skip=True), 7)
|
||||
|
||||
for i, outer_user in enumerate(users):
|
||||
self.assertEqual(outer_user.name, names[i])
|
||||
@@ -4117,7 +4450,7 @@ class QuerySetTest(unittest.TestCase):
|
||||
inner_count = 0
|
||||
|
||||
# Calling len might disrupt the inner loop if there are bugs
|
||||
self.assertEqual(users.count(), 7)
|
||||
self.assertEqual(users.count(with_limit_and_skip=True), 7)
|
||||
|
||||
for j, inner_user in enumerate(users):
|
||||
self.assertEqual(inner_user.name, names[j])
|
||||
@@ -4401,6 +4734,48 @@ class QuerySetTest(unittest.TestCase):
|
||||
self.Person.objects().delete()
|
||||
self.assertEqual(self.Person.objects().skip(1).delete(), 0) # test Document delete without existing documents
|
||||
|
||||
def test_max_time_ms(self):
|
||||
# 778: max_time_ms can get only int or None as input
|
||||
self.assertRaises(TypeError, self.Person.objects(name="name").max_time_ms, "not a number")
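Per the #778 comment, max_time_ms() now accepts only an int or None and raises TypeError for anything else before the query is sent. A short usage sketch (treating None as clearing the limit follows the underlying driver; database name is an assumption):

from mongoengine import Document, StringField, connect

connect('max_time_ms_demo')  # assumes a local mongod


class Person(Document):
    name = StringField()


# Cap server-side execution time at 250 ms.
capped = Person.objects(name='name').max_time_ms(250)
# None removes the cap again.
uncapped = Person.objects(name='name').max_time_ms(None)

try:
    Person.objects(name='name').max_time_ms('not a number')
except TypeError:
    pass  # rejected client-side, nothing is sent to the server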
|
||||
|
||||
def test_subclass_field_query(self):
|
||||
class Animal(Document):
|
||||
is_mammal = BooleanField()
|
||||
meta = dict(allow_inheritance=True)
|
||||
|
||||
class Cat(Animal):
|
||||
whiskers_length = FloatField()
|
||||
|
||||
class ScottishCat(Cat):
|
||||
folded_ears = BooleanField()
|
||||
|
||||
Animal(is_mammal=False).save()
|
||||
Cat(is_mammal=True, whiskers_length=5.1).save()
|
||||
ScottishCat(is_mammal=True, folded_ears=True).save()
|
||||
self.assertEqual(Animal.objects(folded_ears=True).count(), 1)
|
||||
self.assertEqual(Animal.objects(whiskers_length=5.1).count(), 1)
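With allow_inheritance the subclasses share the base class's collection, so a query on the base class by a field that only a subclass defines simply matches the documents carrying that field. A sketch of the same hierarchy (database name is an assumption):

from mongoengine import BooleanField, Document, FloatField, connect

connect('inheritance_demo')  # assumes a local mongod


class Animal(Document):
    is_mammal = BooleanField()
    meta = {'allow_inheritance': True}


class Cat(Animal):
    whiskers_length = FloatField()


Animal.drop_collection()
Animal(is_mammal=False).save()
Cat(is_mammal=True, whiskers_length=5.1).save()

# Both documents live in the same collection, so the base-class query
# on the subclass-only field finds the single Cat.
assert Animal.objects(whiskers_length=5.1).count() == 1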
|
||||
|
||||
def test_loop_via_invalid_id_does_not_crash(self):
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
Person.objects.delete()
|
||||
Person._get_collection().update({"name": "a"}, {"$set": {"_id": ""}}, upsert=True)
|
||||
for p in Person.objects():
|
||||
self.assertEqual(p.name, 'a')
|
||||
|
||||
def test_last_field_name_like_operator(self):
|
||||
class EmbeddedItem(EmbeddedDocument):
|
||||
type = StringField()
|
||||
|
||||
class Doc(Document):
|
||||
item = EmbeddedDocumentField(EmbeddedItem)
|
||||
|
||||
Doc.drop_collection()
|
||||
|
||||
doc = Doc(item=EmbeddedItem(type="axe"))
|
||||
doc.save()
|
||||
|
||||
self.assertEqual(1, Doc.objects(item__type__="axe").count())
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
@@ -197,5 +197,42 @@ class TransformTest(unittest.TestCase):
|
||||
update = transform.update(Location, set__poly={"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]})
|
||||
self.assertEqual(update, {'$set': {'poly': {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}}})
|
||||
|
||||
def test_type(self):
|
||||
class Doc(Document):
|
||||
df = DynamicField()
|
||||
Doc(df=True).save()
|
||||
Doc(df=7).save()
|
||||
Doc(df="df").save()
|
||||
self.assertEqual(Doc.objects(df__type=1).count(), 0) # double
|
||||
self.assertEqual(Doc.objects(df__type=8).count(), 1) # bool
|
||||
self.assertEqual(Doc.objects(df__type=2).count(), 1) # str
|
||||
self.assertEqual(Doc.objects(df__type=16).count(), 1) # int
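The __type operator translates to MongoDB's $type and takes the numeric BSON type code (1 double, 2 string, 8 bool, 16 32-bit int), which is how a DynamicField can be filtered by the runtime type of its value. A minimal sketch (database name is an assumption):

from mongoengine import Document, DynamicField, connect

connect('type_query_demo')  # assumes a local mongod


class Doc(Document):
    df = DynamicField()


Doc.drop_collection()
Doc(df=True).save()
Doc(df=7).save()
Doc(df='df').save()

# df__type=N becomes {'df': {'$type': N}} in the raw query.
assert Doc.objects(df__type=8).count() == 1   # bool
assert Doc.objects(df__type=2).count() == 1   # string
assert Doc.objects(df__type=16).count() == 1  # 32-bit int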
|
||||
|
||||
def test_last_field_name_like_operator(self):
|
||||
class EmbeddedItem(EmbeddedDocument):
|
||||
type = StringField()
|
||||
name = StringField()
|
||||
|
||||
class Doc(Document):
|
||||
item = EmbeddedDocumentField(EmbeddedItem)
|
||||
|
||||
Doc.drop_collection()
|
||||
|
||||
doc = Doc(item=EmbeddedItem(type="axe", name="Heroic axe"))
|
||||
doc.save()
|
||||
|
||||
self.assertEqual(1, Doc.objects(item__type__="axe").count())
|
||||
self.assertEqual(1, Doc.objects(item__name__="Heroic axe").count())
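The trailing double underscore appears to act as an explicit end-of-path marker in the query transform, so item__type__ is parsed as the embedded field named 'type' instead of the $type operator, and the same suffix is harmless on fields that do not clash. A small sketch (database name is an assumption):

from mongoengine import (Document, EmbeddedDocument, EmbeddedDocumentField,
                         StringField, connect)

connect('trailing_underscore_demo')  # assumes a local mongod


class EmbeddedItem(EmbeddedDocument):
    type = StringField()  # name clashes with the __type query operator
    name = StringField()


class Doc(Document):
    item = EmbeddedDocumentField(EmbeddedItem)


Doc.drop_collection()
Doc(item=EmbeddedItem(type='axe', name='Heroic axe')).save()

# The trailing '__' ends the field path, so 'type' is a field, not $type.
assert Doc.objects(item__type__='axe').count() == 1
assert Doc.objects(item__name__='Heroic axe').count() == 1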
|
||||
|
||||
def test_understandable_error_raised(self):
|
||||
class Event(Document):
|
||||
title = StringField()
|
||||
location = GeoPointField()
|
||||
|
||||
box = [(35.0, -125.0), (40.0, -100.0)]
|
||||
# I *meant* to execute location__within_box=box
|
||||
events = Event.objects(location__within=box)
|
||||
self.assertRaises(InvalidQueryError, lambda: events.count())
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
@@ -1,4 +1,7 @@
|
||||
import sys
|
||||
import datetime
|
||||
from pymongo.errors import OperationFailure
|
||||
|
||||
sys.path[0:0] = [""]
|
||||
|
||||
try:
|
||||
@@ -6,16 +9,25 @@ try:
|
||||
except ImportError:
|
||||
import unittest
|
||||
|
||||
import datetime
|
||||
|
||||
import pymongo
|
||||
from bson.tz_util import utc
|
||||
|
||||
from mongoengine import *
|
||||
from mongoengine import (
|
||||
connect, register_connection,
|
||||
Document, DateTimeField
|
||||
)
|
||||
from mongoengine.python_support import IS_PYMONGO_3
|
||||
import mongoengine.connection
|
||||
from mongoengine.connection import get_db, get_connection, ConnectionError
|
||||
|
||||
|
||||
def get_tz_awareness(connection):
|
||||
if not IS_PYMONGO_3:
|
||||
return connection.tz_aware
|
||||
else:
|
||||
return connection.codec_options.tz_aware
|
||||
|
||||
|
||||
class ConnectionTest(unittest.TestCase):
|
||||
|
||||
def tearDown(self):
|
||||
@@ -42,12 +54,18 @@ class ConnectionTest(unittest.TestCase):
|
||||
def test_sharing_connections(self):
|
||||
"""Ensure that connections are shared when the connection settings are exactly the same
|
||||
"""
|
||||
connect('mongoenginetest', alias='testdb1')
|
||||
|
||||
connect('mongoenginetests', alias='testdb1')
|
||||
expected_connection = get_connection('testdb1')
|
||||
|
||||
connect('mongoenginetest', alias='testdb2')
|
||||
connect('mongoenginetests', alias='testdb2')
|
||||
actual_connection = get_connection('testdb2')
|
||||
|
||||
# Handle PyMongo 3+ Async Connection
|
||||
if IS_PYMONGO_3:
|
||||
# Ensure we are connected, throws ServerSelectionTimeoutError otherwise.
|
||||
# Purposely not catching exception to fail test if thrown.
|
||||
expected_connection.server_info()
|
||||
|
||||
self.assertEqual(expected_connection, actual_connection)
|
||||
|
||||
def test_connect_uri(self):
|
||||
@@ -61,7 +79,8 @@ class ConnectionTest(unittest.TestCase):
|
||||
c.admin.authenticate("admin", "password")
|
||||
c.mongoenginetest.add_user("username", "password")
|
||||
|
||||
self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost')
|
||||
if not IS_PYMONGO_3:
|
||||
self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost')
|
||||
|
||||
connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest')
|
||||
|
||||
@@ -76,8 +95,7 @@ class ConnectionTest(unittest.TestCase):
|
||||
c.mongoenginetest.system.users.remove({})
|
||||
|
||||
def test_connect_uri_without_db(self):
|
||||
"""Ensure that the connect() method works properly with uri's
|
||||
without database_name
|
||||
"""Ensure connect() method works properly with uri's without database_name
|
||||
"""
|
||||
c = connect(db='mongoenginetest', alias='admin')
|
||||
c.admin.system.users.remove({})
|
||||
@@ -87,7 +105,8 @@ class ConnectionTest(unittest.TestCase):
|
||||
c.admin.authenticate("admin", "password")
|
||||
c.mongoenginetest.add_user("username", "password")
|
||||
|
||||
self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost')
|
||||
if not IS_PYMONGO_3:
|
||||
self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost')
|
||||
|
||||
connect("mongoenginetest", host='mongodb://localhost/')
|
||||
|
||||
@@ -101,6 +120,42 @@ class ConnectionTest(unittest.TestCase):
|
||||
c.admin.system.users.remove({})
|
||||
c.mongoenginetest.system.users.remove({})
|
||||
|
||||
def test_connect_uri_with_authsource(self):
|
||||
"""Ensure that the connect() method works well with
|
||||
the option `authSource` in URI.
|
||||
This feature was introduced in MongoDB 2.4 and removed in 2.6
|
||||
"""
|
||||
# Create users
|
||||
c = connect('mongoenginetest')
|
||||
c.admin.system.users.remove({})
|
||||
c.admin.add_user('username2', 'password')
|
||||
|
||||
# Authentication fails without "authSource"
|
||||
if IS_PYMONGO_3:
|
||||
test_conn = connect('mongoenginetest', alias='test1',
|
||||
host='mongodb://username2:password@localhost/mongoenginetest')
|
||||
self.assertRaises(OperationFailure, test_conn.server_info)
|
||||
else:
|
||||
self.assertRaises(
|
||||
ConnectionError, connect, 'mongoenginetest', alias='test1',
|
||||
host='mongodb://username2:password@localhost/mongoenginetest'
|
||||
)
|
||||
self.assertRaises(ConnectionError, get_db, 'test1')
|
||||
|
||||
# Authentication succeeds with "authSource"
|
||||
test_conn2 = connect(
|
||||
'mongoenginetest', alias='test2',
|
||||
host=('mongodb://username2:password@localhost/'
|
||||
'mongoenginetest?authSource=admin')
|
||||
)
|
||||
# This will fail starting from MongoDB 2.6+
|
||||
db = get_db('test2')
|
||||
self.assertTrue(isinstance(db, pymongo.database.Database))
|
||||
self.assertEqual(db.name, 'mongoenginetest')
|
||||
|
||||
# Clear all users
|
||||
c.admin.system.users.remove({})
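The authSource test boils down to: the credentials live in admin, the connection's default database is mongoenginetest, and the URI's authSource=admin tells the driver where to authenticate. A reduced sketch (it assumes the user created above already exists and a local mongod is running):

from mongoengine import connect
from mongoengine.connection import get_db

# Assumes a user 'username2'/'password' defined in the 'admin' database.
connect(
    'mongoenginetest', alias='with_authsource',
    host=('mongodb://username2:password@localhost/'
          'mongoenginetest?authSource=admin'),
)

db = get_db('with_authsource')
print(db.name)  # 'mongoenginetest', authenticated against 'admin'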
|
||||
|
||||
def test_register_connection(self):
|
||||
"""Ensure that connections with different aliases may be registered.
|
||||
"""
|
||||
@@ -128,11 +183,11 @@ class ConnectionTest(unittest.TestCase):
|
||||
connect('mongoenginetest', alias='t1', tz_aware=True)
|
||||
conn = get_connection('t1')
|
||||
|
||||
self.assertTrue(conn.tz_aware)
|
||||
self.assertTrue(get_tz_awareness(conn))
|
||||
|
||||
connect('mongoenginetest2', alias='t2')
|
||||
conn = get_connection('t2')
|
||||
self.assertFalse(conn.tz_aware)
|
||||
self.assertFalse(get_tz_awareness(conn))
|
||||
|
||||
def test_datetime(self):
|
||||
connect('mongoenginetest', tz_aware=True)
|
||||
@@ -147,6 +202,27 @@ class ConnectionTest(unittest.TestCase):
|
||||
date_doc = DateDoc.objects.first()
|
||||
self.assertEqual(d, date_doc.the_date)
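With tz_aware=True the driver attaches tzinfo to datetimes on the way out, so a timezone-aware value round-trips and compares equal, which is what test_datetime checks. A self-contained sketch (database name is an assumption):

import datetime

from bson.tz_util import utc
from mongoengine import DateTimeField, Document, connect

connect('tz_demo', tz_aware=True)  # assumes a local mongod


class DateDoc(Document):
    the_date = DateTimeField(required=True)


DateDoc.drop_collection()
d = datetime.datetime(2010, 5, 5, tzinfo=utc)
DateDoc(the_date=d).save()

# The stored value comes back timezone-aware and equal to the original.
assert DateDoc.objects.first().the_date == d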
|
||||
|
||||
def test_multiple_connection_settings(self):
|
||||
connect('mongoenginetest', alias='t1', host="localhost")
|
||||
|
||||
connect('mongoenginetest2', alias='t2', host="127.0.0.1")
|
||||
|
||||
mongo_connections = mongoengine.connection._connections
|
||||
self.assertEqual(len(mongo_connections.items()), 2)
|
||||
self.assertTrue('t1' in mongo_connections.keys())
|
||||
self.assertTrue('t2' in mongo_connections.keys())
|
||||
if not IS_PYMONGO_3:
|
||||
self.assertEqual(mongo_connections['t1'].host, 'localhost')
|
||||
self.assertEqual(mongo_connections['t2'].host, '127.0.0.1')
|
||||
else:
|
||||
# Handle PyMongo 3+ Async Connection
|
||||
# Ensure we are connected, throws ServerSelectionTimeoutError otherwise.
|
||||
# Purposely not catching exception to fail test if thrown.
|
||||
mongo_connections['t1'].server_info()
|
||||
mongo_connections['t2'].server_info()
|
||||
self.assertEqual(mongo_connections['t1'].address[0], 'localhost')
|
||||
self.assertEqual(mongo_connections['t2'].address[0], '127.0.0.1')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
@@ -1,11 +1,14 @@
|
||||
import unittest
|
||||
from mongoengine.base.datastructures import StrictDict, SemiStrictDict
|
||||
|
||||
|
||||
class TestStrictDict(unittest.TestCase):
|
||||
def strict_dict_class(self, *args, **kwargs):
|
||||
return StrictDict.create(*args, **kwargs)
|
||||
|
||||
def setUp(self):
|
||||
self.dtype = self.strict_dict_class(("a", "b", "c"))
|
||||
|
||||
def test_init(self):
|
||||
d = self.dtype(a=1, b=1, c=1)
|
||||
self.assertEqual((d.a, d.b, d.c), (1, 1, 1))
|
||||
@@ -38,8 +41,9 @@ class TestStrictDict(unittest.TestCase):
|
||||
|
||||
def test_setattr_raises_on_nonexisting_attr(self):
|
||||
d = self.dtype()
|
||||
|
||||
def _f():
|
||||
d.x=1
|
||||
d.x = 1
|
||||
self.assertRaises(AttributeError, _f)
|
||||
|
||||
def test_setattr_getattr_special(self):
|
||||
|
@@ -318,6 +318,10 @@ class FieldTest(unittest.TestCase):
|
||||
def test_circular_reference(self):
|
||||
"""Ensure you can handle circular references
|
||||
"""
|
||||
class Relation(EmbeddedDocument):
|
||||
name = StringField()
|
||||
person = ReferenceField('Person')
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
relations = ListField(EmbeddedDocumentField('Relation'))
|
||||
@@ -325,10 +329,6 @@ class FieldTest(unittest.TestCase):
|
||||
def __repr__(self):
|
||||
return "<Person: %s>" % self.name
|
||||
|
||||
class Relation(EmbeddedDocument):
|
||||
name = StringField()
|
||||
person = ReferenceField('Person')
|
||||
|
||||
Person.drop_collection()
|
||||
mother = Person(name="Mother")
|
||||
daughter = Person(name="Daughter")
|
||||
@@ -947,6 +947,8 @@ class FieldTest(unittest.TestCase):
|
||||
|
||||
class Asset(Document):
|
||||
name = StringField(max_length=250, required=True)
|
||||
path = StringField()
|
||||
title = StringField()
|
||||
parent = GenericReferenceField(default=None)
|
||||
parents = ListField(GenericReferenceField())
|
||||
children = ListField(GenericReferenceField())
|
||||
@@ -1024,6 +1026,43 @@ class FieldTest(unittest.TestCase):
|
||||
self.assertEqual(type(foo.bar), Bar)
|
||||
self.assertEqual(type(foo.baz), Baz)
|
||||
|
||||
|
||||
def test_document_reload_reference_integrity(self):
|
||||
"""
|
||||
Ensure reloading a document with multiple similar id
|
||||
in different collections doesn't mix them.
|
||||
"""
|
||||
class Topic(Document):
|
||||
id = IntField(primary_key=True)
|
||||
class User(Document):
|
||||
id = IntField(primary_key=True)
|
||||
name = StringField()
|
||||
class Message(Document):
|
||||
id = IntField(primary_key=True)
|
||||
topic = ReferenceField(Topic)
|
||||
author = ReferenceField(User)
|
||||
|
||||
Topic.drop_collection()
|
||||
User.drop_collection()
|
||||
Message.drop_collection()
|
||||
|
||||
# All objects share the same id, but each in a different collection
|
||||
topic = Topic(id=1).save()
|
||||
user = User(id=1, name='user-name').save()
|
||||
Message(id=1, topic=topic, author=user).save()
|
||||
|
||||
concurrent_change_user = User.objects.get(id=1)
|
||||
concurrent_change_user.name = 'new-name'
|
||||
concurrent_change_user.save()
|
||||
self.assertNotEqual(user.name, 'new-name')
|
||||
|
||||
msg = Message.objects.get(id=1)
|
||||
msg.reload()
|
||||
self.assertEqual(msg.topic, topic)
|
||||
self.assertEqual(msg.author, user)
|
||||
self.assertEqual(msg.author.name, 'new-name')
|
||||
|
||||
|
||||
def test_list_lookup_not_checked_in_map(self):
|
||||
"""Ensure we dereference list data correctly
|
||||
"""
|
||||
@@ -1220,14 +1259,15 @@ class FieldTest(unittest.TestCase):
|
||||
self.assertEqual(page.tags[0], page.posts[0].tags[0])
|
||||
|
||||
def test_select_related_follows_embedded_referencefields(self):
|
||||
class Playlist(Document):
|
||||
items = ListField(EmbeddedDocumentField("PlaylistItem"))
|
||||
|
||||
class Song(Document):
|
||||
title = StringField()
|
||||
|
||||
class PlaylistItem(EmbeddedDocument):
|
||||
song = ReferenceField("Song")
|
||||
|
||||
class Song(Document):
|
||||
title = StringField()
|
||||
class Playlist(Document):
|
||||
items = ListField(EmbeddedDocumentField("PlaylistItem"))
|
||||
|
||||
Playlist.drop_collection()
|
||||
Song.drop_collection()
|
||||
|
@@ -1,308 +0,0 @@
|
||||
import sys
|
||||
sys.path[0:0] = [""]
|
||||
import unittest
|
||||
from nose.plugins.skip import SkipTest
|
||||
|
||||
from mongoengine import *
|
||||
from mongoengine.django.shortcuts import get_document_or_404
|
||||
|
||||
import django
|
||||
from django.http import Http404
|
||||
from django.template import Context, Template
|
||||
from django.conf import settings
|
||||
from django.core.paginator import Paginator
|
||||
|
||||
settings.configure(
|
||||
USE_TZ=True,
|
||||
INSTALLED_APPS=('django.contrib.auth', 'mongoengine.django.mongo_auth'),
|
||||
AUTH_USER_MODEL=('mongo_auth.MongoUser'),
|
||||
AUTHENTICATION_BACKENDS = ('mongoengine.django.auth.MongoEngineBackend',)
|
||||
)
|
||||
|
||||
# For Django >= 1.7
|
||||
if hasattr(django, 'setup'):
|
||||
django.setup()
|
||||
|
||||
try:
|
||||
from django.contrib.auth import authenticate, get_user_model
|
||||
from mongoengine.django.auth import User
|
||||
from mongoengine.django.mongo_auth.models import (
|
||||
MongoUser,
|
||||
MongoUserManager,
|
||||
get_user_document,
|
||||
)
|
||||
DJ15 = True
|
||||
except Exception:
|
||||
DJ15 = False
|
||||
from django.contrib.sessions.tests import SessionTestsMixin
|
||||
from mongoengine.django.sessions import SessionStore, MongoSession
|
||||
from mongoengine.django.tests import MongoTestCase
|
||||
from datetime import tzinfo, timedelta
|
||||
ZERO = timedelta(0)
|
||||
|
||||
|
||||
class FixedOffset(tzinfo):
|
||||
"""Fixed offset in minutes east from UTC."""
|
||||
|
||||
def __init__(self, offset, name):
|
||||
self.__offset = timedelta(minutes=offset)
|
||||
self.__name = name
|
||||
|
||||
def utcoffset(self, dt):
|
||||
return self.__offset
|
||||
|
||||
def tzname(self, dt):
|
||||
return self.__name
|
||||
|
||||
def dst(self, dt):
|
||||
return ZERO
|
||||
|
||||
|
||||
def activate_timezone(tz):
|
||||
"""Activate Django timezone support if it is available.
|
||||
"""
|
||||
try:
|
||||
from django.utils import timezone
|
||||
timezone.deactivate()
|
||||
timezone.activate(tz)
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
class QuerySetTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
connect(db='mongoenginetest')
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
self.Person = Person
|
||||
|
||||
def test_order_by_in_django_template(self):
|
||||
"""Ensure that QuerySets are properly ordered in Django template.
|
||||
"""
|
||||
self.Person.drop_collection()
|
||||
|
||||
self.Person(name="A", age=20).save()
|
||||
self.Person(name="D", age=10).save()
|
||||
self.Person(name="B", age=40).save()
|
||||
self.Person(name="C", age=30).save()
|
||||
|
||||
t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}")
|
||||
|
||||
d = {"ol": self.Person.objects.order_by('-name')}
|
||||
self.assertEqual(t.render(Context(d)), u'D-10:C-30:B-40:A-20:')
|
||||
d = {"ol": self.Person.objects.order_by('+name')}
|
||||
self.assertEqual(t.render(Context(d)), u'A-20:B-40:C-30:D-10:')
|
||||
d = {"ol": self.Person.objects.order_by('-age')}
|
||||
self.assertEqual(t.render(Context(d)), u'B-40:C-30:A-20:D-10:')
|
||||
d = {"ol": self.Person.objects.order_by('+age')}
|
||||
self.assertEqual(t.render(Context(d)), u'D-10:A-20:C-30:B-40:')
|
||||
|
||||
self.Person.drop_collection()
|
||||
|
||||
def test_q_object_filter_in_template(self):
|
||||
|
||||
self.Person.drop_collection()
|
||||
|
||||
self.Person(name="A", age=20).save()
|
||||
self.Person(name="D", age=10).save()
|
||||
self.Person(name="B", age=40).save()
|
||||
self.Person(name="C", age=30).save()
|
||||
|
||||
t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}")
|
||||
|
||||
d = {"ol": self.Person.objects.filter(Q(age=10) | Q(name="C"))}
|
||||
self.assertEqual(t.render(Context(d)), 'D-10:C-30:')
|
||||
|
||||
# Check double rendering doesn't throw an error
|
||||
self.assertEqual(t.render(Context(d)), 'D-10:C-30:')
|
||||
|
||||
def test_get_document_or_404(self):
|
||||
p = self.Person(name="G404")
|
||||
p.save()
|
||||
|
||||
self.assertRaises(Http404, get_document_or_404, self.Person, pk='1234')
|
||||
self.assertEqual(p, get_document_or_404(self.Person, pk=p.pk))
|
||||
|
||||
def test_pagination(self):
|
||||
"""Ensure that Pagination works as expected
|
||||
"""
|
||||
class Page(Document):
|
||||
name = StringField()
|
||||
|
||||
Page.drop_collection()
|
||||
|
||||
for i in xrange(1, 11):
|
||||
Page(name=str(i)).save()
|
||||
|
||||
paginator = Paginator(Page.objects.all(), 2)
|
||||
|
||||
t = Template("{% for i in page.object_list %}{{ i.name }}:{% endfor %}")
|
||||
for p in paginator.page_range:
|
||||
d = {"page": paginator.page(p)}
|
||||
end = p * 2
|
||||
start = end - 1
|
||||
self.assertEqual(t.render(Context(d)), u'%d:%d:' % (start, end))
|
||||
|
||||
def test_nested_queryset_template_iterator(self):
|
||||
# Try iterating the same queryset twice, nested, in a Django template.
|
||||
names = ['A', 'B', 'C', 'D']
|
||||
|
||||
class CustomUser(Document):
|
||||
name = StringField()
|
||||
|
||||
def __unicode__(self):
|
||||
return self.name
|
||||
|
||||
CustomUser.drop_collection()
|
||||
|
||||
for name in names:
|
||||
CustomUser(name=name).save()
|
||||
|
||||
users = CustomUser.objects.all().order_by('name')
|
||||
template = Template("{% for user in users %}{{ user.name }}{% ifequal forloop.counter 2 %} {% for inner_user in users %}{{ inner_user.name }}{% endfor %} {% endifequal %}{% endfor %}")
|
||||
rendered = template.render(Context({'users': users}))
|
||||
self.assertEqual(rendered, 'AB ABCD CD')
|
||||
|
||||
def test_filter(self):
|
||||
"""Ensure that a queryset and filters work as expected
|
||||
"""
|
||||
|
||||
class Note(Document):
|
||||
text = StringField()
|
||||
|
||||
Note.drop_collection()
|
||||
|
||||
for i in xrange(1, 101):
|
||||
Note(name="Note: %s" % i).save()
|
||||
|
||||
# Check the count
|
||||
self.assertEqual(Note.objects.count(), 100)
|
||||
|
||||
# Get the first 10 and confirm
|
||||
notes = Note.objects[:10]
|
||||
self.assertEqual(notes.count(), 10)
|
||||
|
||||
# Test djangos template filters
|
||||
# self.assertEqual(length(notes), 10)
|
||||
t = Template("{{ notes.count }}")
|
||||
c = Context({"notes": notes})
|
||||
self.assertEqual(t.render(c), "10")
|
||||
|
||||
# Test with skip
|
||||
notes = Note.objects.skip(90)
|
||||
self.assertEqual(notes.count(), 10)
|
||||
|
||||
# Test djangos template filters
|
||||
self.assertEqual(notes.count(), 10)
|
||||
t = Template("{{ notes.count }}")
|
||||
c = Context({"notes": notes})
|
||||
self.assertEqual(t.render(c), "10")
|
||||
|
||||
# Test with limit
|
||||
notes = Note.objects.skip(90)
|
||||
self.assertEqual(notes.count(), 10)
|
||||
|
||||
# Test djangos template filters
|
||||
self.assertEqual(notes.count(), 10)
|
||||
t = Template("{{ notes.count }}")
|
||||
c = Context({"notes": notes})
|
||||
self.assertEqual(t.render(c), "10")
|
||||
|
||||
# Test with skip and limit
|
||||
notes = Note.objects.skip(10).limit(10)
|
||||
|
||||
# Test djangos template filters
|
||||
self.assertEqual(notes.count(), 10)
|
||||
t = Template("{{ notes.count }}")
|
||||
c = Context({"notes": notes})
|
||||
self.assertEqual(t.render(c), "10")
|
||||
|
||||
|
||||
class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase):
|
||||
backend = SessionStore
|
||||
|
||||
def setUp(self):
|
||||
connect(db='mongoenginetest')
|
||||
MongoSession.drop_collection()
|
||||
super(MongoDBSessionTest, self).setUp()
|
||||
|
||||
def assertIn(self, first, second, msg=None):
|
||||
self.assertTrue(first in second, msg)
|
||||
|
||||
def assertNotIn(self, first, second, msg=None):
|
||||
self.assertFalse(first in second, msg)
|
||||
|
||||
def test_first_save(self):
|
||||
session = SessionStore()
|
||||
session['test'] = True
|
||||
session.save()
|
||||
self.assertTrue('test' in session)
|
||||
|
||||
def test_session_expiration_tz(self):
|
||||
activate_timezone(FixedOffset(60, 'UTC+1'))
|
||||
# create and save new session
|
||||
session = SessionStore()
|
||||
session.set_expiry(600) # expire in 600 seconds
|
||||
session['test_expire'] = True
|
||||
session.save()
|
||||
# reload session with key
|
||||
key = session.session_key
|
||||
session = SessionStore(key)
|
||||
self.assertTrue('test_expire' in session, 'Session has expired before it is expected')
|
||||
|
||||
|
||||
class MongoAuthTest(unittest.TestCase):
|
||||
user_data = {
|
||||
'username': 'user',
|
||||
'email': 'user@example.com',
|
||||
'password': 'test',
|
||||
}
|
||||
|
||||
def setUp(self):
|
||||
if not DJ15:
|
||||
raise SkipTest('mongo_auth requires Django 1.5')
|
||||
connect(db='mongoenginetest')
|
||||
User.drop_collection()
|
||||
super(MongoAuthTest, self).setUp()
|
||||
|
||||
def test_get_user_model(self):
|
||||
self.assertEqual(get_user_model(), MongoUser)
|
||||
|
||||
def test_get_user_document(self):
|
||||
self.assertEqual(get_user_document(), User)
|
||||
|
||||
def test_user_manager(self):
|
||||
manager = get_user_model()._default_manager
|
||||
self.assertTrue(isinstance(manager, MongoUserManager))
|
||||
|
||||
def test_user_manager_exception(self):
|
||||
manager = get_user_model()._default_manager
|
||||
self.assertRaises(MongoUser.DoesNotExist, manager.get,
|
||||
username='not found')
|
||||
|
||||
def test_create_user(self):
|
||||
manager = get_user_model()._default_manager
|
||||
user = manager.create_user(**self.user_data)
|
||||
self.assertTrue(isinstance(user, User))
|
||||
db_user = User.objects.get(username='user')
|
||||
self.assertEqual(user.id, db_user.id)
|
||||
|
||||
def test_authenticate(self):
|
||||
get_user_model()._default_manager.create_user(**self.user_data)
|
||||
user = authenticate(username='user', password='fail')
|
||||
self.assertEqual(None, user)
|
||||
user = authenticate(username='user', password='test')
|
||||
db_user = User.objects.get(username='user')
|
||||
self.assertEqual(user.id, db_user.id)
|
||||
|
||||
|
||||
class MongoTestCaseTest(MongoTestCase):
|
||||
def test_mongo_test_case(self):
|
||||
self.db.dummy_collection.insert({'collection': 'will be dropped'})
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
@@ -1,47 +0,0 @@
|
||||
import sys
|
||||
sys.path[0:0] = [""]
|
||||
|
||||
import unittest
|
||||
|
||||
from mongoengine import *
|
||||
|
||||
import jinja2
|
||||
|
||||
|
||||
class TemplateFilterTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
connect(db='mongoenginetest')
|
||||
|
||||
def test_jinja2(self):
|
||||
env = jinja2.Environment()
|
||||
|
||||
class TestData(Document):
|
||||
title = StringField()
|
||||
description = StringField()
|
||||
|
||||
TestData.drop_collection()
|
||||
|
||||
examples = [('A', '1'),
|
||||
('B', '2'),
|
||||
('C', '3')]
|
||||
|
||||
for title, description in examples:
|
||||
TestData(title=title, description=description).save()
|
||||
|
||||
tmpl = """
|
||||
{%- for record in content -%}
|
||||
{%- if loop.first -%}{ {%- endif -%}
|
||||
"{{ record.title }}": "{{ record.description }}"
|
||||
{%- if loop.last -%} }{%- else -%},{% endif -%}
|
||||
{%- endfor -%}
|
||||
"""
|
||||
ctx = {'content': TestData.objects}
|
||||
template = env.from_string(tmpl)
|
||||
rendered = template.render(**ctx)
|
||||
|
||||
self.assertEqual('{"A": "1","B": "2","C": "3"}', rendered)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
@@ -1,17 +1,33 @@
|
||||
import sys
|
||||
|
||||
sys.path[0:0] = [""]
|
||||
import unittest
|
||||
|
||||
import pymongo
|
||||
from pymongo import ReadPreference, ReplicaSetConnection
|
||||
from pymongo import ReadPreference
|
||||
|
||||
from mongoengine.python_support import IS_PYMONGO_3
|
||||
|
||||
if IS_PYMONGO_3:
|
||||
from pymongo import MongoClient
|
||||
CONN_CLASS = MongoClient
|
||||
READ_PREF = ReadPreference.SECONDARY
|
||||
else:
|
||||
from pymongo import ReplicaSetConnection
|
||||
CONN_CLASS = ReplicaSetConnection
|
||||
READ_PREF = ReadPreference.SECONDARY_ONLY
|
||||
|
||||
import mongoengine
|
||||
from mongoengine import *
|
||||
from mongoengine.connection import get_db, get_connection, ConnectionError
|
||||
from mongoengine.connection import ConnectionError
|
||||
|
||||
|
||||
class ConnectionTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
mongoengine.connection._connection_settings = {}
|
||||
mongoengine.connection._connections = {}
|
||||
mongoengine.connection._dbs = {}
|
||||
|
||||
def tearDown(self):
|
||||
mongoengine.connection._connection_settings = {}
|
||||
mongoengine.connection._connections = {}
|
||||
@@ -22,14 +38,17 @@ class ConnectionTest(unittest.TestCase):
|
||||
"""
|
||||
|
||||
try:
|
||||
conn = connect(db='mongoenginetest', host="mongodb://localhost/mongoenginetest?replicaSet=rs", read_preference=ReadPreference.SECONDARY_ONLY)
|
||||
conn = connect(db='mongoenginetest',
|
||||
host="mongodb://localhost/mongoenginetest?replicaSet=rs",
|
||||
read_preference=READ_PREF)
|
||||
except ConnectionError:
|
||||
return
|
||||
|
||||
if not isinstance(conn, ReplicaSetConnection):
|
||||
if not isinstance(conn, CONN_CLASS):
|
||||
# not a replica-set connection; nothing meaningful to assert
|
||||
return
|
||||
|
||||
self.assertEqual(conn.read_preference, ReadPreference.SECONDARY_ONLY)
|
||||
self.assertEqual(conn.read_preference, READ_PREF)
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
@@ -279,5 +279,33 @@ class SignalTests(unittest.TestCase):
|
||||
# second time, it must be an update
|
||||
self.assertEqual(self.get_signal_output(ei.save), ['Is updated'])
|
||||
|
||||
def test_signals_with_switch_collection(self):
|
||||
ei = self.ExplicitId(id=123)
|
||||
ei.switch_collection("explicit__1")
|
||||
self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
|
||||
ei.switch_collection("explicit__1")
|
||||
self.assertEqual(self.get_signal_output(ei.save), ['Is updated'])
|
||||
|
||||
ei.switch_collection("explicit__1", keep_created=False)
|
||||
self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
|
||||
ei.switch_collection("explicit__1", keep_created=False)
|
||||
self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
|
||||
|
||||
def test_signals_with_switch_db(self):
|
||||
connect('mongoenginetest')
|
||||
register_connection('testdb-1', 'mongoenginetest2')
|
||||
|
||||
ei = self.ExplicitId(id=123)
|
||||
ei.switch_db("testdb-1")
|
||||
self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
|
||||
ei.switch_db("testdb-1")
|
||||
self.assertEqual(self.get_signal_output(ei.save), ['Is updated'])
|
||||
|
||||
ei.switch_db("testdb-1", keep_created=False)
|
||||
self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
|
||||
ei.switch_db("testdb-1", keep_created=False)
|
||||
self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
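Going by the signal output asserted above, switch_collection() and switch_db() gained a keep_created flag: by default the document keeps its created/updated state across the switch, while keep_created=False resets it so the next save behaves like a brand-new insert in the target collection or database. A sketch (the second database alias is an assumption):

from mongoengine import Document, IntField, connect, register_connection

connect('mongoenginetest')                            # default alias
register_connection('testdb-1', 'mongoenginetest2')   # assumes this db exists


class ExplicitId(Document):
    id = IntField(primary_key=True)


doc = ExplicitId(id=123)

doc.switch_db('testdb-1')
doc.save()    # first save on the new alias inserts the document

doc.switch_db('testdb-1')
doc.save()    # created state kept: this save takes the update path

doc.switch_db('testdb-1', keep_created=False)
doc.save()    # created state reset: behaves like a brand-new insert again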
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
12
tox.ini
Normal file
@@ -0,0 +1,12 @@
|
||||
[tox]
|
||||
envlist = {py26,py27,py32,py33,py34,pypy,pypy3}-{mg27,mg28}
|
||||
#envlist = {py26,py27,py32,py33,py34,pypy,pypy3}-{mg27,mg28,mg30,mgdev}
|
||||
|
||||
[testenv]
|
||||
commands =
|
||||
python setup.py nosetests {posargs}
|
||||
deps =
|
||||
mg27: PyMongo<2.8
|
||||
mg28: PyMongo>=2.8,<3.0
|
||||
mg30: PyMongo>=3.0
|
||||
mgdev: https://github.com/mongodb/mongo-python-driver/tarball/master
|