Compare commits
400 commits (abbreviated SHA-1 hashes; the author, date, and message columns are not shown):

3070e0bf5d 83c11a9834 5c912b930e 1b17fb0ae7 d83e67c121 ae39ed94c9 1e51180d42 87ba69d02e 8879d5560b c1621ee39c
b0aa98edb4 a7a2fe0216 8e50f5fa3c 31793520bf 0b6b0368c5 d1d30a9280 420c6f2d1e 34f06c4971 9cc4bbd49d f66b312869
2405ba8708 a91b6bff8b 450dc11a68 1ce2f84ce5 f55b241cfa 34d08ce8ef 4f5aa8c43b 27b375060d cbfdc401f7 b58bf3e0ce
1fff7e9aca 494b981b13 dd93995bd0 b3bb4add9c d305e71c27 0d92baa670 7a1b110f62 db8df057ce 5d8ffded40 07f3e5356d
1ece62f960 056c604dc3 2d08eec093 614b590551 6d90ce250a ea31846a19 e6317776c1 efeaba39a4 1a97dfd479 9fecf2b303
3d0d2f48ad 581605e0e2 45d3a7f6ff 7ca2ea0766 89220c142b c73ce3d220 b0f127af4e 766d54795f bd41c6eea4 2435786713
9e7ea64bd2 89a6eee6af 2ec1476e50 2d9b581f34 5bb63f645b a856c7cc37 26db9d8a9d 8060179f6d 77ebd87fed e4bc92235d
27a4d83ce8 ece9b902f8 65a2f8a68b 9c212306b8 1fdc7ce6bb 0b22c140c5 944aa45459 c9842ba13a 8840680303 376b9b1316
54bb1cb3d9 43468b474e 28a957c684 ec5ddbf391 bab186e195 bc7e874476 97114b5948 45e015d71d 0ff6531953 ba298c3cfc
0479bea40b a536097804 bbefd0fdf9 2aa8b04c21 aeebdfec51 debfcdf498 5c4b33e8e6 eb54037b66 f48af8db3b 97c5b957dd
95e7397803 43a989978a 27734a7c26 dd786d6fc4 be1c28fc45 20e41b3523 e07ecc5cf8 3360b72531 233b13d670 5bcbb4fdaa
dbe2f5f2b8 ca8b58d66d f80f0b416f d7765511ee 0240a09056 ab15c4eec9 4ce1ba81a6 530440b333 b80fda36af 42d24263ef
1e2797e7ce f7075766fc 5647ca70bb 2b8aa6bafc 410443471c 0bb9781b91 2769d6d7ca 120b9433c2 605092bd88 a4a8c94374
0e93f6c0db aa2add39ad a928047147 c474ca0f13 88dc64653e 5f4b70f3a9 51b429e5b0 360624eb6e d9d2291837 cbdf816232
2d71eb8a18 64d2532ce9 0376910f33 6d503119a1 bfae93e57e 49a66ba81a a1d43fecd9 d0e42a4798 2a34358abc fd2bb8ea45
98e5daa0e0 ad2e119282 c20c30d8d1 66d215c9c1 46e088d379 bbdd15161a ea9dc8cfb8 6bd2ccc9bf 56327c6b58 712e8a51e4
421f324f9e 8fe4a70299 3af6d0dbfd e2bef076d3 1bf9f28f4b f1e7b97a93 8cfe13ad90 0f420abc8e 3b5b715567 520051af25
7e376b40bb fd18a48608 64860c6287 58635b24ba 3ec9dfc108 bd1572f11a 540a0cc59c 83eb4f6b16 95c58bd793 65591c7727
737cbf5f60 4c67cbb4b7 ed2cc2a60b 859e9b3cc4 c34e79fad9 82446d641e 9451c9f331 61411bb259 fcdb0eff8f 30d9347272
7564bbdee8 69251e5000 6ecdc7b59d b7d0d8f0cc df52ed1162 aa6370dd5d c272b7901f c61de6540a 3c7bf50089 32fc4152a7
bdf7187d5c 1639576203 ae20c785ea a2eb876f8c 5a1eaa0a98 398fd4a548 44b9fb66e1 2afa2171f9 1d7ea71c0d 2a391f0f16
e9b8093dac 6a229cfbc5 3300f409ba 4466005363 296ef5bddf 1f2a432e82 855933ab2a ece8d25187 589a720162 a59b518cf2
a15352a4f8 df65f3fc3f 734986c1b5 4a9ed5f2f2 088f229865 cb2cb851e2 d3962c4f7d 0301135f96 f59aa922ea f60a49d6f6
9a190eb00d 6bad4bd415 50d9b0b796 12f884e3ac 02b1aa7355 90bfa608dd 13f38b1c1d 1afe7240f4 7a41155178 39a20ea471
d8855a4a0f de8da78042 318b42dff2 0018674b62 82913e8d69 0d867a108d 5ee4b4a5ac 62219d9648 6d9bfff19c 7614b92197
7c1afd0031 ca7b2371fb ed5fba6b0f 2b3b3bf652 11daf706df 4a269eb2c4 9b3899476c febb3d7e3d 83e3c5c7d8 3c271845c9
56c4292164 2531ade3bb 3e2f035400 e7bcb5e366 112e921ce2 216f15602b fbe1901e65 8d2bc444bb cf4a45da11 be78209f94
45b5bf73fe 84f9e44b6c 700bc1b4bb beef2ede25 9bfc838029 e9d7353294 a6948771d8 403977cd49 153538cef9 9f1196e982
6419a8d09a 769cee3d64 fc460b775e ba59e498de 939bd2bb1f e231f71b4a d06c5f036b 071562d755 391f659af1 8a44232bfc
9188f9bf62 0187a0e113 beacfae400 fdc385ea33 8b97808931 179c4a10c8 6cef571bfb fbe8b28b2e a8d91a56bf 8d7291506e
d9005ac2fc c775c0a80c 700e2cd93d 083f00be84 d00859ecfd 4e73566c11 208a467b24 e1bb453f32 4607b08be5 aa5c776f3d
0075c0a1e8 83fff80b0f 5e553ffaf7 6d185b7f7a e80144e9f2 fa4b820931 63c5a4dd65 34646a414c 5aeee9deb2 4c1509a62a
bfdaae944d 4e44198bbd a4e8177b76 81bf5cb78b a9fc476fb8 26f0c06624 59bd72a888 7d808b483e 3ee60affa9 558b8123b5
ecdf2ae5c7 aa9ed614ad 1acdb880fc 7cd22aaf83 5eb63cfa30 5dc998ed52 8074094568 56d1139d71 165cdc8840 c42aef74de
634e1f661f a1db437c42 b8e2bdc99f 52d4ea7d78 7db5335420 62480fe940 3d7b30da77 8e87648d53 f842c90007 7f2b686ab5
b09c52fc7e 202d6e414f 3d817f145c 181e191fee 79ecf027dd 76d771d20f 4d5f602ee7 452bbcc19b 24b8650026 269e6e29d6
c4b0002ddb 53598781b8 0624cdd6e4 5fb9d61d28 7b1860d17b 8797565606 3d97c41fe9 5edfeb2e29 268908b3b2 fb70b47acb
219d316b49 3aa2233b5d d59862ae6e 0a03f9a31a dca135190a aedcf3dc81 6961a9494f 6d70ef1a08 e1fc15875d 94ae1388b1
17728d4e74 417aa743ca 2f26f7a827 09f9c59b3d bec6805296 d99c7c20cc 60b6ad3fcf 9b4d0f6450 1a2c74391c 08288e591c
823cf421fa 3799f27734 a7edd8602c c081aca794 2ca6648227 1af54f93f5 a9cacd2e06 f7fbb3d2f6 adb7bbeea0 b91db87ae0
.gitignore (vendored, 8 changes)

@@ -1,7 +1,8 @@
.*
!.gitignore
*.pyc
.*.swp
*~
*.py[co]
.*.sw[po]
*.egg
docs/.build
docs/_build

@@ -12,4 +13,5 @@ env/
.settings
.project
.pydevproject
tests/bugfix.py
tests/test_bugfix.py
htmlcov/
.travis.yml (new file, 12 lines)

@@ -0,0 +1,12 @@
# http://travis-ci.org/#!/MongoEngine/mongoengine
language: python
python:
- 2.6
- 2.7
install:
- sudo apt-get install zlib1g zlib1g-dev
- sudo ln -s /usr/lib/i386-linux-gnu/libz.so /usr/lib/
- pip install PIL --use-mirrors ; true
- python setup.py install
script:
- python setup.py test
AUTHORS (49 changes)

@@ -1,11 +1,11 @@
The PRIMARY AUTHORS are (and/or have been):

Ross Lawley <ross.lawley@gmail.com>
Harry Marr <harry@hmarr.com>
Matt Dennewitz <mattdennewitz@gmail.com>
Deepak Thukral <iapain@yahoo.com>
Florian Schlachter <flori@n-schlachter.de>
Steve Challis <steve@stevechallis.com>
Ross Lawley <ross.lawley@gmail.com>
Wilson Júnior <wilsonpjunior@gmail.com>
Dan Crosta https://github.com/dcrosta

@@ -67,3 +67,50 @@ that much better:
* Gareth Lloyd
* Albert Choi
* John Arnfield
* grubberr
* Paul Aliagas
* Paul Cunnane
* Julien Rebetez
* Marc Tamlyn
* Karim Allah
* Adam Parrish
* jpfarias
* jonrscott
* Alice Zoë Bevan-McGregor
* Stephen Young
* tkloc
* aid
* yamaneko1212
* dave mankoff
* Alexander G. Morano
* jwilder
* Joe Shaw
* Adam Flynn
* Ankhbayar
* Jan Schrewe
* David Koblas
* Crittercism
* Alvin Liang
* andrewmlevy
* Chris Faulkner
* Ashwin Purohit
* Shalabh Aggarwal
* Chris Williams
* Robert Kajic
* Jacob Peddicord
* Nils Hasenbanck
* mostlystatic
* Greg Banks
* swashbuckler
* Adam Reeve
* Anthony Nemitz
* deignacio
* shaunduncan
* Meir Kriheli
* Andrey Fedoseev
* aparajita
* Tristan Escalada
* Alexander Koshelev
* Jaime Irurzun
* Alexandre González
* Thomas Steinacher
README.rst (25 changes)

@@ -3,25 +3,29 @@ MongoEngine
===========
:Info: MongoEngine is an ORM-like layer on top of PyMongo.
:Author: Harry Marr (http://github.com/hmarr)
:Maintainer: Ross Lawley (http://github.com/rozza)

.. image:: https://secure.travis-ci.org/MongoEngine/mongoengine.png?branch=master
   :target: http://travis-ci.org/MongoEngine/mongoengine

About
=====
MongoEngine is a Python Object-Document Mapper for working with MongoDB.
Documentation available at http://hmarr.com/mongoengine/ - there is currently
a `tutorial <http://hmarr.com/mongoengine/tutorial.html>`_, a `user guide
<http://hmarr.com/mongoengine/userguide.html>`_ and an `API reference
<http://hmarr.com/mongoengine/apireference.html>`_.
MongoEngine is a Python Object-Document Mapper for working with MongoDB.
Documentation available at http://mongoengine-odm.rtfd.org - there is currently
a `tutorial <http://readthedocs.org/docs/mongoengine-odm/en/latest/tutorial.html>`_, a `user guide
<http://readthedocs.org/docs/mongoengine-odm/en/latest/userguide.html>`_ and an `API reference
<http://readthedocs.org/docs/mongoengine-odm/en/latest/apireference.html>`_.

Installation
============
If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
you can use ``easy_install -U mongoengine``. Otherwise, you can download the
source from `GitHub <http://github.com/hmarr/mongoengine>`_ and run ``python
source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python
setup.py install``.

Dependencies
============
- pymongo 1.1+
- pymongo 2.1.1+
- sphinx (optional - for documentation generation)

Examples

@@ -84,14 +88,15 @@ the standard port, and run ``python setup.py test``.

Community
=========
- `MongoEngine Users mailing list
  <http://groups.google.com/group/mongoengine-users>`_
- `MongoEngine Developers mailing list
  <http://groups.google.com/group/mongoengine-dev>`_
- `#mongoengine IRC channel <irc://irc.freenode.net/mongoengine>`_

Contributing
============
The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_ - to
The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ - to
contribute to the project, fork it on GitHub and send a pull request, all
contributions and suggestions are welcome!
benchmark.py (new file, 182 lines)

@@ -0,0 +1,182 @@
#!/usr/bin/env python

import timeit


def cprofile_main():
    from pymongo import Connection
    connection = Connection()
    connection.drop_database('timeit_test')
    connection.disconnect()

    from mongoengine import Document, DictField, connect
    connect("timeit_test")

    class Noddy(Document):
        fields = DictField()

    for i in xrange(1):
        noddy = Noddy()
        for j in range(20):
            noddy.fields["key" + str(j)] = "value " + str(j)
        noddy.save()


def main():
    """
    0.4 Performance Figures ...

    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - Pymongo
    1.1141769886
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine
    2.37724113464
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
    1.92479610443

    0.5.X
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - Pymongo
    1.10552310944
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine
    16.5169169903
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
    14.9446101189
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
    14.912801981
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine, force=True
    14.9617750645

    Performance
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - Pymongo
    1.10072994232
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine
    5.27341103554
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
    4.49365401268
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
    4.43459296227
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine, force=True
    4.40114378929
    """

    setup = """
from pymongo import Connection
connection = Connection()
connection.drop_database('timeit_test')
"""

    stmt = """
from pymongo import Connection
connection = Connection()

db = connection.timeit_test
noddy = db.noddy

for i in xrange(10000):
    example = {'fields': {}}
    for j in range(20):
        example['fields']["key"+str(j)] = "value "+str(j)

    noddy.insert(example)

myNoddys = noddy.find()
[n for n in myNoddys]  # iterate
"""

    print "-" * 100
    print """Creating 10000 dictionaries - Pymongo"""
    t = timeit.Timer(stmt=stmt, setup=setup)
    print t.timeit(1)

    setup = """
from pymongo import Connection
connection = Connection()
connection.drop_database('timeit_test')
connection.disconnect()

from mongoengine import Document, DictField, connect
connect("timeit_test")

class Noddy(Document):
    fields = DictField()
"""

    stmt = """
for i in xrange(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save()

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print "-" * 100
    print """Creating 10000 dictionaries - MongoEngine"""
    t = timeit.Timer(stmt=stmt, setup=setup)
    print t.timeit(1)

    stmt = """
for i in xrange(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(safe=False, validate=False)

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print "-" * 100
    print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False"""
    t = timeit.Timer(stmt=stmt, setup=setup)
    print t.timeit(1)

    stmt = """
for i in xrange(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(safe=False, validate=False, cascade=False)

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print "-" * 100
    print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False"""
    t = timeit.Timer(stmt=stmt, setup=setup)
    print t.timeit(1)

    stmt = """
for i in xrange(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(force_insert=True, safe=False, validate=False, cascade=False)

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print "-" * 100
    print """Creating 10000 dictionaries - MongoEngine, force=True"""
    t = timeit.Timer(stmt=stmt, setup=setup)
    print t.timeit(1)


if __name__ == "__main__":
    main()
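The cprofile_main helper above is defined but never invoked by the script; a minimal sketch of profiling it with the standard library, assuming a mongod instance is running locally and the file is importable as benchmark::

    import cProfile

    import benchmark  # the file above, importable from the working directory

    # Profile a single save cycle against the timeit_test database
    cProfile.run('benchmark.cprofile_main()')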
@@ -6,6 +6,7 @@ Connecting
==========

.. autofunction:: mongoengine.connect
.. autofunction:: mongoengine.register_connection

Documents
=========

@@ -21,9 +22,18 @@ Documents
.. autoclass:: mongoengine.EmbeddedDocument
   :members:

.. autoclass:: mongoengine.DynamicDocument
   :members:

.. autoclass:: mongoengine.DynamicEmbeddedDocument
   :members:

.. autoclass:: mongoengine.document.MapReduceDocument
   :members:

.. autoclass:: mongoengine.ValidationError
   :members:

Querying
========

@@ -37,25 +47,28 @@ Querying
Fields
======

.. autoclass:: mongoengine.StringField
.. autoclass:: mongoengine.URLField
.. autoclass:: mongoengine.EmailField
.. autoclass:: mongoengine.IntField
.. autoclass:: mongoengine.FloatField
.. autoclass:: mongoengine.DecimalField
.. autoclass:: mongoengine.DateTimeField
.. autoclass:: mongoengine.BinaryField
.. autoclass:: mongoengine.BooleanField
.. autoclass:: mongoengine.ComplexDateTimeField
.. autoclass:: mongoengine.ListField
.. autoclass:: mongoengine.SortedListField
.. autoclass:: mongoengine.DateTimeField
.. autoclass:: mongoengine.DecimalField
.. autoclass:: mongoengine.DictField
.. autoclass:: mongoengine.DynamicField
.. autoclass:: mongoengine.EmailField
.. autoclass:: mongoengine.EmbeddedDocumentField
.. autoclass:: mongoengine.FileField
.. autoclass:: mongoengine.FloatField
.. autoclass:: mongoengine.GenericEmbeddedDocumentField
.. autoclass:: mongoengine.GenericReferenceField
.. autoclass:: mongoengine.GeoPointField
.. autoclass:: mongoengine.ImageField
.. autoclass:: mongoengine.IntField
.. autoclass:: mongoengine.ListField
.. autoclass:: mongoengine.MapField
.. autoclass:: mongoengine.ObjectIdField
.. autoclass:: mongoengine.ReferenceField
.. autoclass:: mongoengine.GenericReferenceField
.. autoclass:: mongoengine.EmbeddedDocumentField
.. autoclass:: mongoengine.GenericEmbeddedDocumentField
.. autoclass:: mongoengine.BooleanField
.. autoclass:: mongoengine.FileField
.. autoclass:: mongoengine.BinaryField
.. autoclass:: mongoengine.GeoPointField
.. autoclass:: mongoengine.SequenceField
.. autoclass:: mongoengine.SortedListField
.. autoclass:: mongoengine.StringField
.. autoclass:: mongoengine.URLField
.. autoclass:: mongoengine.UUIDField
@@ -2,10 +2,192 @@
Changelog
=========


Changes in 0.6.16
=================
- Fixed issue where db_alias wasn't inherited

Changes in 0.6.15
=================
- Updated validation error messages
- Added support for null / zero / false values in item_frequencies
- Fixed cascade save edge case
- Fixed geo index creation through reference fields
- Added support for args / kwargs when using @queryset_manager
- Deref list custom id fix

Changes in 0.6.14
=================
- Fixed error dict with nested validation
- Fixed Int/Float fields and not equals None
- Exclude tests from installation
- Allow tuples for index meta
- Fixed use of str in instance checks
- Fixed unicode support in transform update
- Added support for add_to_set and each

Changes in 0.6.13
================
- Fixed EmbeddedDocument db_field validation issue
- Fixed StringField unicode issue
- Fixes __repr__ modifying the cursor

Changes in 0.6.12
=================
- Fixes scalar lookups for primary_key
- Fixes error with _delta handling DBRefs

Changes in 0.6.11
==================
- Fixed inconsistency handling None values field attrs
- Fixed map_field embedded db_field issue
- Fixed .save() _delta issue with DbRefs
- Fixed Django TestCase
- Added cmp to Embedded Document
- Added PULL reverse_delete_rule
- Fixed CASCADE delete bug
- Fixed db_field data load error
- Fixed recursive save with FileField

Changes in 0.6.10
=================
- Fixed basedict / baselist to return super(..)
- Promoted BaseDynamicField to DynamicField

Changes in 0.6.9
================
- Fixed sparse indexes on inherited docs
- Removed FileField auto deletion, needs more work maybe 0.7

Changes in 0.6.8
================
- Fixed FileField losing reference when no default set
- Removed possible race condition from FileField (grid_file)
- Added assignment to save, can now do: b = MyDoc(**kwargs).save()
- Added support for pull operations on nested EmbeddedDocuments
- Added support for choices with GenericReferenceFields
- Added support for choices with GenericEmbeddedDocumentFields
- Fixed Django 1.4 sessions first save data loss
- FileField now automatically delete files on .delete()
- Fix for GenericReference to_mongo method
- Fixed connection regression
- Updated Django User document, now allows inheritance

Changes in 0.6.7
================
- Fixed indexing on '_id' or 'pk' or 'id'
- Invalid data from the DB now raises a InvalidDocumentError
- Cleaned up the Validation Error - docs and code
- Added meta `auto_create_index` so you can disable index creation
- Added write concern options to inserts
- Fixed typo in meta for index options
- Bug fix Read preference now passed correctly
- Added support for File like objects for GridFS
- Fix for #473 - Dereferencing abstracts

Changes in 0.6.6
================
- Django 1.4 fixed (finally)
- Added tests for Django

Changes in 0.6.5
================
- More Django updates

Changes in 0.6.4
================

- Refactored connection / fixed replicasetconnection
- Bug fix for unknown connection alias error message
- Sessions support Django 1.3 and Django 1.4
- Minor fix for ReferenceField

Changes in 0.6.3
================
- Updated sessions for Django 1.4
- Bug fix for updates where listfields contain embedded documents
- Bug fix for collection naming and mixins

Changes in 0.6.2
================
- Updated documentation for ReplicaSet connections
- Hack round _types issue with SERVER-5247 - querying other arrays may also cause problems.

Changes in 0.6.1
================
- Fix for replicaSet connections

Changes in 0.6
================

- Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7
- Added support for covered indexes when inheritance is off
- No longer always upsert on save for items with a '_id'
- Error raised if update doesn't have an operation
- DeReferencing is now thread safe
- Errors raised if trying to perform a join in a query
- Updates can now take __raw__ queries
- Added custom 2D index declarations
- Added replicaSet connection support
- Updated deprecated imports from pymongo (safe for pymongo 2.2)
- Added uri support for connections
- Added scalar for efficiently returning partial data values (aliased to values_list)
- Fixed limit skip bug
- Improved Inheritance / Mixin
- Added sharding support
- Added pymongo 2.1 support
- Fixed Abstract documents can now declare indexes
- Added db_alias support to individual documents
- Fixed GridFS documents can now be pickled
- Added Now raises an InvalidDocumentError when declaring multiple fields with the same db_field
- Added InvalidQueryError when calling with_id with a filter
- Added support for DBRefs in distinct()
- Fixed issue saving False booleans
- Fixed issue with dynamic documents deltas
- Added Reverse Delete Rule support to ListFields - MapFields aren't supported
- Added customisable cascade kwarg options
- Fixed Handle None values for non-required fields
- Removed Document._get_subclasses() - no longer required
- Fixed bug requiring subclasses when not actually needed
- Fixed deletion of dynamic data
- Added support for the $elementMatch operator
- Added reverse option to SortedListFields
- Fixed dereferencing - multi directional list dereferencing
- Fixed issue creating indexes with recursive embedded documents
- Fixed recursive lookup in _unique_with_indexes
- Fixed passing ComplexField defaults to constructor for ReferenceFields
- Fixed validation of DictField Int keys
- Added optional cascade saving
- Fixed dereferencing - max_depth now taken into account
- Fixed document mutation saving issue
- Fixed positional operator when replacing embedded documents
- Added Non-Django Style choices back (you can have either)
- Fixed __repr__ of a sliced queryset
- Added recursive validation error of documents / complex fields
- Fixed breaking during queryset iteration
- Added pre and post bulk-insert signals
- Added ImageField - requires PIL
- Fixed Reference Fields can be None in get_or_create / queries
- Fixed accessing pk on an embedded document
- Fixed calling a queryset after drop_collection now recreates the collection
- Add field name to validation exception messages
- Added UUID field
- Improved efficiency of .get()
- Updated ComplexFields so if required they won't accept empty lists / dicts
- Added spec file for rpm-based distributions
- Fixed ListField so it doesnt accept strings
- Added DynamicDocument and EmbeddedDynamicDocument classes for expando schemas

Changes in v0.5.2
=================

- A Robust Circular reference bugfix


Changes in v0.5.1
=================

- Circular reference bugfix
- Fixed simple circular reference bug

Changes in v0.5
===============
@@ -38,7 +38,7 @@ master_doc = 'index'

# General information about the project.
project = u'MongoEngine'
copyright = u'2009-2011, Harry Marr'
copyright = u'2009-2012, MongoEngine Authors'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the

@@ -121,7 +121,7 @@ html_theme_path = ['_themes']
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
#html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
@@ -2,19 +2,21 @@
Using MongoEngine with Django
=============================

.. note :: Updated to support Django 1.4

Connecting
==========
In your **settings.py** file, ignore the standard database settings (unless you
also plan to use the ORM in your project), and instead call
:func:`~mongoengine.connect` somewhere in the settings module.

Authentication
==============
MongoEngine includes a Django authentication backend, which uses MongoDB. The
:class:`~mongoengine.django.auth.User` model is a MongoEngine
:class:`~mongoengine.Document`, but implements most of the methods and
attributes that the standard Django :class:`User` model does - so the two are
moderately compatible. Using this backend will allow you to store users in
MongoDB but still use many of the Django authentication infrastucture (such as
the :func:`login_required` decorator and the :func:`authenticate` function). To
enable the MongoEngine auth backend, add the following to you **settings.py**

@@ -24,7 +26,7 @@ file::
        'mongoengine.django.auth.MongoEngineBackend',
    )

The :mod:`~mongoengine.django.auth` module also contains a
:func:`~mongoengine.django.auth.get_user` helper function, that takes a user's
:attr:`id` and returns a :class:`~mongoengine.django.auth.User` object.

@@ -49,9 +51,9 @@ Storage
=======
With MongoEngine's support for GridFS via the :class:`~mongoengine.FileField`,
it is useful to have a Django file storage backend that wraps this. The new
storage module is called :class:`~mongoengine.django.storage.GridFSStorage`.
Using it is very similar to using the default FileSystemStorage.::

    from mongoengine.django.storage import GridFSStorage
    fs = GridFSStorage()
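A minimal sketch of the settings.py wiring that the Django section above describes; the database name is illustrative::

    # settings.py
    from mongoengine import connect

    connect('myapp-db')  # hypothetical database name; the standard DATABASES setting can be left out

    AUTHENTICATION_BACKENDS = (
        'mongoengine.django.auth.MongoEngineBackend',
    )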
@@ -3,6 +3,7 @@
=====================
Connecting to MongoDB
=====================

To connect to a running instance of :program:`mongod`, use the
:func:`~mongoengine.connect` function. The first argument is the name of the
database to connect to. If the database does not exist, it will be created. If

@@ -18,3 +19,47 @@ provide :attr:`host` and :attr:`port` arguments to
:func:`~mongoengine.connect`::

    connect('project1', host='192.168.1.35', port=12345)

Uri style connections are also supported as long as you include the database
name - just supply the uri as the :attr:`host` to
:func:`~mongoengine.connect`::

    connect('project1', host='mongodb://localhost/database_name')

ReplicaSets
===========

MongoEngine now supports :func:`~pymongo.replica_set_connection.ReplicaSetConnection`
to use them please use a URI style connection and provide the `replicaSet` name in the
connection kwargs.

Multiple Databases
==================

Multiple database support was added in MongoEngine 0.6. To use multiple
databases you can use :func:`~mongoengine.connect` and provide an `alias` name
for the connection - if no `alias` is provided then "default" is used.

In the background this uses :func:`~mongoengine.register_connection` to
store the data and you can register all aliases up front if required.

Individual documents can also support multiple databases by providing a
`db_alias` in their meta data. This allows :class:`~pymongo.dbref.DBRef` objects
to point across databases and collections. Below is an example schema, using
3 different databases to store data::

    class User(Document):
        name = StringField()

        meta = {"db_alias": "user-db"}

    class Book(Document):
        name = StringField()

        meta = {"db_alias": "book-db"}

    class AuthorBooks(Document):
        author = ReferenceField(User)
        book = ReferenceField(Book)

        meta = {"db_alias": "users-books-db"}
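The AuthorBooks example above assumes each `db_alias` has been registered; a minimal sketch of that registration, reusing the alias names from the schema (database names are illustrative)::

    from mongoengine import connect

    # One connection per alias referenced in the documents' meta
    connect('user-db', alias='user-db')
    connect('book-db', alias='book-db')
    connect('users-books-db', alias='users-books-db')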
@@ -24,6 +24,34 @@ objects** as class attributes to the document class::
    title = StringField(max_length=200, required=True)
    date_modified = DateTimeField(default=datetime.datetime.now)

Dynamic document schemas
========================
One of the benefits of MongoDb is dynamic schemas for a collection, whilst data
should be planned and organised (after all explicit is better than implicit!)
there are scenarios where having dynamic / expando style documents is desirable.

:class:`~mongoengine.DynamicDocument` documents work in the same way as
:class:`~mongoengine.Document` but any data / attributes set to them will also
be saved ::

    from mongoengine import *

    class Page(DynamicDocument):
        title = StringField(max_length=200, required=True)

    # Create a new page and add tags
    >>> page = Page(title='Using MongoEngine')
    >>> page.tags = ['mongodb', 'mongoengine']
    >>> page.save()

    >>> Page.objects(tags='mongoengine').count()
    >>> 1

..note::

    There is one caveat on Dynamic Documents: fields cannot start with `_`

Fields
======
By default, fields are not required. To make a field mandatory, set the

@@ -34,28 +62,31 @@ not provided. Default values may optionally be a callable, which will be called
to retrieve the value (such as in the above example). The field types available
are as follows:

* :class:`~mongoengine.StringField`
* :class:`~mongoengine.URLField`
* :class:`~mongoengine.EmailField`
* :class:`~mongoengine.IntField`
* :class:`~mongoengine.FloatField`
* :class:`~mongoengine.DecimalField`
* :class:`~mongoengine.DateTimeField`
* :class:`~mongoengine.BinaryField`
* :class:`~mongoengine.BooleanField`
* :class:`~mongoengine.ComplexDateTimeField`
* :class:`~mongoengine.ListField`
* :class:`~mongoengine.SortedListField`
* :class:`~mongoengine.DateTimeField`
* :class:`~mongoengine.DecimalField`
* :class:`~mongoengine.DictField`
* :class:`~mongoengine.DynamicField`
* :class:`~mongoengine.EmailField`
* :class:`~mongoengine.EmbeddedDocumentField`
* :class:`~mongoengine.FileField`
* :class:`~mongoengine.FloatField`
* :class:`~mongoengine.GenericEmbeddedDocumentField`
* :class:`~mongoengine.GenericReferenceField`
* :class:`~mongoengine.GeoPointField`
* :class:`~mongoengine.ImageField`
* :class:`~mongoengine.IntField`
* :class:`~mongoengine.ListField`
* :class:`~mongoengine.MapField`
* :class:`~mongoengine.ObjectIdField`
* :class:`~mongoengine.ReferenceField`
* :class:`~mongoengine.GenericReferenceField`
* :class:`~mongoengine.EmbeddedDocumentField`
* :class:`~mongoengine.GenericEmbeddedDocumentField`
* :class:`~mongoengine.BooleanField`
* :class:`~mongoengine.FileField`
* :class:`~mongoengine.BinaryField`
* :class:`~mongoengine.GeoPointField`
* :class:`~mongoengine.SequenceField`
* :class:`~mongoengine.SortedListField`
* :class:`~mongoengine.StringField`
* :class:`~mongoengine.URLField`
* :class:`~mongoengine.UUIDField`

Field arguments
---------------

@@ -70,7 +101,7 @@ arguments can be set on all fields:

:attr:`required` (Default: False)
    If set to True and the field is not set on the document instance, a
    :class:`~mongoengine.base.ValidationError` will be raised when the document is
    :class:`~mongoengine.ValidationError` will be raised when the document is
    validated.

:attr:`default` (Default: None)

@@ -107,12 +138,33 @@ arguments can be set on all fields:
    When True, use this field as a primary key for the collection.

:attr:`choices` (Default: None)
    An iterable of choices to which the value of this field should be limited.
    An iterable (e.g. a list or tuple) of choices to which the value of this
    field should be limited.

    Can be either be a nested tuples of value (stored in mongo) and a
    human readable key ::

        SIZE = (('S', 'Small'),
                ('M', 'Medium'),
                ('L', 'Large'),
                ('XL', 'Extra Large'),
                ('XXL', 'Extra Extra Large'))

        class Shirt(Document):
            size = StringField(max_length=3, choices=SIZE)

    Or a flat iterable just containing values ::

        SIZE = ('S', 'M', 'L', 'XL', 'XXL')

        class Shirt(Document):
            size = StringField(max_length=3, choices=SIZE)

:attr:`help_text` (Default: None)
    Optional help text to output with the field - used by form libraries

:attr:`verbose` (Default: None)
:attr:`verbose_name` (Default: None)
    Optional human-readable name for the field - used by form libraries

@@ -240,6 +292,10 @@ Its value can take any of the following constants:
:const:`mongoengine.CASCADE`
    Any object containing fields that are refererring to the object being deleted
    are deleted first.
:const:`mongoengine.PULL`
    Removes the reference to the object (using MongoDB's "pull" operation)
    from any object's fields of
    :class:`~mongoengine.ListField` (:class:`~mongoengine.ReferenceField`).

.. warning::

@@ -382,10 +438,31 @@ If a dictionary is passed then the following options are available:
:attr:`unique` (Default: False)
    Whether the index should be sparse.

.. note::
.. warning::

    Geospatial indexes will be automatically created for all
    :class:`~mongoengine.GeoPointField`\ s

    Inheritance adds extra indices.
    If don't need inheritance for a document turn inheritance off - see :ref:`document-inheritance`.

Geospatial indexes
---------------------------
Geospatial indexes will be automatically created for all
:class:`~mongoengine.GeoPointField`\ s

It is also possible to explicitly define geospatial indexes. This is
useful if you need to define a geospatial index on a subfield of a
:class:`~mongoengine.DictField` or a custom field that contains a
point. To create a geospatial index you must prefix the field with the
***** sign. ::

    class Place(Document):
        location = DictField()
        meta = {
            'indexes': [
                '*location.point',
            ],
        }

Ordering
========

@@ -427,8 +504,31 @@ subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. ::
    first_post = BlogPost.objects.order_by("+published_date").first()
    assert first_post.title == "Blog Post #1"

Shard keys
==========

If your collection is sharded, then you need to specify the shard key as a tuple,
using the :attr:`shard_key` attribute of :attr:`-mongoengine.Document.meta`.
This ensures that the shard key is sent with the query when calling the
:meth:`~mongoengine.document.Document.save` or
:meth:`~mongoengine.document.Document.update` method on an existing
:class:`-mongoengine.Document` instance::

    class LogEntry(Document):
        machine = StringField()
        app = StringField()
        timestamp = DateTimeField()
        data = StringField()

        meta = {
            'shard_key': ('machine', 'timestamp',)
        }

.. _document-inheritance:

Document inheritance
====================

To create a specialised type of a :class:`~mongoengine.Document` you have
defined, you may subclass it and add any extra fields or methods you may need.
As this is new class is not a direct subclass of

@@ -440,10 +540,15 @@ convenient and efficient retrieval of related documents::
    class Page(Document):
        title = StringField(max_length=200, required=True)

        meta = {'allow_inheritance': True}

    # Also stored in the collection named 'page'
    class DatedPage(Page):
        date = DateTimeField()

.. note:: From 0.7 onwards you must declare `allow_inheritance` in the document meta.

Working with existing data
--------------------------
To enable correct retrieval of documents involved in this kind of heirarchy,
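The PULL reverse-delete rule added in this file can be wired up as below; a minimal sketch with hypothetical document classes::

    from mongoengine import Document, ListField, PULL, ReferenceField, StringField

    class User(Document):
        name = StringField()

    class BlogPost(Document):
        title = StringField()
        # Deleting a User pulls the reference out of any authors lists
        authors = ListField(ReferenceField(User, reverse_delete_rule=PULL))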
@@ -35,13 +35,23 @@ already exist, then any changes will be updated atomically. For example::
* ``list_field.pop(0)`` - *sets* the resulting list
* ``del(list_field)`` - *unsets* whole list

To delete a document, call the :meth:`~mongoengine.Document.delete` method.
Note that this will only work if the document exists in the database and has a
valide :attr:`id`.

.. seealso::
    :ref:`guide-atomic-updates`

Cascading Saves
---------------
If your document contains :class:`~mongoengine.ReferenceField` or
:class:`~mongoengine.GenericReferenceField` objects, then by default the
:meth:`~mongoengine.Document.save` method will automatically save any changes to
those objects as well. If this is not desired passing :attr:`cascade` as False
to the save method turns this feature off.

Deleting documents
------------------
To delete a document, call the :meth:`~mongoengine.Document.delete` method.
Note that this will only work if the document exists in the database and has a
valid :attr:`id`.

Document IDs
============
Each document in the database has a unique id. This may be accessed through the

@@ -81,5 +91,5 @@ is an alias to :attr:`id`::
.. note::

    If you define your own primary key field, the field implicitly becomes
    required, so a :class:`ValidationError` will be thrown if you don't provide
    it.
    required, so a :class:`~mongoengine.ValidationError` will be thrown if
    you don't provide it.
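A minimal sketch of the cascade switch described under "Cascading Saves" above; the classes are hypothetical::

    from mongoengine import Document, ReferenceField, StringField

    class Author(Document):
        name = StringField()

    class Book(Document):
        author = ReferenceField(Author)

    book = Book.objects.first()
    book.author.name = 'Renamed'
    book.save(cascade=False)  # persist the book only; the author edit is not saved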
@@ -65,7 +65,7 @@ Deleting stored files is achieved with the :func:`delete` method::

    marmot.photo.delete()

.. note::
.. warning::

    The FileField in a Document actually only stores the ID of a file in a
    separate GridFS collection. This means that deleting a document
@@ -76,6 +76,7 @@ expressions:
* ``istartswith`` -- string field starts with value (case insensitive)
* ``endswith`` -- string field ends with value
* ``iendswith`` -- string field ends with value (case insensitive)
* ``match`` -- performs an $elemMatch so you can match an entire document within an array

There are a few special operators for performing geographical queries, that
may used with :class:`~mongoengine.GeoPointField`\ s:

@@ -194,22 +195,6 @@ to be created::
    >>> a.name == b.name and a.age == b.age
    True

Dereferencing results
---------------------
When iterating the results of :class:`~mongoengine.ListField` or
:class:`~mongoengine.DictField` we automatically dereference any
:class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the
number the queries to mongo.

There are times when that efficiency is not enough, documents that have
:class:`~mongoengine.ReferenceField` objects or
:class:`~mongoengine.GenericReferenceField` objects at the top level are
expensive as the number of queries to MongoDB can quickly rise.

To limit the number of queries use
:func:`~mongoengine.queryset.QuerySet.select_related` which converts the
QuerySet to a list and dereferences as efficiently as possible.

Default Document queries
========================
By default, the objects :attr:`~mongoengine.Document.objects` attribute on a

@@ -312,8 +297,16 @@ would be generating "tag-clouds"::
    from operator import itemgetter
    top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10]

Query efficiency and performance
================================

There are a couple of methods to improve efficiency when querying, reducing the
information returned by the query or efficient dereferencing .

Retrieving a subset of fields
=============================
-----------------------------

Sometimes a subset of fields on a :class:`~mongoengine.Document` is required,
and for efficiency only these should be retrieved from the database. This issue
is especially important for MongoDB, as fields may often be extremely large

@@ -346,6 +339,27 @@ will be given::
    If you later need the missing fields, just call
    :meth:`~mongoengine.Document.reload` on your document.

Getting related data
--------------------

When iterating the results of :class:`~mongoengine.ListField` or
:class:`~mongoengine.DictField` we automatically dereference any
:class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the
number the queries to mongo.

There are times when that efficiency is not enough, documents that have
:class:`~mongoengine.ReferenceField` objects or
:class:`~mongoengine.GenericReferenceField` objects at the top level are
expensive as the number of queries to MongoDB can quickly rise.

To limit the number of queries use
:func:`~mongoengine.queryset.QuerySet.select_related` which converts the
QuerySet to a list and dereferences as efficiently as possible. By default
:func:`~mongoengine.queryset.QuerySet.select_related` only dereferences any
references to the depth of 1 level. If you have more complicated documents and
want to dereference more of the object at once then increasing the :attr:`max_depth`
will dereference more levels of the document.

Advanced queries
================
Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword
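A minimal sketch of the two efficiency tools described above, partial field retrieval and batched dereferencing; the classes are hypothetical::

    from mongoengine import Document, ReferenceField, StringField

    class Director(Document):
        name = StringField()

    class Film(Document):
        title = StringField()
        director = ReferenceField(Director)

    titles = Film.objects.only('title')               # partial documents
    films = Film.objects.select_related(max_depth=2)  # dereference two levels in one pass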
@@ -5,11 +5,13 @@ Signals

.. versionadded:: 0.5

Signal support is provided by the excellent `blinker`_ library and
will gracefully fall back if it is not available.
.. note::

    Signal support is provided by the excellent `blinker`_ library and
    will gracefully fall back if it is not available.

The following document signals exist in MongoEngine and are pretty self explaintary:
The following document signals exist in MongoEngine and are pretty self-explanatory:

* `mongoengine.signals.pre_init`
* `mongoengine.signals.post_init`

@@ -17,6 +19,8 @@ The following document signals exist in MongoEngine and are pretty self explaint
* `mongoengine.signals.post_save`
* `mongoengine.signals.pre_delete`
* `mongoengine.signals.post_delete`
* `mongoengine.signals.pre_bulk_insert`
* `mongoengine.signals.post_bulk_insert`

Example usage::

@@ -42,8 +46,8 @@ Example usage::
        else:
            logging.debug("Updated")

    signals.pre_save.connect(Author.pre_save, sender=Author)
    signals.post_save.connect(Author.post_save, sender=Author)

.. _blinker: http://pypi.python.org/pypi/blinker
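The bulk-insert signals listed above can be connected in the same style as the example; a minimal sketch, where the handler signature is an assumption based on the other signal handlers::

    from mongoengine import signals

    def on_bulk_insert(sender, documents, **kwargs):
        print 'bulk insert of %d documents' % len(documents)

    signals.pre_bulk_insert.connect(on_bulk_insert, sender=Author)
    signals.post_bulk_insert.connect(on_bulk_insert, sender=Author)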
@@ -18,6 +18,9 @@ MongoDB. To install it, simply run
:doc:`apireference`
    The complete API documentation.

:doc:`upgrade`
    How to upgrade MongoEngine.

:doc:`django`
    Using MongoEngine and Django

@@ -42,7 +45,8 @@ Also, you can join the developers' `mailing list

Changes
-------
See the :doc:`changelog` for a full list of changes to MongoEngine.
See the :doc:`changelog` for a full list of changes to MongoEngine and
:doc:`upgrade` for upgrade information.

.. toctree::
    :hidden:
@@ -167,6 +167,11 @@ To delete all the posts if a user is deleted set the rule::

See :class:`~mongoengine.ReferenceField` for more information.

..note::
    MapFields and DictFields currently don't support automatic handling of
    deleted references

Adding data to our Tumblelog
============================
Now that we've defined how our documents will be structured, let's start adding
@@ -2,6 +2,24 @@
Upgrading
=========

0.5 to 0.6
==========

Embedded Documents - if you had a `pk` field you will have to rename it from `_id`
to `pk` as pk is no longer a property of Embedded Documents.

Reverse Delete Rules in Embedded Documents, MapFields and DictFields now throw
an InvalidDocument error as they aren't currently supported.

Document._get_subclasses - Is no longer used and the class method has been removed.

Document.objects.with_id - now raises an InvalidQueryError if used with a filter.

FutureWarning - A future warning has been added to all inherited classes that
don't define `allow_inheritance` in their meta.

You may need to update pyMongo to 2.0 for use with Sharding.

0.4 to 0.5
===========

@@ -9,7 +27,7 @@ There have been the following backwards incompatibilities from 0.4 to 0.5. The
main areas of changed are: choices in fields, map_reduce and collection names.

Choice options:
--------------
---------------

Are now expected to be an iterable of tuples, with the first element in each
tuple being the actual value to be stored. The second element is the

@@ -58,7 +76,7 @@ To upgrade use a Mixin class to set meta like so ::

    class MyAceDocument(Document, BaseMixin):
        pass

    MyAceDocument._get_collection_name() == myacedocument
    MyAceDocument._get_collection_name() == "myacedocument"

Alternatively, you can rename your collections eg ::
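To avoid the FutureWarning described in the 0.5 to 0.6 notes above, inherited documents can declare the flag explicitly; a minimal sketch::

    from mongoengine import Document, StringField

    class Animal(Document):
        name = StringField()

        # State the current default explicitly; it is due to change in 0.7
        meta = {'allow_inheritance': True}

    class Dog(Animal):
        pass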
@@ -12,9 +12,7 @@ from signals import *
__all__ = (document.__all__ + fields.__all__ + connection.__all__ +
           queryset.__all__ + signals.__all__)

__author__ = 'Harry Marr'

VERSION = (0, 5, 1)
VERSION = (0, 6, 16)


def get_version():
File diff suppressed because it is too large.
@@ -1,82 +1,166 @@

from pymongo import Connection
import multiprocessing
import threading

__all__ = ['ConnectionError', 'connect']
import pymongo
from pymongo import Connection, ReplicaSetConnection, uri_parser


_connection_defaults = {
    'host': 'localhost',
    'port': 27017,
}
_connection = {}
_connection_settings = _connection_defaults.copy()
__all__ = ['ConnectionError', 'connect', 'register_connection',
           'DEFAULT_CONNECTION_NAME']

_db_name = None
_db_username = None
_db_password = None
_db = {}

DEFAULT_CONNECTION_NAME = 'default'


class ConnectionError(Exception):
    pass


def _get_connection(reconnect=False):
    """Handles the connection to the database
_connection_settings = {}
_connections = {}
_dbs = {}


def register_connection(alias, name, host='localhost', port=27017,
                        is_slave=False, read_preference=False, slaves=None,
                        username=None, password=None, **kwargs):
    """Add a connection.

    :param alias: the name that will be used to refer to this connection
        throughout MongoEngine
    :param name: the name of the specific database to use
    :param host: the host name of the :program:`mongod` instance to connect to
    :param port: the port that the :program:`mongod` instance is running on
    :param is_slave: whether the connection can act as a slave ** Deprecated pymongo 2.0.1+
    :param read_preference: The read preference for the collection ** Added pymongo 2.1
    :param slaves: a list of aliases of slave connections; each of these must
        be a registered connection that has :attr:`is_slave` set to ``True``
    :param username: username to authenticate with
    :param password: password to authenticate with
    :param kwargs: allow ad-hoc parameters to be passed into the pymongo driver

    """
    global _connection
    identity = get_identity()
    global _connection_settings

    conn_settings = {
        'name': name,
        'host': host,
        'port': port,
        'is_slave': is_slave,
        'slaves': slaves or [],
        'username': username,
        'password': password,
        'read_preference': read_preference
    }

    # Handle uri style connections
    if "://" in host:
        uri_dict = uri_parser.parse_uri(host)
        if uri_dict.get('database') is None:
            raise ConnectionError("If using URI style connection include "\
                                  "database name in string")
        conn_settings.update({
            'host': host,
            'name': uri_dict.get('database'),
            'username': uri_dict.get('username'),
            'password': uri_dict.get('password'),
            'read_preference': read_preference,
        })
        if "replicaSet" in host:
            conn_settings['replicaSet'] = True

    conn_settings.update(kwargs)
    _connection_settings[alias] = conn_settings
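For reference, registering and later using a named connection with this API looks like the following (the alias and database names are illustrative) ::

    from mongoengine.connection import register_connection, get_db

    # Store settings under the 'reporting' alias; no socket is opened yet
    register_connection('reporting', 'reports_db',
                        host='localhost', port=27017)

    db = get_db('reporting')   # connects lazily, returns the pymongo Database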

def disconnect(alias=DEFAULT_CONNECTION_NAME):
    global _connections
    global _dbs

    if alias in _connections:
        get_connection(alias=alias).disconnect()
        del _connections[alias]
    if alias in _dbs:
        del _dbs[alias]


def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
    global _connections
    # Connect to the database if not already connected
    if _connection.get(identity) is None or reconnect:
    if reconnect:
        disconnect(alias)

    if alias not in _connections:
        if alias not in _connection_settings:
            msg = 'Connection with alias "%s" has not been defined' % alias
            if alias == DEFAULT_CONNECTION_NAME:
                msg = 'You have not defined a default connection'
            raise ConnectionError(msg)
        conn_settings = _connection_settings[alias].copy()

        if hasattr(pymongo, 'version_tuple'):  # Support for 2.1+
            conn_settings.pop('name', None)
            conn_settings.pop('slaves', None)
            conn_settings.pop('is_slave', None)
            conn_settings.pop('username', None)
            conn_settings.pop('password', None)
        else:
            # Get all the slave connections
            if 'slaves' in conn_settings:
                slaves = []
                for slave_alias in conn_settings['slaves']:
                    slaves.append(get_connection(slave_alias))
                conn_settings['slaves'] = slaves
                conn_settings.pop('read_preference', None)

        connection_class = Connection
        if 'replicaSet' in conn_settings:
            conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
            # Discard port since it can't be used on ReplicaSetConnection
            conn_settings.pop('port', None)
            # Discard replicaSet if not base string
            if not isinstance(conn_settings['replicaSet'], basestring):
                conn_settings.pop('replicaSet', None)
            connection_class = ReplicaSetConnection

        try:
            _connection[identity] = Connection(**_connection_settings)
            _connections[alias] = connection_class(**conn_settings)
        except Exception, e:
            raise ConnectionError("Cannot connect to the database:\n%s" % e)
    return _connection[identity]
            raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e))
    return _connections[alias]
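When the host is given in MongoDB URI form, the database name must be embedded in the URI, and a ``replicaSet`` option switches the driver to ``ReplicaSetConnection``; a sketch with illustrative host names ::

    from mongoengine import connect

    # URI style: 'mydb' must appear in the URI or ConnectionError is raised
    connect('mydb', host='mongodb://localhost:27017,localhost:27018/'
                         'mydb?replicaSet=rs0')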

def _get_db(reconnect=False):
    """Handles database connections and authentication based on the current
    identity

def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
    global _dbs
    if reconnect:
        disconnect(alias)

    if alias not in _dbs:
        conn = get_connection(alias)
        conn_settings = _connection_settings[alias]
        _dbs[alias] = conn[conn_settings['name']]
        # Authenticate if necessary
        if conn_settings['username'] and conn_settings['password']:
            _dbs[alias].authenticate(conn_settings['username'],
                                     conn_settings['password'])
    return _dbs[alias]


def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs):
    """Connect to the database specified by the 'db' argument.

    Connection settings may be provided here as well if the database is not
    running on the default port on localhost. If authentication is needed,
    provide username and password arguments as well.

    Multiple databases are supported by using aliases. Provide a separate
    `alias` to connect to a different instance of :program:`mongod`.

    .. versionchanged:: 0.6 - added multiple database support.
    """
    global _db, _connection
    identity = get_identity()
    # Connect if not already connected
    if _connection.get(identity) is None or reconnect:
        _connection[identity] = _get_connection(reconnect=reconnect)
    global _connections
    if alias not in _connections:
        register_connection(alias, db, **kwargs)

    if _db.get(identity) is None or reconnect:
        # _db_name will be None if the user hasn't called connect()
        if _db_name is None:
            raise ConnectionError('Not connected to the database')

        # Get DB from current connection and authenticate if necessary
        _db[identity] = _connection[identity][_db_name]
        if _db_username and _db_password:
            _db[identity].authenticate(_db_username, _db_password)

    return _db[identity]

def get_identity():
    """Creates an identity key based on the current process and thread
    identity.
    """
    identity = multiprocessing.current_process()._identity
    identity = 0 if not identity else identity[0]

    identity = (identity, threading.current_thread().ident)
    return identity

def connect(db, username=None, password=None, **kwargs):
    """Connect to the database specified by the 'db' argument. Connection
    settings may be provided here as well if the database is not running on
    the default port on localhost. If authentication is needed, provide
    username and password arguments as well.
    """
    global _connection_settings, _db_name, _db_username, _db_password, _db
    _connection_settings = dict(_connection_defaults, **kwargs)
    _db_name = db
    _db_username = username
    _db_password = password
    return _get_db(reconnect=True)
    return get_connection(alias)

# Support old naming convention
_get_connection = get_connection
_get_db = get_db
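Putting the pieces together, the 0.6-style multi-database workflow looks like this (names illustrative) ::

    from mongoengine import connect
    from mongoengine.connection import get_db

    connect('main_db')                 # registers the 'default' alias
    connect('logs_db', alias='logs')   # a second database / mongod

    logs = get_db('logs')              # raw pymongo Database for 'logs'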

@@ -1,17 +1,15 @@

import operator
from bson import DBRef, SON

import pymongo

from base import BaseDict, BaseList, get_document, TopLevelDocumentMetaclass
from fields import ReferenceField
from connection import _get_db
from base import (BaseDict, BaseList, TopLevelDocumentMetaclass, get_document)
from fields import (ReferenceField, ListField, DictField, MapField)
from connection import get_db
from queryset import QuerySet
from document import Document


class DeReference(object):

    def __call__(self, items, max_depth=1, instance=None, name=None, get=False):
    def __call__(self, items, max_depth=1, instance=None, name=None):
        """
        Cheaply dereferences the items to a set depth.
        Also handles the conversion of complex data types.

@@ -45,7 +43,7 @@ class DeReference(object):

        self.reference_map = self._find_references(items)
        self.object_map = self._fetch_objects(doc_type=doc_type)
        return self._attach_objects(items, 0, instance, name, get)
        return self._attach_objects(items, 0, instance, name)

    def _find_references(self, items, depth=0):
        """
@@ -55,7 +53,7 @@ class DeReference(object):
        :param depth: The current depth of recursion
        """
        reference_map = {}
        if not items:
        if not items or depth >= self.max_depth:
            return reference_map

        # Determine the iterator to use
@@ -65,13 +63,14 @@ class DeReference(object):
            iterator = items.iteritems()

        # Recursively find dbreferences
        depth += 1
        for k, item in iterator:
            if hasattr(item, '_fields'):
                for field_name, field in item._fields.iteritems():
                    v = item._data.get(field_name, None)
                    if isinstance(v, (pymongo.dbref.DBRef)):
                    if isinstance(v, (DBRef)):
                        reference_map.setdefault(field.document_type, []).append(v.id)
                    elif isinstance(v, (dict, pymongo.son.SON)) and '_ref' in v:
                    elif isinstance(v, (dict, SON)) and '_ref' in v:
                        reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id)
                    elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
                        field_cls = getattr(getattr(field, 'field', None), 'document_type', None)
@@ -80,15 +79,15 @@ class DeReference(object):
                        if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
                            key = field_cls
                        reference_map.setdefault(key, []).extend(refs)
            elif isinstance(item, (pymongo.dbref.DBRef)):
            elif isinstance(item, (DBRef)):
                reference_map.setdefault(item.collection, []).append(item.id)
            elif isinstance(item, (dict, pymongo.son.SON)) and '_ref' in item:
            elif isinstance(item, (dict, SON)) and '_ref' in item:
                reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id)
            elif isinstance(item, (dict, list, tuple)) and depth <= self.max_depth:
                references = self._find_references(item, depth)
            elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
                references = self._find_references(item, depth - 1)
                for key, refs in references.iteritems():
                    reference_map.setdefault(key, []).extend(refs)
            depth += 1

        return reference_map

    def _fetch_objects(self, doc_type=None):
@@ -103,16 +102,26 @@ class DeReference(object):
                for key, doc in references.iteritems():
                    object_map[key] = doc
            else:  # Generic reference: use the refs data to convert to document
                references = _get_db()[col].find({'_id': {'$in': refs}})
                for ref in references:
                    if '_cls' in ref:
                        doc = get_document(ref['_cls'])._from_son(ref)
                    else:
                if doc_type and not isinstance(doc_type, (ListField, DictField, MapField,)):
                    references = doc_type._get_db()[col].find({'_id': {'$in': refs}})
                    for ref in references:
                        doc = doc_type._from_son(ref)
                        object_map[doc.id] = doc
                        object_map[doc.id] = doc
                else:
                    references = get_db()[col].find({'_id': {'$in': refs}})
                    for ref in references:
                        if '_cls' in ref:
                            doc = get_document(ref["_cls"])._from_son(ref)
                        elif doc_type is None:
                            doc = get_document(
                                ''.join(x.capitalize()
                                        for x in col.split('_')))._from_son(ref)
                        else:
                            doc = doc_type._from_son(ref)
                        object_map[doc.id] = doc
        return object_map

    def _attach_objects(self, items, depth=0, instance=None, name=None, get=False):
    def _attach_objects(self, items, depth=0, instance=None, name=None):
        """
        Recursively finds all db references to be dereferenced

@@ -122,7 +131,6 @@ class DeReference(object):
            :class:`~mongoengine.base.ComplexBaseField`
        :param name: The name of the field, used for tracking changes by
            :class:`~mongoengine.base.ComplexBaseField`
        :param get: A boolean determining if being called by __get__
        """
        if not items:
            if isinstance(items, (BaseDict, BaseList)):
@@ -130,17 +138,16 @@ class DeReference(object):

            if instance:
                if isinstance(items, dict):
                    return BaseDict(items, instance=instance, name=name)
                    return BaseDict(items, instance, name)
                else:
                    return BaseList(items, instance=instance, name=name)
                    return BaseList(items, instance, name)

        if isinstance(items, (dict, pymongo.son.SON)):
        if isinstance(items, (dict, SON)):
            if '_ref' in items:
                return self.object_map.get(items['_ref'].id, items)
            elif '_types' in items and '_cls' in items:
                doc = get_document(items['_cls'])._from_son(items)
                if not get:
                    doc._data = self._attach_objects(doc._data, depth, doc, name, get)
                doc._data = self._attach_objects(doc._data, depth, doc, name)
                return doc

        if not hasattr(items, 'items'):
@@ -152,35 +159,34 @@ class DeReference(object):
            iterator = items.iteritems()
            data = {}

        depth += 1
        for k, v in iterator:
            if is_list:
                data.append(v)
            else:
                data[k] = v

            if k in self.object_map:
            if k in self.object_map and not is_list:
                data[k] = self.object_map[k]
            elif hasattr(v, '_fields'):
                for field_name, field in v._fields.iteritems():
                    v = data[k]._data.get(field_name, None)
                    if isinstance(v, (pymongo.dbref.DBRef)):
                    if isinstance(v, (DBRef)):
                        data[k]._data[field_name] = self.object_map.get(v.id, v)
                    elif isinstance(v, (dict, pymongo.son.SON)) and '_ref' in v:
                    elif isinstance(v, (dict, SON)) and '_ref' in v:
                        data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v)
                    elif isinstance(v, dict) and depth < self.max_depth:
                        data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name, get=get)
                    elif isinstance(v, (list, tuple)):
                        data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name, get=get)
            elif isinstance(v, (dict, list, tuple)) and depth < self.max_depth:
                data[k] = self._attach_objects(v, depth, instance=instance, name=name, get=get)
                    elif isinstance(v, dict) and depth <= self.max_depth:
                        data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
                    elif isinstance(v, (list, tuple)) and depth <= self.max_depth:
                        data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
            elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
                data[k] = self._attach_objects(v, depth - 1, instance=instance, name=name)
            elif hasattr(v, 'id'):
                data[k] = self.object_map.get(v.id, v)

        if instance and name:
            if is_list:
                return BaseList(data, instance=instance, name=name)
            return BaseDict(data, instance=instance, name=name)
                return BaseList(data, instance, name)
            return BaseDict(data, instance, name)
        depth += 1
        return data

dereference = DeReference()
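In practice this machinery is driven through ``Document.select_related`` (changed later in this diff); with ``Author``/``BlogPost`` standing in for any reference-holding documents ::

    post = BlogPost.objects.first()
    post.select_related(max_depth=2)   # one batched query per referenced collection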
@@ -1,23 +1,39 @@

import datetime

from mongoengine import *

from django.utils.hashcompat import md5_constructor, sha_constructor
from django.utils.encoding import smart_str
from django.contrib.auth.models import AnonymousUser
from django.utils.translation import ugettext_lazy as _

import datetime
try:
    from django.contrib.auth.hashers import check_password, make_password
except ImportError:
    """Handle older versions of Django"""
    from django.utils.hashcompat import md5_constructor, sha_constructor

    def get_hexdigest(algorithm, salt, raw_password):
        raw_password, salt = smart_str(raw_password), smart_str(salt)
        if algorithm == 'md5':
            return md5_constructor(salt + raw_password).hexdigest()
        elif algorithm == 'sha1':
            return sha_constructor(salt + raw_password).hexdigest()
        raise ValueError('Got unknown password algorithm type in password')

    def check_password(raw_password, password):
        algo, salt, hash = password.split('$')
        return hash == get_hexdigest(algo, salt, raw_password)

    def make_password(raw_password):
        from random import random
        algo = 'sha1'
        salt = get_hexdigest(algo, str(random()), str(random()))[:5]
        hash = get_hexdigest(algo, salt, raw_password)
        return '%s$%s$%s' % (algo, salt, hash)


REDIRECT_FIELD_NAME = 'next'

def get_hexdigest(algorithm, salt, raw_password):
    raw_password, salt = smart_str(raw_password), smart_str(salt)
    if algorithm == 'md5':
        return md5_constructor(salt + raw_password).hexdigest()
    elif algorithm == 'sha1':
        return sha_constructor(salt + raw_password).hexdigest()
    raise ValueError('Got unknown password algorithm type in password')


class User(Document):
    """A User document that aims to mirror most of the API specified by Django
    at http://docs.djangoproject.com/en/dev/topics/auth/#users
@@ -34,7 +50,7 @@ class User(Document):
    email = EmailField(verbose_name=_('e-mail address'))
    password = StringField(max_length=128,
                           verbose_name=_('password'),
                           help_text=_("Use '[algo]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>."))
                           help_text=_("Use '[algo]$[iterations]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>."))
    is_staff = BooleanField(default=False,
                            verbose_name=_('staff status'),
                            help_text=_("Designates whether the user can log into this admin site."))
@@ -50,6 +66,7 @@ class User(Document):
                                verbose_name=_('date joined'))

    meta = {
        'allow_inheritance': True,
        'indexes': [
            {'fields': ['username'], 'unique': True}
        ]
@@ -75,11 +92,7 @@ class User(Document):
        assigning to :attr:`~mongoengine.django.auth.User.password` as the
        password is hashed before storage.
        """
        from random import random
        algo = 'sha1'
        salt = get_hexdigest(algo, str(random()), str(random()))[:5]
        hash = get_hexdigest(algo, salt, raw_password)
        self.password = '%s$%s$%s' % (algo, salt, hash)
        self.password = make_password(raw_password)
        self.save()
        return self

@@ -89,8 +102,7 @@ class User(Document):
        :attr:`~mongoengine.django.auth.User.password` as the password is
        hashed before storage.
        """
        algo, salt, hash = self.password.split('$')
        return hash == get_hexdigest(algo, salt, raw_password)
        return check_password(raw_password, self.password)

    @classmethod
    def create_user(cls, username, password, email=None):
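With the try/except import, ``check_password``/``make_password`` come from Django 1.4+ when available, falling back to the local SHA1 helpers otherwise; typical use (illustrative values) ::

    from mongoengine.django.auth import User

    user = User.create_user('bob', 's3cret', email='bob@example.com')
    assert user.check_password('s3cret')   # delegates to Django when possible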
@@ -1,3 +1,6 @@

from datetime import datetime

from django.conf import settings
from django.contrib.sessions.backends.base import SessionBase, CreateError
from django.core.exceptions import SuspiciousOperation
from django.utils.encoding import force_unicode
@@ -5,16 +8,22 @@ from django.utils.encoding import force_unicode
from mongoengine.document import Document
from mongoengine import fields
from mongoengine.queryset import OperationError
from mongoengine.connection import DEFAULT_CONNECTION_NAME

from datetime import datetime

MONGOENGINE_SESSION_DB_ALIAS = getattr(
    settings, 'MONGOENGINE_SESSION_DB_ALIAS',
    DEFAULT_CONNECTION_NAME)


class MongoSession(Document):
    session_key = fields.StringField(primary_key=True, max_length=40)
    session_data = fields.StringField()
    expire_date = fields.DateTimeField()

    meta = {'collection': 'django_session', 'allow_inheritance': False}

    meta = {'collection': 'django_session',
            'db_alias': MONGOENGINE_SESSION_DB_ALIAS,
            'allow_inheritance': False}


class SessionStore(SessionBase):
@@ -35,7 +44,7 @@ class SessionStore(SessionBase):

    def create(self):
        while True:
            self.session_key = self._get_new_session_key()
            self._session_key = self._get_new_session_key()
            try:
                self.save(must_create=True)
            except CreateError:
@@ -45,6 +54,8 @@ class SessionStore(SessionBase):
            return

    def save(self, must_create=False):
        if self.session_key is None:
            self._session_key = self._get_new_session_key()
        s = MongoSession(session_key=self.session_key)
        s.session_data = self.encode(self._get_session(no_load=must_create))
        s.expire_date = self.get_expiry_date()
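Wiring this backend into a Django project then takes two settings, assuming the project points ``SESSION_ENGINE`` at this module (the alias and database names are illustrative) ::

    # settings.py
    SESSION_ENGINE = 'mongoengine.django.sessions'
    MONGOENGINE_SESSION_DB_ALIAS = 'sessions'

    # at startup, register the alias the sessions will use
    from mongoengine.connection import register_connection
    register_connection('sessions', 'session_db')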
@@ -10,7 +10,7 @@ class MongoTestCase(TestCase):
    """
    db_name = 'test_%s' % settings.MONGO_DATABASE_NAME
    def __init__(self, methodName='runtest'):
        self.db = connect(self.db_name)
        self.db = connect(self.db_name).get_db()
        super(MongoTestCase, self).__init__(methodName)

    def _post_teardown(self):
@@ -1,13 +1,15 @@

from mongoengine import signals
from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument,
                  ValidationError, BaseDict, BaseList)
from queryset import OperationError
from connection import _get_db

import pymongo

__all__ = ['Document', 'EmbeddedDocument', 'ValidationError',
           'OperationError', 'InvalidCollectionError']
from bson.dbref import DBRef

from mongoengine import signals
from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument,
                  BaseDict, BaseList)
from queryset import OperationError
from connection import get_db, DEFAULT_CONNECTION_NAME

__all__ = ['Document', 'EmbeddedDocument', 'DynamicDocument',
           'DynamicEmbeddedDocument', 'OperationError', 'InvalidCollectionError']


class InvalidCollectionError(Exception):
@@ -23,6 +25,10 @@ class EmbeddedDocument(BaseDocument):

    __metaclass__ = DocumentMetaclass

    def __init__(self, *args, **kwargs):
        super(EmbeddedDocument, self).__init__(*args, **kwargs)
        self._changed_fields = []

    def __delattr__(self, *args, **kwargs):
        """Handle deletions of fields"""
        field_name = args[0]
@@ -34,6 +40,10 @@ class EmbeddedDocument(BaseDocument):
        else:
            super(EmbeddedDocument, self).__delattr__(*args, **kwargs)

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return self._data == other._data
        return False


class Document(BaseDocument):
@@ -70,49 +80,70 @@ class Document(BaseDocument):
    names. Index direction may be specified by prefixing the field names with
    a **+** or **-** sign.

    Automatic index creation can be disabled by specifying
    :attr:`auto_create_index` in the :attr:`meta` dictionary. If this is set to
    False then indexes will not be created by MongoEngine. This is useful in
    production systems where index creation is performed as part of a deployment
    system.

    By default, _types will be added to the start of every index (that
    doesn't contain a list) if allow_inheritence is True. This can be
    doesn't contain a list) if allow_inheritance is True. This can be
    disabled by either setting types to False on the specific index or
    by setting index_types to False on the meta dictionary for the document.
    """
    __metaclass__ = TopLevelDocumentMetaclass

    @classmethod
    def _get_collection(self):
        """Returns the collection for the document."""
        db = _get_db()
        collection_name = self._get_collection_name()
    @apply
    def pk():
        """Primary key alias
        """
        def fget(self):
            return getattr(self, self._meta['id_field'])
        def fset(self, value):
            return setattr(self, self._meta['id_field'], value)
        return property(fget, fset)

        if not hasattr(self, '_collection') or self._collection is None:
    @classmethod
    def _get_db(cls):
        """Returns the database for the document, honouring meta['db_alias']."""
        return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME))

    @classmethod
    def _get_collection(cls):
        """Returns the collection for the document."""
        if not hasattr(cls, '_collection') or cls._collection is None:
            db = cls._get_db()
            collection_name = cls._get_collection_name()
            # Create collection as a capped collection if specified
            if self._meta['max_size'] or self._meta['max_documents']:
            if cls._meta['max_size'] or cls._meta['max_documents']:
                # Get max document limit and max byte size from meta
                max_size = self._meta['max_size'] or 10000000  # 10MB default
                max_documents = self._meta['max_documents']
                max_size = cls._meta['max_size'] or 10000000  # 10MB default
                max_documents = cls._meta['max_documents']

                if collection_name in db.collection_names():
                    self._collection = db[collection_name]
                    cls._collection = db[collection_name]
                    # The collection already exists, check if its capped
                    # options match the specified capped options
                    options = self._collection.options()
                    options = cls._collection.options()
                    if options.get('max') != max_documents or \
                       options.get('size') != max_size:
                        msg = ('Cannot create collection "%s" as a capped '
                               'collection as it already exists') % self._collection
                               'collection as it already exists') % cls._collection
                        raise InvalidCollectionError(msg)
                else:
                    # Create the collection as a capped collection
                    opts = {'capped': True, 'size': max_size}
                    if max_documents:
                        opts['max'] = max_documents
                    self._collection = db.create_collection(
                    cls._collection = db.create_collection(
                        collection_name, **opts
                    )
            else:
                self._collection = db[collection_name]
        return self._collection
                cls._collection = db[collection_name]
        return cls._collection
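A document using the new per-document database alias together with the capped-collection and index options described above might look like this (alias and sizes illustrative) ::

    class LogEntry(Document):
        message = StringField()

        meta = {
            'db_alias': 'logs',          # resolved via Document._get_db()
            'max_size': 10000000,        # capped collection: 10MB
            'max_documents': 1000,
            'auto_create_index': False,  # indexes built at deploy time instead
        }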

    def save(self, safe=True, force_insert=False, validate=True, write_options=None, _refs=None):
    def save(self, safe=True, force_insert=False, validate=True, write_options=None,
             cascade=None, cascade_kwargs=None, _refs=None):
        """Save the :class:`~mongoengine.Document` to the database. If the
        document already exists, it will be updated, otherwise it will be
        created.
@@ -128,16 +159,25 @@ class Document(BaseDocument):
            :meth:`~pymongo.collection.Collection.save` OR
            :meth:`~pymongo.collection.Collection.insert`
            which will be used as options for the resultant ``getLastError`` command.
            For example, ``save(..., w=2, fsync=True)`` will wait until at least two servers
            have recorded the write and will force an fsync on each server being written to.
            For example, ``save(..., write_options={w: 2, fsync: True}, ...)`` will
            wait until at least two servers have recorded the write and will force an
            fsync on each server being written to.
        :param cascade: Sets the flag for cascading saves. You can set a default by setting
            "cascade" in the document __meta__
        :param cascade_kwargs: optional kwargs dictionary to be passed through to cascading saves
        :param _refs: A list of processed references used in cascading saves

        .. versionchanged:: 0.5
            In existing documents it only saves changed fields using set / unset
            Saves are cascaded and any :class:`~pymongo.dbref.DBRef` objects
            Saves are cascaded and any :class:`~bson.dbref.DBRef` objects
            that have changes are saved as well.
        """
        from fields import ReferenceField, GenericReferenceField
        .. versionchanged:: 0.6
            Cascade saves are optional - they default to True. For fine-grained
            control you can turn them off by setting meta['cascade'] = False on
            the document. You can also pass different kwargs to the cascade save
            using cascade_kwargs, which overwrites the existing kwargs with
            custom values.

        """
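Concretely, the new keyword arguments combine like so (a sketch; ``post`` stands for any saved document) ::

    post.save(write_options={'w': 2, 'fsync': True})   # wait for 2 servers
    post.save(cascade=False)                           # skip saving changed refs
    post.save(cascade=True, cascade_kwargs={'safe': False})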
        signals.pre_save.send(self.__class__, document=self)

        if validate:
@@ -148,11 +188,11 @@ class Document(BaseDocument):

        doc = self.to_mongo()

        created = '_id' in doc
        creation_mode = force_insert or not created
        created = force_insert or '_id' not in doc

        try:
            collection = self.__class__.objects._collection
            if creation_mode:
            if created:
                if force_insert:
                    object_id = collection.insert(doc, safe=safe, **write_options)
                else:
@@ -160,21 +200,34 @@ class Document(BaseDocument):
            else:
                object_id = doc['_id']
                updates, removals = self._delta()
                if updates:
                    collection.update({'_id': object_id}, {"$set": updates}, upsert=True, safe=safe, **write_options)
                if removals:
                    collection.update({'_id': object_id}, {"$unset": removals}, upsert=True, safe=safe, **write_options)

            # Save any references / generic references
            _refs = _refs or []
            for name, cls in self._fields.items():
                if isinstance(cls, (ReferenceField, GenericReferenceField)):
                    ref = getattr(self, name)
                    if ref and str(ref) not in _refs:
                        _refs.append(str(ref))
                        ref.save(safe=safe, force_insert=force_insert,
                                 validate=validate, write_options=write_options,
                                 _refs=_refs)
                # Need to add shard key to query, or you get an error
                select_dict = {'_id': object_id}
                shard_key = self.__class__._meta.get('shard_key', tuple())
                for k in shard_key:
                    actual_key = self._db_field_map.get(k, k)
                    select_dict[actual_key] = doc[actual_key]

                upsert = self._created
                if updates:
                    collection.update(select_dict, {"$set": updates}, upsert=upsert, safe=safe, **write_options)
                if removals:
                    collection.update(select_dict, {"$unset": removals}, upsert=upsert, safe=safe, **write_options)

            cascade = self._meta.get('cascade', True) if cascade is None else cascade
            if cascade:
                kwargs = {
                    "safe": safe,
                    "force_insert": force_insert,
                    "validate": validate,
                    "write_options": write_options,
                    "cascade": cascade
                }
                if cascade_kwargs:  # Allow granular control over cascades
                    kwargs.update(cascade_kwargs)
                kwargs['_refs'] = _refs
                #self._changed_fields = []
                self.cascade_save(**kwargs)

        except pymongo.errors.OperationFailure, err:
            message = 'Could not save document (%s)'
@@ -184,21 +237,33 @@ class Document(BaseDocument):
        id_field = self._meta['id_field']
        self[id_field] = self._fields[id_field].to_python(object_id)

        def reset_changed_fields(doc, inspected_docs=None):
            """Loop through and reset changed fields lists"""
        self._changed_fields = []
        self._created = False
        signals.post_save.send(self.__class__, document=self, created=created)
        return self

            inspected_docs = inspected_docs or []
            inspected_docs.append(doc)
            if hasattr(doc, '_changed_fields'):
                doc._changed_fields = []
    def cascade_save(self, *args, **kwargs):
        """Recursively saves any references / generic references on an object"""
        from fields import ReferenceField, GenericReferenceField
        _refs = kwargs.get('_refs', []) or []

            for field_name in doc._fields:
                field = getattr(doc, field_name)
                if field not in inspected_docs and hasattr(field, '_changed_fields'):
                    reset_changed_fields(field, inspected_docs)
        for name, cls in self._fields.items():

        reset_changed_fields(self)
        signals.post_save.send(self.__class__, document=self, created=creation_mode)
            if not isinstance(cls, (ReferenceField, GenericReferenceField)):
                continue

            ref = getattr(self, name)
            if not ref:
                continue
            if isinstance(ref, DBRef):
                continue

            ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data))
            if ref and ref_id not in _refs:
                _refs.append(ref_id)
                kwargs["_refs"] = _refs
                ref.save(**kwargs)
                ref._changed_fields = []

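The shard-key handling in ``save`` (and in ``update`` below) reads the key from document meta; declaring one looks like this (field names illustrative) ::

    class LogEntry(Document):
        machine = StringField()
        log = StringField()

        meta = {
            'shard_key': ('machine',)   # included in every save/update query
        }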
    def update(self, **kwargs):
        """Performs an update on the :class:`~mongoengine.Document`
@@ -210,7 +275,12 @@ class Document(BaseDocument):
        if not self.pk:
            raise OperationError('attempt to update a document not yet saved')

        return self.__class__.objects(pk=self.pk).update_one(**kwargs)
        # Need to add shard key to query, or you get an error
        select_dict = {'pk': self.pk}
        shard_key = self.__class__._meta.get('shard_key', tuple())
        for k in shard_key:
            select_dict[k] = getattr(self, k)
        return self.__class__.objects(**select_dict).update_one(**kwargs)

    def delete(self, safe=False):
        """Delete the :class:`~mongoengine.Document` from the database. This
@@ -220,10 +290,8 @@ class Document(BaseDocument):
        """
        signals.pre_delete.send(self.__class__, document=self)

        id_field = self._meta['id_field']
        object_id = self._fields[id_field].to_mongo(self[id_field])
        try:
            self.__class__.objects(**{id_field: object_id}).delete(safe=safe)
            self.__class__.objects(pk=self.pk).delete(safe=safe)
        except pymongo.errors.OperationFailure, err:
            message = u'Could not delete document (%s)' % err.message
            raise OperationError(message)
@@ -231,47 +299,54 @@ class Document(BaseDocument):
        signals.post_delete.send(self.__class__, document=self)

    def select_related(self, max_depth=1):
        """Handles dereferencing of :class:`~pymongo.dbref.DBRef` objects to
        """Handles dereferencing of :class:`~bson.dbref.DBRef` objects to
        a maximum depth in order to cut down the number of queries to mongodb.

        .. versionadded:: 0.5
        """
        from dereference import dereference
        self._data = dereference(self._data, max_depth)
        from dereference import DeReference
        self._data = DeReference()(self._data, max_depth)
        return self

    def reload(self):
    def reload(self, max_depth=1):
        """Reloads all attributes from the database.

        .. versionadded:: 0.1.2
        .. versionchanged:: 0.6  Now chainable
        """
        id_field = self._meta['id_field']
        obj = self.__class__.objects(**{id_field: self[id_field]}).first()
        obj = self.__class__.objects(
            **{id_field: self[id_field]}
        ).first().select_related(max_depth=max_depth)
        for field in self._fields:
            setattr(self, field, self._reload(field, obj[field]))
        self._changed_fields = []
        if self._dynamic:
            for name in self._dynamic_fields.keys():
                setattr(self, name, self._reload(name, obj._data[name]))
            self._changed_fields = obj._changed_fields
        return obj
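Since ``reload`` now returns the freshly loaded object it can be chained, and it dereferences to ``max_depth``; e.g. ::

    post = post.reload(max_depth=2)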
    def _reload(self, key, value):
        """Used by :meth:`~mongoengine.Document.reload` to ensure the
        correct instance is linked to self.
        """
        if isinstance(value, BaseDict):
            value = [(k, self._reload(k,v)) for k,v in value.items()]
            value = BaseDict(value, instance=self, name=key)
            value = [(k, self._reload(k, v)) for k, v in value.items()]
            value = BaseDict(value, self, key)
        elif isinstance(value, BaseList):
            value = [self._reload(key, v) for v in value]
            value = BaseList(value, instance=self, name=key)
        elif isinstance(value, EmbeddedDocument):
            value = BaseList(value, self, key)
        elif isinstance(value, (EmbeddedDocument, DynamicEmbeddedDocument)):
            value._changed_fields = []
        return value

    def to_dbref(self):
        """Returns an instance of :class:`~pymongo.dbref.DBRef` useful in
        """Returns an instance of :class:`~bson.dbref.DBRef` useful in
        `__raw__` queries."""
        if not self.pk:
            msg = "Only saved documents can have a valid dbref"
            raise OperationError(msg)
        return pymongo.dbref.DBRef(self.__class__._get_collection_name(), self.pk)
        return DBRef(self.__class__._get_collection_name(), self.pk)

    @classmethod
    def register_delete_rule(cls, document_cls, field_name, rule):
@@ -285,8 +360,52 @@ class Document(BaseDocument):
        """Drops the entire collection associated with this
        :class:`~mongoengine.Document` type from the database.
        """
        db = _get_db()
        from mongoengine.queryset import QuerySet
        db = cls._get_db()
        db.drop_collection(cls._get_collection_name())
        QuerySet._reset_already_indexed(cls)


class DynamicDocument(Document):
    """A Dynamic Document class allowing flexible, expandable and uncontrolled
    schemas. As a :class:`~mongoengine.Document` subclass, acts in the same
    way as an ordinary document but has expando style properties. Any data
    passed or set against the :class:`~mongoengine.DynamicDocument` that is
    not a field is automatically converted into a
    :class:`~mongoengine.DynamicField` and data can be attributed to that
    field.

    .. note::

        There is one caveat on Dynamic Documents: fields cannot start with `_`
    """
    __metaclass__ = TopLevelDocumentMetaclass
    _dynamic = True

    def __delattr__(self, *args, **kwargs):
        """Deletes the attribute by setting to None and allowing _delta to unset
        it"""
        field_name = args[0]
        if field_name in self._dynamic_fields:
            setattr(self, field_name, None)
        else:
            super(DynamicDocument, self).__delattr__(*args, **kwargs)


class DynamicEmbeddedDocument(EmbeddedDocument):
    """A Dynamic Embedded Document class allowing flexible, expandable and
    uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more
    information about dynamic documents.
    """

    __metaclass__ = DocumentMetaclass
    _dynamic = True

    def __delattr__(self, *args, **kwargs):
        """Deletes the attribute by setting to None and allowing _delta to unset
        it"""
        field_name = args[0]
        setattr(self, field_name, None)


class MapReduceDocument(object):
@@ -294,7 +413,7 @@ class MapReduceDocument(object):

    :param collection: An instance of :class:`~pymongo.Collection`
    :param key: Document/result key, often an instance of
                :class:`~pymongo.objectid.ObjectId`. If supplied as
                :class:`~bson.objectid.ObjectId`. If supplied as
                an ``ObjectId`` found in the given ``collection``,
                the object can be accessed via the ``object`` property.
    :param value: The result(s) for this key.
|
||||
|
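A quick illustration of the expando behaviour described above (field names illustrative) ::

    class Page(DynamicDocument):
        title = StringField()

    page = Page(title='Using MongoEngine')
    page.tags = ['mongodb', 'mongoengine']   # becomes a DynamicField
    page.save()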
@@ -1,27 +1,39 @@

from base import (BaseField, ComplexBaseField, ObjectIdField,
                  ValidationError, get_document)
from queryset import DO_NOTHING
from document import Document, EmbeddedDocument
from connection import _get_db
from operator import itemgetter

import re
import pymongo
import pymongo.dbref
import pymongo.son
import pymongo.binary
import datetime, time
import datetime
import time
import decimal
import gridfs
import re
import uuid

from bson import Binary, DBRef, SON, ObjectId

from base import (BaseField, ComplexBaseField, ObjectIdField,
                  ValidationError, get_document, BaseDocument)
from queryset import DO_NOTHING, QuerySet
from document import Document, EmbeddedDocument
from connection import get_db, DEFAULT_CONNECTION_NAME
from operator import itemgetter


try:
    from PIL import Image, ImageOps
except ImportError:
    Image = None
    ImageOps = None

try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO


__all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField',
           'DateTimeField', 'EmbeddedDocumentField', 'ListField', 'DictField',
           'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField',
           'DecimalField', 'ComplexDateTimeField', 'URLField',
           'DecimalField', 'ComplexDateTimeField', 'URLField', 'DynamicField',
           'GenericReferenceField', 'FileField', 'BinaryField',
           'SortedListField', 'EmailField', 'GeoPointField',
           'SequenceField', 'GenericEmbeddedDocumentField']
           'SortedListField', 'EmailField', 'GeoPointField', 'ImageField',
           'SequenceField', 'UUIDField', 'GenericEmbeddedDocumentField']

RECURSIVE_REFERENCE_CONSTANT = 'self'

@@ -37,20 +49,23 @@ class StringField(BaseField):
        super(StringField, self).__init__(**kwargs)

    def to_python(self, value):
        return unicode(value)
        if isinstance(value, unicode):
            return value
        else:
            return value.decode('utf-8')

    def validate(self, value):
        assert isinstance(value, (str, unicode))
        if not isinstance(value, basestring):
            self.error('StringField only accepts string values')

        if self.max_length is not None and len(value) > self.max_length:
            raise ValidationError('String value is too long')
            self.error('String value is too long')

        if self.min_length is not None and len(value) < self.min_length:
            raise ValidationError('String value is too short')
            self.error('String value is too short')

        if self.regex is not None and self.regex.match(value) is None:
            message = 'String value did not match validation regex'
            raise ValidationError(message)
            self.error('String value did not match validation regex')

    def lookup_member(self, member_name):
        return None
@@ -100,16 +115,15 @@ class URLField(StringField):

    def validate(self, value):
        if not URLField.URL_REGEX.match(value):
            raise ValidationError('Invalid URL: %s' % value)
            self.error('Invalid URL: %s' % value)

        if self.verify_exists:
            import urllib2
            try:
                request = urllib2.Request(value)
                response = urllib2.urlopen(request)
                urllib2.urlopen(request)
            except Exception, e:
                message = 'This URL appears to be a broken link: %s' % e
                raise ValidationError(message)
                self.error('This URL appears to be a broken link: %s' % e)


class EmailField(StringField):
@@ -126,7 +140,7 @@ class EmailField(StringField):

    def validate(self, value):
        if not EmailField.EMAIL_REGEX.match(value):
            raise ValidationError('Invalid Mail-address: %s' % value)
            self.error('Invalid Mail-address: %s' % value)


class IntField(BaseField):
@@ -144,15 +158,18 @@ class IntField(BaseField):
        try:
            value = int(value)
        except:
            raise ValidationError('%s could not be converted to int' % value)
            self.error('%s could not be converted to int' % value)

        if self.min_value is not None and value < self.min_value:
            raise ValidationError('Integer value is too small')
            self.error('Integer value is too small')

        if self.max_value is not None and value > self.max_value:
            raise ValidationError('Integer value is too large')
            self.error('Integer value is too large')

    def prepare_query_value(self, op, value):
        if value is None:
            return value

        return int(value)


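The recurring change in these hunks swaps ``raise ValidationError(...)`` for the field's ``self.error(...)`` helper, which attaches field context to the error. A custom field would follow the same pattern (``HexField`` is hypothetical, shown only to illustrate the idiom) ::

    class HexField(StringField):
        def validate(self, value):
            super(HexField, self).validate(value)
            try:
                int(value, 16)
            except (TypeError, ValueError):
                # self.error raises a ValidationError with field context
                self.error('%s is not a hexadecimal string' % value)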
@@ -170,15 +187,19 @@ class FloatField(BaseField):
    def validate(self, value):
        if isinstance(value, int):
            value = float(value)
        assert isinstance(value, float)
        if not isinstance(value, float):
            self.error('FloatField only accepts float values')

        if self.min_value is not None and value < self.min_value:
            raise ValidationError('Float value is too small')
            self.error('Float value is too small')

        if self.max_value is not None and value > self.max_value:
            raise ValidationError('Float value is too large')
            self.error('Float value is too large')

    def prepare_query_value(self, op, value):
        if value is None:
            return value

        return float(value)


@@ -207,13 +228,13 @@ class DecimalField(BaseField):
        try:
            value = decimal.Decimal(value)
        except Exception, exc:
            raise ValidationError('Could not convert to decimal: %s' % exc)
            self.error('Could not convert value to decimal: %s' % exc)

        if self.min_value is not None and value < self.min_value:
            raise ValidationError('Decimal value is too small')
            self.error('Decimal value is too small')

        if self.max_value is not None and value > self.max_value:
            raise ValidationError('Decimal value is too large')
            self.error('Decimal value is too large')


class BooleanField(BaseField):
@@ -226,7 +247,8 @@ class BooleanField(BaseField):
        return bool(value)

    def validate(self, value):
        assert isinstance(value, bool)
        if not isinstance(value, bool):
            self.error('BooleanField only accepts boolean values')


class DateTimeField(BaseField):
@@ -239,7 +261,8 @@ class DateTimeField(BaseField):
    """

    def validate(self, value):
        assert isinstance(value, (datetime.datetime, datetime.date))
        if not isinstance(value, (datetime.datetime, datetime.date)):
            self.error(u'cannot parse date "%s"' % value)

    def to_mongo(self, value):
        return self.prepare_query_value(None, value)
@@ -355,13 +378,13 @@ class ComplexDateTimeField(StringField):
        return self._convert_from_string(data)

    def __set__(self, instance, value):
        value = self._convert_from_datetime(value)
        value = self._convert_from_datetime(value) if value else value
        return super(ComplexDateTimeField, self).__set__(instance, value)

    def validate(self, value):
        if not isinstance(value, datetime.datetime):
            raise ValidationError('Only datetime objects may used in a \
                                   ComplexDateTimeField')
            self.error('Only datetime objects may used in a '
                       'ComplexDateTimeField')

    def to_python(self, value):
        return self._convert_from_string(value)
@@ -381,8 +404,8 @@ class EmbeddedDocumentField(BaseField):
    def __init__(self, document_type, **kwargs):
        if not isinstance(document_type, basestring):
            if not issubclass(document_type, EmbeddedDocument):
                raise ValidationError('Invalid embedded document class '
                                      'provided to an EmbeddedDocumentField')
                self.error('Invalid embedded document class provided to an '
                           'EmbeddedDocumentField')
        self.document_type_obj = document_type
        super(EmbeddedDocumentField, self).__init__(**kwargs)

@@ -411,8 +434,8 @@ class EmbeddedDocumentField(BaseField):
        """
        # Using isinstance also works for subclasses of self.document
        if not isinstance(value, self.document_type):
            raise ValidationError('Invalid embedded document instance '
                                  'provided to an EmbeddedDocumentField')
            self.error('Invalid embedded document instance provided to an '
                       'EmbeddedDocumentField')
        self.document_type.validate(value)

    def lookup_member(self, member_name):
@@ -427,6 +450,9 @@ class GenericEmbeddedDocumentField(BaseField):
    :class:`~mongoengine.EmbeddedDocument` to be stored.

    Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`.

    .. note:: You can use the choices param to limit the acceptable
        EmbeddedDocument types
    """

    def prepare_query_value(self, op, value):
@@ -441,8 +467,8 @@ class GenericEmbeddedDocumentField(BaseField):

    def validate(self, value):
        if not isinstance(value, EmbeddedDocument):
            raise ValidationError('Invalid embedded document instance '
                                  'provided to an GenericEmbeddedDocumentField')
            self.error('Invalid embedded document instance provided to an '
                       'GenericEmbeddedDocumentField')

        value.validate()

@@ -456,9 +482,56 @@ class GenericEmbeddedDocumentField(BaseField):
        return data


class DynamicField(BaseField):
    """A truly dynamic field type capable of handling different and varying
    types of data.

    Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data"""

    def to_mongo(self, value):
        """Convert a Python type to a MongoDB-compatible type.
        """

        if isinstance(value, basestring):
            return value

        if hasattr(value, 'to_mongo'):
            return value.to_mongo()

        if not isinstance(value, (dict, list, tuple)):
            return value

        is_list = False
        if not hasattr(value, 'items'):
            is_list = True
            value = dict([(k, v) for k, v in enumerate(value)])

        data = {}
        for k, v in value.items():
            data[k] = self.to_mongo(v)

        if is_list:  # Convert back to a list
            value = [v for k, v in sorted(data.items(), key=itemgetter(0))]
        else:
            value = data
        return value

    def lookup_member(self, member_name):
        return member_name

    def prepare_query_value(self, op, value):
        if isinstance(value, basestring):
            from mongoengine.fields import StringField
            return StringField().prepare_query_value(op, value)
        return self.to_mongo(value)


class ListField(ComplexBaseField):
    """A list field that wraps a standard field, allowing multiple instances
    of the field to be used as a list in the database.

    .. note::
        Required means it cannot be empty - as the default for ListFields is []
    """

    # ListFields cannot be indexed with _types - MongoDB doesn't support this
@@ -472,14 +545,15 @@ class ListField(ComplexBaseField):
    def validate(self, value):
        """Make sure that a list of valid fields is being used.
        """
        if not isinstance(value, (list, tuple)):
            raise ValidationError('Only lists and tuples may be used in a '
                                  'list field')
        if (not isinstance(value, (list, tuple, QuerySet)) or
                isinstance(value, basestring)):
            self.error('Only lists and tuples may be used in a list field')
        super(ListField, self).validate(value)

    def prepare_query_value(self, op, value):
        if self.field:
            if op in ('set', 'unset') and (not isinstance(value, basestring)
                                           and not isinstance(value, BaseDocument)
                                           and hasattr(value, '__iter__')):
                return [self.field.prepare_query_value(op, v) for v in value]
            return self.field.prepare_query_value(op, value)
@@ -491,27 +565,40 @@ class SortedListField(ListField):
    the database in order to ensure that a sorted list is always
    retrieved.

    .. warning::
        There is a potential race condition when handling lists. If you set /
        save the whole list then other processes trying to save the whole list
        as well could overwrite changes. The safest way to append to a list is
        to perform a push operation.

    .. versionadded:: 0.4
    .. versionchanged:: 0.6 - added reverse keyword
    """

    _ordering = None
    _order_reverse = False

    def __init__(self, field, **kwargs):
        if 'ordering' in kwargs.keys():
            self._ordering = kwargs.pop('ordering')
        if 'reverse' in kwargs.keys():
            self._order_reverse = kwargs.pop('reverse')
        super(SortedListField, self).__init__(field, **kwargs)

    def to_mongo(self, value):
        value = super(SortedListField, self).to_mongo(value)
        if self._ordering is not None:
            return sorted(value, key=itemgetter(self._ordering))
        return sorted(value)
            return sorted(value, key=itemgetter(self._ordering), reverse=self._order_reverse)
        return sorted(value, reverse=self._order_reverse)


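The new ``reverse`` keyword combines with ``ordering`` like so (illustrative embedded document) ::

    class Comment(EmbeddedDocument):
        created = DateTimeField()
        text = StringField()

    class Post(Document):
        # newest comments first, keyed on the 'created' item
        comments = SortedListField(EmbeddedDocumentField(Comment),
                                   ordering='created', reverse=True)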
class DictField(ComplexBaseField):
    """A dictionary field that wraps a standard Python dictionary. This is
    similar to an embedded document, but the structure is not defined.

    .. note::
        Required means it cannot be empty - as the default for DictFields is {}

    .. versionadded:: 0.3
    .. versionchanged:: 0.5 - Can now handle complex / varying types of data
    """
@@ -519,7 +606,8 @@ class DictField(ComplexBaseField):
    def __init__(self, basecls=None, field=None, *args, **kwargs):
        self.field = field
        self.basecls = basecls or BaseField
        assert issubclass(self.basecls, BaseField)
        if not issubclass(self.basecls, BaseField):
            self.error('DictField only accepts dict values')
        kwargs.setdefault('default', lambda: {})
        super(DictField, self).__init__(*args, **kwargs)

@@ -527,12 +615,13 @@ class DictField(ComplexBaseField):
        """Make sure that a list of valid fields is being used.
        """
        if not isinstance(value, dict):
            raise ValidationError('Only dictionaries may be used in a '
                                  'DictField')
            self.error('Only dictionaries may be used in a DictField')

        if any(('.' in k or '$' in k) for k in value):
            raise ValidationError('Invalid dictionary key name - keys may not '
                                  'contain "." or "$" characters')
        if any(k for k in value.keys() if not isinstance(k, basestring)):
            self.error('Invalid dictionary key - documents must have only string keys')
        if any(('.' in k or '$' in k) for k in value.keys()):
            self.error('Invalid dictionary key name - keys may not contain "."'
                       ' or "$" characters')
        super(DictField, self).validate(value)

    def lookup_member(self, member_name):
@@ -559,18 +648,19 @@ class MapField(DictField):

    def __init__(self, field=None, *args, **kwargs):
        if not isinstance(field, BaseField):
            raise ValidationError('Argument to MapField constructor must be '
                                  'a valid field')
            self.error('Argument to MapField constructor must be a valid '
                       'field')
        super(MapField, self).__init__(field=field, *args, **kwargs)


class ReferenceField(BaseField):
    """A reference to a document that will be automatically dereferenced on
    access (lazily).

    Use the `reverse_delete_rule` to handle what should happen if the document
    the field is referencing is deleted.
    the field is referencing is deleted. EmbeddedDocuments, DictFields and
    MapFields do not support reverse_delete_rules and an `InvalidDocumentError`
    will be raised if trying to set on one of these Document / Field types.

    The options are:

@@ -578,6 +668,18 @@ class ReferenceField(BaseField):
      * NULLIFY - Updates the reference to null.
      * CASCADE - Deletes the documents associated with the reference.
      * DENY - Prevent the deletion of the reference object.
      * PULL - Pull the reference from a :class:`~mongoengine.ListField` of references

    Alternative syntax for registering delete rules (useful when implementing
    bi-directional delete rules)

    .. code-block:: python

        class Bar(Document):
            content = StringField()
            foo = ReferenceField('Foo')

        Bar.register_delete_rule(Foo, 'bar', NULLIFY)

    .. versionchanged:: 0.5 added `reverse_delete_rule`
    """
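For the common, non-bi-directional case the rule is declared directly on the field; a sketch with illustrative classes (the rule constants such as ``NULLIFY`` come from mongoengine's queryset module) ::

    from mongoengine import *

    class Employee(Document):
        name = StringField()

    class Task(Document):
        # when an Employee is deleted, this reference is set to null
        employee = ReferenceField(Employee, reverse_delete_rule=NULLIFY)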
@@ -590,8 +692,8 @@ class ReferenceField(BaseField):
|
||||
"""
|
||||
if not isinstance(document_type, basestring):
|
||||
if not issubclass(document_type, (Document, basestring)):
|
||||
raise ValidationError('Argument to ReferenceField constructor '
|
||||
'must be a document class or a string')
|
||||
self.error('Argument to ReferenceField constructor must be a '
|
||||
'document class or a string')
|
||||
self.document_type_obj = document_type
|
||||
self.reverse_delete_rule = reverse_delete_rule
|
||||
super(ReferenceField, self).__init__(**kwargs)
|
||||
@@ -615,14 +717,17 @@ class ReferenceField(BaseField):
|
||||
# Get value from document instance if available
|
||||
value = instance._data.get(self.name)
|
||||
# Dereference DBRefs
|
||||
if isinstance(value, (pymongo.dbref.DBRef)):
|
||||
value = _get_db().dereference(value)
|
||||
if isinstance(value, (DBRef)):
|
||||
value = self.document_type._get_db().dereference(value)
|
||||
if value is not None:
|
||||
instance._data[self.name] = self.document_type._from_son(value)
|
||||
|
||||
return super(ReferenceField, self).__get__(instance, owner)
|
||||
|
||||
def to_mongo(self, document):
|
||||
if isinstance(document, DBRef):
|
||||
return document
|
||||
|
||||
id_field_name = self.document_type._meta['id_field']
|
||||
id_field = self.document_type._fields[id_field_name]
|
||||
|
||||
@@ -630,25 +735,28 @@ class ReferenceField(BaseField):
|
||||
# We need the id from the saved object to create the DBRef
|
||||
id_ = document.id
|
||||
if id_ is None:
|
||||
raise ValidationError('You can only reference documents once '
|
||||
'they have been saved to the database')
|
||||
self.error('You can only reference documents once they have'
|
||||
' been saved to the database')
|
||||
else:
|
||||
id_ = document
|
||||
|
||||
id_ = id_field.to_mongo(id_)
|
||||
collection = self.document_type._get_collection_name()
|
||||
return pymongo.dbref.DBRef(collection, id_)
|
||||
return DBRef(collection, id_)
|
||||
|
||||
def prepare_query_value(self, op, value):
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
return self.to_mongo(value)
|
||||
|
||||
def validate(self, value):
|
||||
assert isinstance(value, (self.document_type, pymongo.dbref.DBRef))
|
||||
if not isinstance(value, (self.document_type, DBRef)):
|
||||
self.error('A ReferenceField only accepts DBRef')
|
||||
|
||||
if isinstance(value, Document) and value.id is None:
|
||||
raise ValidationError('You can only reference documents once '
|
||||
'they have been saved to the database')
|
||||
|
||||
self.error('You can only reference documents once they have been '
|
||||
'saved to the database')
|
||||
|
||||
def lookup_member(self, member_name):
|
||||
return self.document_type._fields.get(member_name)
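
The net effect of the methods above, continuing the hypothetical Author/Post sketch: the field is persisted as a bson DBRef and only dereferenced on first attribute access. A hedged illustration:

.. code-block:: python

    post = Post.objects.first()
    raw = Post.objects._collection.find_one()   # raw pymongo document
    # raw['author'] is a DBRef, not an Author instance
    author = post.author                        # first access issues one extra query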
@@ -661,6 +769,8 @@ class GenericReferenceField(BaseField):
    ..note :: Any documents used as a generic reference must be registered in the
    document registry. Importing the model will automatically register it.

+    ..note :: You can use the choices param to limit the acceptable Document types
+
    .. versionadded:: 0.3
    """

@@ -669,24 +779,24 @@ class GenericReferenceField(BaseField):
            return self

        value = instance._data.get(self.name)
-        if isinstance(value, (dict, pymongo.son.SON)):
+        if isinstance(value, (dict, SON)):
            instance._data[self.name] = self.dereference(value)

        return super(GenericReferenceField, self).__get__(instance, owner)

    def validate(self, value):
-        if not isinstance(value, (Document, pymongo.dbref.DBRef)):
-            raise ValidationError('GenericReferences can only contain documents')
+        if not isinstance(value, (Document, DBRef)):
+            self.error('GenericReferences can only contain documents')

        # We need the id from the saved object to create the DBRef
        if isinstance(value, Document) and value.id is None:
-            raise ValidationError('You can only reference documents once '
-                                  'they have been saved to the database')
+            self.error('You can only reference documents once they have been'
+                       ' saved to the database')

    def dereference(self, value):
        doc_cls = get_document(value['_cls'])
        reference = value['_ref']
-        doc = _get_db().dereference(reference)
+        doc = doc_cls._get_db().dereference(reference)
        if doc is not None:
            doc = doc_cls._from_son(doc)
        return doc

@@ -695,6 +805,9 @@ class GenericReferenceField(BaseField):
        if document is None:
            return None

+        if isinstance(document, (dict, SON)):
+            return document
+
        id_field_name = document.__class__._meta['id_field']
        id_field = document.__class__._fields[id_field_name]

@@ -702,17 +815,20 @@ class GenericReferenceField(BaseField):
            # We need the id from the saved object to create the DBRef
            id_ = document.id
            if id_ is None:
-                raise ValidationError('You can only reference documents once '
-                                      'they have been saved to the database')
+                self.error('You can only reference documents once they have'
+                           ' been saved to the database')
        else:
            id_ = document

        id_ = id_field.to_mongo(id_)
        collection = document._get_collection_name()
-        ref = pymongo.dbref.DBRef(collection, id_)
+        ref = DBRef(collection, id_)

        return {'_cls': document._class_name, '_ref': ref}

    def prepare_query_value(self, op, value):
        if value is None:
            return None

        return self.to_mongo(value)
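
Because the referenced class is not fixed, to_mongo above emits a small mapping rather than a bare DBRef, so the class can be recovered at load time. A sketch (Bookmark and the reuse of Post are illustrative):

.. code-block:: python

    class Bookmark(Document):
        bookmarked = GenericReferenceField()

    Bookmark(bookmarked=Post.objects.first()).save()
    # persisted roughly as:
    #   {'bookmarked': {'_cls': 'Post', '_ref': DBRef('post', ObjectId('...'))}}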
class GridFSError(Exception):
@@ -725,17 +841,17 @@ class BinaryField(BaseField):
        super(BinaryField, self).__init__(**kwargs)

    def to_mongo(self, value):
-        return pymongo.binary.Binary(value)
+        return Binary(value)

    def to_python(self, value):
        # Returns str not unicode as this is binary data
-        return str(value)
+        return "%s" % value

    def validate(self, value):
-        assert isinstance(value, str)
+        if not isinstance(value, basestring):
+            self.error('BinaryField only accepts string values')

        if self.max_bytes is not None and len(value) > self.max_bytes:
-            raise ValidationError('Binary value is too long')
+            self.error('Binary value is too long')
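
For completeness, a short usage sketch of the max_bytes guard validated above (Attachment is illustrative):

.. code-block:: python

    class Attachment(Document):
        blob = BinaryField(max_bytes=1024 * 1024)    # reject payloads over 1 MiB

    Attachment(blob='\x00\x01raw-bytes').save()      # ok; str is bytes on Python 2
    Attachment(blob='x' * (2 * 1024 * 1024)).save()  # raises ValidationError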
@@ -747,17 +863,28 @@ class GridFSProxy(object):

    .. versionadded:: 0.4
    .. versionchanged:: 0.5 - added optional size param to read
+    .. versionchanged:: 0.6 - added collection name param
    """

-    def __init__(self, grid_id=None, key=None, instance=None):
-        self.fs = gridfs.GridFS(_get_db())  # Filesystem instance
-        self.newfile = None  # Used for partial writes
-        self.grid_id = grid_id  # Store GridFS id for file
-        self.gridout = None
+    _fs = None
+
+    def __init__(self, grid_id=None, key=None,
+                 instance=None,
+                 db_alias=DEFAULT_CONNECTION_NAME,
+                 collection_name='fs'):
+        self.grid_id = grid_id  # Store GridFS id for file
+        self.key = key
+        self.instance = instance
+        self.db_alias = db_alias
+        self.collection_name = collection_name
+        self.newfile = None  # Used for partial writes
+        self.gridout = None

    def __getattr__(self, name):
+        attrs = ('_fs', 'grid_id', 'key', 'instance', 'db_alias',
+                 'collection_name', 'newfile', 'gridout')
+        if name in attrs:
+            return self.__getattribute__(name)
        obj = self.get()
        if name in dir(obj):
            return getattr(obj, name)
@@ -769,6 +896,24 @@ class GridFSProxy(object):
    def __nonzero__(self):
        return bool(self.grid_id)

+    def __getstate__(self):
+        self_dict = self.__dict__
+        self_dict['_fs'] = None
+        return self_dict
+
+    def __repr__(self):
+        return '<%s: %s>' % (self.__class__.__name__, self.grid_id)
+
+    def __cmp__(self, other):
+        return cmp((self.grid_id, self.collection_name, self.db_alias),
+                   (other.grid_id, other.collection_name, other.db_alias))
+
+    @property
+    def fs(self):
+        if not self._fs:
+            self._fs = gridfs.GridFS(get_db(self.db_alias), self.collection_name)
+        return self._fs
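
The lazy `fs` property plus `__getstate__` is what makes these proxies safe to pickle: the live GridFS handle is dropped on serialisation and rebuilt on demand. A hedged sketch, assuming `file_id` is the ObjectId of an existing GridFS file and connect() has already been called in the unpickling process:

.. code-block:: python

    import pickle

    proxy = GridFSProxy(grid_id=file_id)
    data = pickle.dumps(proxy)      # __getstate__ blanks the _fs handle
    restored = pickle.loads(data)
    contents = restored.read()      # the fs property reconnects lazily here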
    def get(self, id=None):
        if id:
            self.grid_id = id
@@ -809,10 +954,14 @@ class GridFSProxy(object):
            self.newfile.writelines(lines)

    def read(self, size=-1):
-        try:
-            return self.get().read(size)
-        except:
+        gridout = self.get()
+        if gridout is None:
            return None
+        else:
+            try:
+                return gridout.read(size)
+            except:
+                return ""

    def delete(self):
        # Delete file from GridFS, FileField still remains
@@ -840,10 +989,16 @@ class FileField(BaseField):

    .. versionadded:: 0.4
    .. versionchanged:: 0.5 added optional size param for read
+    .. versionchanged:: 0.6 added db_alias for multidb support
    """
+    proxy_class = GridFSProxy

-    def __init__(self, **kwargs):
+    def __init__(self,
+                 db_alias=DEFAULT_CONNECTION_NAME,
+                 collection_name="fs", **kwargs):
        super(FileField, self).__init__(**kwargs)
+        self.collection_name = collection_name
+        self.db_alias = db_alias
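
With the new constructor arguments, files can be routed to a non-default database and collection. A hedged sketch (alias and class names illustrative):

.. code-block:: python

    from mongoengine import connect, register_connection

    connect('appdb')                          # default alias
    register_connection('media', 'mediadb')   # extra alias -> database name

    class Asset(Document):
        # chunks land in mediadb, in collections assets.files / assets.chunks
        payload = FileField(db_alias='media', collection_name='assets')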
    def __get__(self, instance, owner):
        if instance is None:
@@ -851,17 +1006,20 @@ class FileField(BaseField):

        # Check if a file already exists for this model
        grid_file = instance._data.get(self.name)
-        self.grid_file = grid_file
-        if isinstance(self.grid_file, GridFSProxy):
-            if not self.grid_file.key:
-                self.grid_file.key = self.name
-                self.grid_file.instance = instance
-            return self.grid_file
-        return GridFSProxy(key=self.name, instance=instance)
+        if not isinstance(grid_file, self.proxy_class):
+            grid_file = self.proxy_class(key=self.name, instance=instance,
+                                         db_alias=self.db_alias,
+                                         collection_name=self.collection_name)
+            instance._data[self.name] = grid_file
+
+        if not grid_file.key:
+            grid_file.key = self.name
+            grid_file.instance = instance
+        return grid_file

    def __set__(self, instance, value):
        key = self.name
-        if isinstance(value, file) or isinstance(value, str):
+        if (hasattr(value, 'read') and not isinstance(value, GridFSProxy)) or isinstance(value, basestring):
            # using "FileField() = file/string" notation
            grid_file = instance._data.get(self.name)
            # If a file already exists, delete it
@@ -874,7 +1032,8 @@ class FileField(BaseField):
                grid_file.put(value)
            else:
                # Create a new proxy object as we don't already have one
-                instance._data[key] = GridFSProxy(key=key, instance=instance)
+                instance._data[key] = self.proxy_class(key=key, instance=instance,
+                                                       collection_name=self.collection_name)
                instance._data[key].put(value)
        else:
            instance._data[key] = value
@@ -883,18 +1042,181 @@ class FileField(BaseField):

    def to_mongo(self, value):
        # Store the GridFS file id in MongoDB
-        if isinstance(value, GridFSProxy) and value.grid_id is not None:
+        if isinstance(value, self.proxy_class) and value.grid_id is not None:
            return value.grid_id
        return None

    def to_python(self, value):
        if value is not None:
-            return GridFSProxy(value)
+            return self.proxy_class(value,
+                                    collection_name=self.collection_name,
+                                    db_alias=self.db_alias)

    def validate(self, value):
        if value.grid_id is not None:
-            assert isinstance(value, GridFSProxy)
-            assert isinstance(value.grid_id, pymongo.objectid.ObjectId)
+            if not isinstance(value, self.proxy_class):
+                self.error('FileField only accepts GridFSProxy values')
+            if not isinstance(value.grid_id, ObjectId):
+                self.error('Invalid GridFSProxy value')


class ImageGridFsProxy(GridFSProxy):
    """
    Proxy for ImageField

    versionadded: 0.6
    """
    def put(self, file_obj, **kwargs):
        """
        Insert a image in database
        applying field properties (size, thumbnail_size)
        """
        field = self.instance._fields[self.key]

        try:
            img = Image.open(file_obj)
        except:
            raise ValidationError('Invalid image')

        if (field.size and (img.size[0] > field.size['width'] or
                            img.size[1] > field.size['height'])):
            size = field.size

            if size['force']:
                img = ImageOps.fit(img,
                                   (size['width'],
                                    size['height']),
                                   Image.ANTIALIAS)
            else:
                img.thumbnail((size['width'],
                               size['height']),
                              Image.ANTIALIAS)

        thumbnail = None
        if field.thumbnail_size:
            size = field.thumbnail_size

            if size['force']:
                thumbnail = ImageOps.fit(img,
                                         (size['width'],
                                          size['height']),
                                         Image.ANTIALIAS)
            else:
                thumbnail = img.copy()
                thumbnail.thumbnail((size['width'],
                                     size['height']),
                                    Image.ANTIALIAS)

        if thumbnail:
            thumb_id = self._put_thumbnail(thumbnail,
                                           img.format)
        else:
            thumb_id = None

        w, h = img.size

        io = StringIO()
        img.save(io, img.format)
        io.seek(0)

        return super(ImageGridFsProxy, self).put(io,
                                                 width=w,
                                                 height=h,
                                                 format=img.format,
                                                 thumbnail_id=thumb_id,
                                                 **kwargs)

    def delete(self, *args, **kwargs):
        #deletes thumbnail
        out = self.get()
        if out and out.thumbnail_id:
            self.fs.delete(out.thumbnail_id)

        return super(ImageGridFsProxy, self).delete(*args, **kwargs)

    def _put_thumbnail(self, thumbnail, format, **kwargs):
        w, h = thumbnail.size

        io = StringIO()
        thumbnail.save(io, format)
        io.seek(0)

        return self.fs.put(io, width=w,
                           height=h,
                           format=format,
                           **kwargs)

    @property
    def size(self):
        """
        return a width, height of image
        """
        out = self.get()
        if out:
            return out.width, out.height

    @property
    def format(self):
        """
        return format of image
        ex: PNG, JPEG, GIF, etc
        """
        out = self.get()
        if out:
            return out.format

    @property
    def thumbnail(self):
        """
        return a gridfs.grid_file.GridOut
        representing a thumbnail of Image
        """
        out = self.get()
        if out and out.thumbnail_id:
            return self.fs.get(out.thumbnail_id)

    def write(self, *args, **kwargs):
        raise RuntimeError("Please use \"put\" method instead")

    def writelines(self, *args, **kwargs):
        raise RuntimeError("Please use \"put\" method instead")


class ImproperlyConfigured(Exception):
    pass


class ImageField(FileField):
    """
    A Image File storage field.

    @size (width, height, force):
        max size to store images, if larger will be automatically resized
        ex: size=(800, 600, True)

    @thumbnail (width, height, force):
        size to generate a thumbnail

    .. versionadded:: 0.6
    """
    proxy_class = ImageGridFsProxy

    def __init__(self, size=None, thumbnail_size=None,
                 collection_name='images', **kwargs):
        if not Image:
            raise ImproperlyConfigured("PIL library was not found")

        params_size = ('width', 'height', 'force')
        extra_args = dict(size=size, thumbnail_size=thumbnail_size)
        for att_name, att in extra_args.items():
            if att and (isinstance(att, tuple) or isinstance(att, list)):
                setattr(self, att_name, dict(
                    map(None, params_size, att)))
            else:
                setattr(self, att_name, None)

        super(ImageField, self).__init__(
            collection_name=collection_name,
            **kwargs)
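
Pulling the pieces together, a hedged usage sketch of the new field (Avatar and the file name are illustrative):

.. code-block:: python

    class Avatar(Document):
        image = ImageField(size=(800, 600, True),           # force-fit to 800x600
                           thumbnail_size=(100, 100, True))

    avatar = Avatar()
    avatar.image.put(open('photo.png', 'rb'))   # resizes and stores a thumbnail
    avatar.save()

    print avatar.image.format                   # e.g. 'PNG'
    print avatar.image.size                     # (width, height) tuple
    thumb = avatar.image.thumbnail              # gridfs GridOut of the thumbnail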
class GeoPointField(BaseField):
@@ -909,14 +1231,14 @@ class GeoPointField(BaseField):
        """Make sure that a geo-value is of type (x, y)
        """
        if not isinstance(value, (list, tuple)):
-            raise ValidationError('GeoPointField can only accept tuples or '
-                                  'lists of (x, y)')
+            self.error('GeoPointField can only accept tuples or lists '
+                       'of (x, y)')

        if not len(value) == 2:
-            raise ValidationError('Value must be a two-dimensional point.')
+            self.error('Value must be a two-dimensional point')
        if (not isinstance(value[0], (float, int)) and
                not isinstance(value[1], (float, int))):
-            raise ValidationError('Both values in point must be float or int.')
+            self.error('Both values in point must be float or int')
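
A short usage sketch for the validation above; the point is stored as a plain two-element list, and Venue is an illustrative name:

.. code-block:: python

    class Venue(Document):
        location = GeoPointField()

    Venue(location=[40.73, -73.99]).save()           # list or tuple of two numbers
    Venue(location=[40.73]).save()                   # raises ValidationError
    nearby = Venue.objects(location__near=[40.7, -74.0])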
class SequenceField(IntField):
@@ -932,8 +1254,9 @@ class SequenceField(IntField):

    .. versionadded:: 0.5
    """
-    def __init__(self, collection_name=None, *args, **kwargs):
+    def __init__(self, collection_name=None, db_alias = None, *args, **kwargs):
        self.collection_name = collection_name or 'mongoengine.counters'
+        self.db_alias = db_alias or DEFAULT_CONNECTION_NAME
        return super(SequenceField, self).__init__(*args, **kwargs)

    def generate_new_value(self):
@@ -942,7 +1265,7 @@ class SequenceField(IntField):
        """
        sequence_id = "{0}.{1}".format(self.owner_document._get_collection_name(),
                                       self.name)
-        collection = _get_db()[self.collection_name]
+        collection = get_db(alias = self.db_alias )[self.collection_name]
        counter = collection.find_and_modify(query={"_id": sequence_id},
                                             update={"$inc": {"next": 1}},
                                             new=True,
@@ -977,3 +1300,30 @@ class SequenceField(IntField):
        if value is None:
            value = self.generate_new_value()
        return value
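
generate_new_value above leans on find_and_modify, so each allocation is a single atomic round trip to the counters collection. A hedged sketch of the observable behaviour (Ticket is illustrative):

.. code-block:: python

    class Ticket(Document):
        number = SequenceField()

    t1 = Ticket()
    t1.save()    # t1.number == 1
    t2 = Ticket()
    t2.save()    # t2.number == 2
    # The counters collection now holds roughly:
    #   {'_id': 'ticket.number', 'next': 2}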

class UUIDField(BaseField):
    """A UUID field.

    .. versionadded:: 0.6
    """

    def __init__(self, **kwargs):
        super(UUIDField, self).__init__(**kwargs)

    def to_python(self, value):
        if not isinstance(value, basestring):
            value = unicode(value)
        return uuid.UUID(value)

    def to_mongo(self, value):
        return unicode(value)

    def validate(self, value):
        if not isinstance(value, uuid.UUID):
            if not isinstance(value, basestring):
                value = str(value)
            try:
                value = uuid.UUID(value)
            except Exception, exc:
                self.error('Could not convert to UUID: %s' % exc)
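
A usage sketch for the new field (ApiClient is illustrative):

.. code-block:: python

    import uuid

    class ApiClient(Document):
        api_key = UUIDField()

    client = ApiClient(api_key=uuid.uuid4())
    client.validate()                            # ok; stored as its unicode form
    ApiClient(api_key='not-a-uuid').validate()   # raises ValidationError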

File diff suppressed because it is too large

@@ -42,3 +42,5 @@ pre_save = _signals.signal('pre_save')
post_save = _signals.signal('post_save')
pre_delete = _signals.signal('pre_delete')
post_delete = _signals.signal('post_delete')
+pre_bulk_insert = _signals.signal('pre_bulk_insert')
+post_bulk_insert = _signals.signal('post_bulk_insert')

@@ -1,4 +1,4 @@
-from mongoengine.connection import _get_db
+from mongoengine.connection import get_db


class query_counter(object):
@@ -7,7 +7,7 @@ class query_counter(object):
    def __init__(self):
        """ Construct the query_counter. """
        self.counter = 0
-        self.db = _get_db()
+        self.db = get_db()

    def __enter__(self):
        """ On every with block we need to drop the profile collection. """
python-mongoengine.spec (new file, 54 lines)
@@ -0,0 +1,54 @@
# sitelib for noarch packages, sitearch for others (remove the unneeded one)
%{!?python_sitelib: %global python_sitelib %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())")}
%{!?python_sitearch: %global python_sitearch %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib(1))")}

%define srcname mongoengine

Name:           python-%{srcname}
Version:        0.6.16
Release:        1%{?dist}
Summary:        A Python Document-Object Mapper for working with MongoDB

Group:          Development/Libraries
License:        MIT
URL:            https://github.com/MongoEngine/mongoengine
Source0:        %{srcname}-%{version}.tar.bz2

BuildRequires:  python-devel
BuildRequires:  python-setuptools

Requires:       mongodb
Requires:       pymongo
Requires:       python-blinker
Requires:       python-imaging


%description
MongoEngine is an ORM-like layer on top of PyMongo.

%prep
%setup -q -n %{srcname}-%{version}


%build
# Remove CFLAGS=... for noarch packages (unneeded)
CFLAGS="$RPM_OPT_FLAGS" %{__python} setup.py build


%install
rm -rf $RPM_BUILD_ROOT
%{__python} setup.py install -O1 --skip-build --root $RPM_BUILD_ROOT

%clean
rm -rf $RPM_BUILD_ROOT

%files
%defattr(-,root,root,-)
%doc docs AUTHORS LICENSE README.rst
# For noarch packages: sitelib
%{python_sitelib}/*
# For arch-specific packages: sitearch
# %{python_sitearch}/*

%changelog
* See: http://readthedocs.org/docs/mongoengine-odm/en/latest/changelog.html

requirements.txt (new file, 1 line)
@@ -0,0 +1 @@
pymongo

setup.cfg (new file, 13 lines)
@@ -0,0 +1,13 @@
[aliases]
test = nosetests

[nosetests]
verbosity = 2
detailed-errors = 1
#with-coverage = 1
#cover-erase = 1
#cover-html = 1
#cover-html-dir = ../htmlcov
#cover-package = mongoengine
where = tests
#tests = test_bugfix.py
setup.py
@@ -35,10 +35,12 @@ CLASSIFIERS = [

setup(name='mongoengine',
      version=VERSION,
-      packages=find_packages(),
+      packages=find_packages(exclude=('tests',)),
      author='Harry Marr',
      author_email='harry.marr@{nospam}gmail.com',
-      url='http://hmarr.com/mongoengine/',
+      maintainer="Ross Lawley",
+      maintainer_email="ross.lawley@{nospam}gmail.com",
+      url='http://mongoengine.org/',
      license='MIT',
      include_package_data=True,
      description=DESCRIPTION,
@@ -46,6 +48,5 @@ setup(name='mongoengine',
      platforms=['any'],
      classifiers=CLASSIFIERS,
      install_requires=['pymongo'],
      test_suite='tests',
-      tests_require=['blinker', 'django==1.3']
+      tests_require=['nose', 'coverage', 'blinker', 'django>=1.3', 'PIL']
      )

@@ -1,9 +1,6 @@
from datetime import datetime
-import pymongo

from mongoengine import *
from mongoengine.base import BaseField
-from mongoengine.connection import _get_db


class PickleEmbedded(EmbeddedDocument):
@@ -15,6 +12,7 @@ class PickleTest(Document):
    string = StringField(choices=(('One', '1'), ('Two', '2')))
    embedded = EmbeddedDocumentField(PickleEmbedded)
    lists = ListField(StringField())
+    photo = FileField()


class Mixin(object):
@@ -22,4 +20,4 @@ class Mixin(object):


class Base(Document):
-    pass
+    meta = {'allow_inheritance': True}

tests/mongoengine.png (new binary file, 8.1 KiB; binary file not shown)
tests/test_connection.py (new file, 98 lines)
@@ -0,0 +1,98 @@
import datetime
import pymongo
import unittest

import mongoengine.connection

from bson.tz_util import utc

from mongoengine import *
from mongoengine.connection import get_db, get_connection, ConnectionError


class ConnectionTest(unittest.TestCase):

    def tearDown(self):
        mongoengine.connection._connection_settings = {}
        mongoengine.connection._connections = {}
        mongoengine.connection._dbs = {}

    def test_connect(self):
        """Ensure that the connect() method works properly.
        """
        connect('mongoenginetest')

        conn = get_connection()
        self.assertTrue(isinstance(conn, pymongo.connection.Connection))

        db = get_db()
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'mongoenginetest')

        connect('mongoenginetest2', alias='testdb')
        conn = get_connection('testdb')
        self.assertTrue(isinstance(conn, pymongo.connection.Connection))

    def test_connect_uri(self):
        """Ensure that the connect() method works properly with uri's
        """
        c = connect(db='mongoenginetest', alias='admin')
        c.admin.system.users.remove({})
        c.mongoenginetest.system.users.remove({})

        c.admin.add_user("admin", "password")
        c.admin.authenticate("admin", "password")
        c.mongoenginetest.add_user("username", "password")

        self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost')

        connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest')

        conn = get_connection()
        self.assertTrue(isinstance(conn, pymongo.connection.Connection))

        db = get_db()
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'mongoenginetest')

    def test_register_connection(self):
        """Ensure that connections with different aliases may be registered.
        """
        register_connection('testdb', 'mongoenginetest2')

        self.assertRaises(ConnectionError, get_connection)
        conn = get_connection('testdb')
        self.assertTrue(isinstance(conn, pymongo.connection.Connection))

        db = get_db('testdb')
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'mongoenginetest2')

    def test_connection_kwargs(self):
        """Ensure that connection kwargs get passed to pymongo.
        """
        connect('mongoenginetest', alias='t1', tz_aware=True)
        conn = get_connection('t1')

        self.assertTrue(conn.tz_aware)

        connect('mongoenginetest2', alias='t2')
        conn = get_connection('t2')
        self.assertFalse(conn.tz_aware)

    def test_datetime(self):
        connect('mongoenginetest', tz_aware=True)
        d = datetime.datetime(2010, 5, 5, tzinfo=utc)

        class DateDoc(Document):
            the_date = DateTimeField(required=True)

        DateDoc.drop_collection()
        DateDoc(the_date=d).save()

        date_doc = DateDoc.objects.first()
        self.assertEqual(d, date_doc.the_date)


if __name__ == '__main__':
    unittest.main()
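
The alias registry these tests exercise also reaches the document layer; a hedged sketch of the intended multi-database pattern (alias and class names illustrative):

.. code-block:: python

    from mongoengine import Document, StringField, connect, register_connection

    connect('maindb')                            # default alias
    register_connection('analytics', 'statsdb')  # named alias -> database name

    class Event(Document):
        meta = {'db_alias': 'analytics'}         # this collection lives in statsdb
        name = StringField()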
@@ -1,7 +1,7 @@
import unittest

from mongoengine import *
-from mongoengine.connection import _get_db
+from mongoengine.connection import get_db
from mongoengine.tests import query_counter


@@ -9,7 +9,7 @@ class FieldTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
-        self.db = _get_db()
+        self.db = get_db()

    def test_list_item_dereference(self):
        """Ensure that DBRef items in ListFields are dereferenced.
@@ -188,6 +188,51 @@ class FieldTest(unittest.TestCase):

        self.assertEquals("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects())

    def test_circular_tree_reference(self):
        """Ensure you can handle circular references with more than one level
        """
        class Other(EmbeddedDocument):
            name = StringField()
            friends = ListField(ReferenceField('Person'))

        class Person(Document):
            name = StringField()
            other = EmbeddedDocumentField(Other, default=lambda: Other())

            def __repr__(self):
                return "<Person: %s>" % self.name

        Person.drop_collection()
        paul = Person(name="Paul")
        paul.save()
        maria = Person(name="Maria")
        maria.save()
        julia = Person(name='Julia')
        julia.save()
        anna = Person(name='Anna')
        anna.save()

        paul.other.friends = [maria, julia, anna]
        paul.other.name = "Paul's friends"
        paul.save()

        maria.other.friends = [paul, julia, anna]
        maria.other.name = "Maria's friends"
        maria.save()

        julia.other.friends = [paul, maria, anna]
        julia.other.name = "Julia's friends"
        julia.save()

        anna.other.friends = [paul, maria, julia]
        anna.other.name = "Anna's friends"
        anna.save()

        self.assertEquals(
            "[<Person: Paul>, <Person: Maria>, <Person: Julia>, <Person: Anna>]",
            "%s" % Person.objects()
        )

    def test_generic_reference(self):

        class UserA(Document):
@@ -715,3 +760,106 @@ class FieldTest(unittest.TestCase):
        UserB.drop_collection()
        UserC.drop_collection()
        Group.drop_collection()

    def test_multidirectional_lists(self):

        class Asset(Document):
            name = StringField(max_length=250, required=True)
            parent = GenericReferenceField(default=None)
            parents = ListField(GenericReferenceField())
            children = ListField(GenericReferenceField())

        Asset.drop_collection()

        root = Asset(name='', path="/", title="Site Root")
        root.save()

        company = Asset(name='company', title='Company', parent=root, parents=[root])
        company.save()

        root.children = [company]
        root.save()

        root = root.reload()
        self.assertEquals(root.children, [company])
        self.assertEquals(company.parents, [root])

    def test_dict_in_dbref_instance(self):

        class Person(Document):
            name = StringField(max_length=250, required=True)

        class Room(Document):
            number = StringField(max_length=250, required=True)
            staffs_with_position = ListField(DictField())

        Person.drop_collection()
        Room.drop_collection()

        bob = Person.objects.create(name='Bob')
        bob.save()
        sarah = Person.objects.create(name='Sarah')
        sarah.save()

        room_101 = Room.objects.create(number="101")
        room_101.staffs_with_position = [
            {'position_key': 'window', 'staff': sarah},
            {'position_key': 'door', 'staff': bob.to_dbref()}]
        room_101.save()

        room = Room.objects.first().select_related()
        self.assertEquals(room.staffs_with_position[0]['staff'], sarah)
        self.assertEquals(room.staffs_with_position[1]['staff'], bob)

    def test_document_reload_no_inheritance(self):
        class Foo(Document):
            meta = {'allow_inheritance': False}
            bar = ReferenceField('Bar')
            baz = ReferenceField('Baz')

        class Bar(Document):
            meta = {'allow_inheritance': False}
            msg = StringField(required=True, default='Blammo!')

        class Baz(Document):
            meta = {'allow_inheritance': False}
            msg = StringField(required=True, default='Kaboom!')

        Foo.drop_collection()
        Bar.drop_collection()
        Baz.drop_collection()

        bar = Bar()
        bar.save()
        baz = Baz()
        baz.save()
        foo = Foo()
        foo.bar = bar
        foo.baz = baz
        foo.save()
        foo.reload()

        self.assertEquals(type(foo.bar), Bar)
        self.assertEquals(type(foo.baz), Baz)

    def test_list_lookup_not_checked_in_map(self):
        """Ensure we dereference list data correctly
        """
        class Comment(Document):
            id = IntField(primary_key=True)
            text = StringField()

        class Message(Document):
            id = IntField(primary_key=True)
            comments = ListField(ReferenceField(Comment))

        Comment.drop_collection()
        Message.drop_collection()

        c1 = Comment(id=0, text='zero').save()
        c2 = Comment(id=1, text='one').save()
        Message(id=1, comments=[c1, c2]).save()

        msg = Message.objects.get(id=1)
        self.assertEqual(0, msg.comments[0].id)
        self.assertEqual(1, msg.comments[1].id)
@@ -8,8 +8,14 @@ from mongoengine.django.shortcuts import get_document_or_404
from django.http import Http404
from django.template import Context, Template
from django.conf import settings
+from django.core.paginator import Paginator

settings.configure()

+from django.contrib.sessions.tests import SessionTestsMixin
+from mongoengine.django.sessions import SessionStore, MongoSession
+

class QuerySetTest(unittest.TestCase):

    def setUp(self):
@@ -67,3 +73,38 @@ class QuerySetTest(unittest.TestCase):
        self.assertRaises(Http404, get_document_or_404, self.Person, pk='1234')
        self.assertEqual(p, get_document_or_404(self.Person, pk=p.pk))

    def test_pagination(self):
        """Ensure that Pagination works as expected
        """
        class Page(Document):
            name = StringField()

        Page.drop_collection()

        for i in xrange(1, 11):
            Page(name=str(i)).save()

        paginator = Paginator(Page.objects.all(), 2)

        t = Template("{% for i in page.object_list %}{{ i.name }}:{% endfor %}")
        for p in paginator.page_range:
            d = {"page": paginator.page(p)}
            end = p * 2
            start = end - 1
            self.assertEqual(t.render(Context(d)), u'%d:%d:' % (start, end))


class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase):
    backend = SessionStore

    def setUp(self):
        connect(db='mongoenginetest')
        MongoSession.drop_collection()
        super(MongoDBSessionTest, self).setUp()

    def test_first_save(self):
        session = SessionStore()
        session['test'] = True
        session.save()
        self.assertTrue('test' in session)

File diff suppressed because it is too large
tests/test_dynamic_document.py (new file, 502 lines)
@@ -0,0 +1,502 @@
import unittest

from mongoengine import *
from mongoengine.connection import get_db


class DynamicDocTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

        class Person(DynamicDocument):
            name = StringField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()

        self.Person = Person

    def test_simple_dynamic_document(self):
        """Ensures simple dynamic documents are saved correctly"""

        p = self.Person()
        p.name = "James"
        p.age = 34

        self.assertEquals(p.to_mongo(),
                          {"_types": ["Person"], "_cls": "Person",
                           "name": "James", "age": 34}
                          )

        p.save()

        self.assertEquals(self.Person.objects.first().age, 34)

        # Confirm no changes to self.Person
        self.assertFalse(hasattr(self.Person, 'age'))

    def test_dynamic_document_delta(self):
        """Ensures simple dynamic documents can delta correctly"""
        p = self.Person(name="James", age=34)
        self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {}))

        p.doc = 123
        del(p.doc)
        self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {'doc': 1}))

    def test_change_scope_of_variable(self):
        """Test changing the scope of a dynamic field has no adverse effects"""
        p = self.Person()
        p.name = "Dean"
        p.misc = 22
        p.save()

        p = self.Person.objects.get()
        p.misc = {'hello': 'world'}
        p.save()

        p = self.Person.objects.get()
        self.assertEquals(p.misc, {'hello': 'world'})

    def test_delete_dynamic_field(self):
        """Test deleting a dynamic field works"""
        self.Person.drop_collection()
        p = self.Person()
        p.name = "Dean"
        p.misc = 22
        p.save()

        p = self.Person.objects.get()
        p.misc = {'hello': 'world'}
        p.save()

        p = self.Person.objects.get()
        self.assertEquals(p.misc, {'hello': 'world'})
        collection = self.db[self.Person._get_collection_name()]
        obj = collection.find_one()
        self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'misc', 'name'])

        del(p.misc)
        p.save()

        p = self.Person.objects.get()
        self.assertFalse(hasattr(p, 'misc'))

        obj = collection.find_one()
        self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'name'])

    def test_dynamic_document_queries(self):
        """Ensure we can query dynamic fields"""
        p = self.Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        self.assertEquals(1, self.Person.objects(age=22).count())
        p = self.Person.objects(age=22)
        p = p.get()
        self.assertEquals(22, p.age)

    def test_complex_dynamic_document_queries(self):
        class Person(DynamicDocument):
            name = StringField()

        Person.drop_collection()

        p = Person(name="test")
        p.age = "ten"
        p.save()

        p1 = Person(name="test1")
        p1.age = "less then ten and a half"
        p1.save()

        p2 = Person(name="test2")
        p2.age = 10
        p2.save()

        self.assertEquals(Person.objects(age__icontains='ten').count(), 2)
        self.assertEquals(Person.objects(age__gte=10).count(), 1)

    def test_complex_data_lookups(self):
        """Ensure you can query dynamic document dynamic fields"""
        p = self.Person()
        p.misc = {'hello': 'world'}
        p.save()

        self.assertEquals(1, self.Person.objects(misc__hello='world').count())

    def test_inheritance(self):
        """Ensure that dynamic document plays nice with inheritance"""
        class Employee(self.Person):
            salary = IntField()

        Employee.drop_collection()

        self.assertTrue('name' in Employee._fields)
        self.assertTrue('salary' in Employee._fields)
        self.assertEqual(Employee._get_collection_name(),
                         self.Person._get_collection_name())

        joe_bloggs = Employee()
        joe_bloggs.name = "Joe Bloggs"
        joe_bloggs.salary = 10
        joe_bloggs.age = 20
        joe_bloggs.save()

        self.assertEquals(1, self.Person.objects(age=20).count())
        self.assertEquals(1, Employee.objects(age=20).count())

        joe_bloggs = self.Person.objects.first()
        self.assertTrue(isinstance(joe_bloggs, Employee))

    def test_embedded_dynamic_document(self):
        """Test dynamic embedded documents"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
            "embedded_field": {
                "_types": ['Embedded'], "_cls": "Embedded",
                "string_field": "hello",
                "int_field": 1,
                "dict_field": {"hello": "world"},
                "list_field": ['1', 2, {'hello': 'world'}]
            }
        })
        doc.save()

        doc = Doc.objects.first()
        self.assertEquals(doc.embedded_field.__class__, Embedded)
        self.assertEquals(doc.embedded_field.string_field, "hello")
        self.assertEquals(doc.embedded_field.int_field, 1)
        self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'})
        self.assertEquals(doc.embedded_field.list_field, ['1', 2, {'hello': 'world'}])

    def test_complex_embedded_documents(self):
        """Test complex dynamic embedded documents setups"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        embedded_1.list_field = ['1', 2, embedded_2]
        doc.embedded_field = embedded_1

        self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
            "embedded_field": {
                "_types": ['Embedded'], "_cls": "Embedded",
                "string_field": "hello",
                "int_field": 1,
                "dict_field": {"hello": "world"},
                "list_field": ['1', 2,
                    {"_types": ['Embedded'], "_cls": "Embedded",
                     "string_field": "hello",
                     "int_field": 1,
                     "dict_field": {"hello": "world"},
                     "list_field": ['1', 2, {'hello': 'world'}]}
                ]
            }
        })
        doc.save()
        doc = Doc.objects.first()
        self.assertEquals(doc.embedded_field.__class__, Embedded)
        self.assertEquals(doc.embedded_field.string_field, "hello")
        self.assertEquals(doc.embedded_field.int_field, 1)
        self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'})
        self.assertEquals(doc.embedded_field.list_field[0], '1')
        self.assertEquals(doc.embedded_field.list_field[1], 2)

        embedded_field = doc.embedded_field.list_field[2]

        self.assertEquals(embedded_field.__class__, Embedded)
        self.assertEquals(embedded_field.string_field, "hello")
        self.assertEquals(embedded_field.int_field, 1)
        self.assertEquals(embedded_field.dict_field, {'hello': 'world'})
        self.assertEquals(embedded_field.list_field, ['1', 2, {'hello': 'world'}])

    def test_delta_for_dynamic_documents(self):
        p = self.Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        p.age = 24
        self.assertEquals(p.age, 24)
        self.assertEquals(p._get_changed_fields(), ['age'])
        self.assertEquals(p._delta(), ({'age': 24}, {}))

        p = self.Person.objects(age=22).get()
        p.age = 24
        self.assertEquals(p.age, 24)
        self.assertEquals(p._get_changed_fields(), ['age'])
        self.assertEquals(p._delta(), ({'age': 24}, {}))

        p.save()
        self.assertEquals(1, self.Person.objects(age=24).count())

    def test_delta(self):

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEquals(doc._get_changed_fields(), [])
        self.assertEquals(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEquals(doc._get_changed_fields(), ['string_field'])
        self.assertEquals(doc._delta(), ({'string_field': 'hello'}, {}))

        doc._changed_fields = []
        doc.int_field = 1
        self.assertEquals(doc._get_changed_fields(), ['int_field'])
        self.assertEquals(doc._delta(), ({'int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEquals(doc._get_changed_fields(), ['dict_field'])
        self.assertEquals(doc._delta(), ({'dict_field': dict_value}, {}))

        doc._changed_fields = []
        list_value = ['1', 2, {'hello': 'world'}]
        doc.list_field = list_value
        self.assertEquals(doc._get_changed_fields(), ['list_field'])
        self.assertEquals(doc._delta(), ({'list_field': list_value}, {}))

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        self.assertEquals(doc._get_changed_fields(), ['dict_field'])
        self.assertEquals(doc._delta(), ({}, {'dict_field': 1}))

        doc._changed_fields = []
        doc.list_field = []
        self.assertEquals(doc._get_changed_fields(), ['list_field'])
        self.assertEquals(doc._delta(), ({}, {'list_field': 1}))

    def test_delta_recursive(self):
        """Testing deltaing works with dynamic documents"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEquals(doc._get_changed_fields(), [])
        self.assertEquals(doc._delta(), ({}, {}))

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEquals(doc._get_changed_fields(), ['embedded_field'])

        embedded_delta = {
            'string_field': 'hello',
            'int_field': 1,
            'dict_field': {'hello': 'world'},
            'list_field': ['1', 2, {'hello': 'world'}]
        }
        self.assertEquals(doc.embedded_field._delta(), (embedded_delta, {}))
        embedded_delta.update({
            '_types': ['Embedded'],
            '_cls': 'Embedded',
        })
        self.assertEquals(doc._delta(), ({'embedded_field': embedded_delta}, {}))

        doc.save()
        doc.reload()

        doc.embedded_field.dict_field = {}
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.dict_field'])
        self.assertEquals(doc.embedded_field._delta(), ({}, {'dict_field': 1}))

        self.assertEquals(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
        doc.save()
        doc.reload()

        doc.embedded_field.list_field = []
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
        self.assertEquals(doc.embedded_field._delta(), ({}, {'list_field': 1}))
        self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field': 1}))
        doc.save()
        doc.reload()

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        doc.embedded_field.list_field = ['1', 2, embedded_2]
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
        self.assertEquals(doc.embedded_field._delta(), ({
            'list_field': ['1', 2, {
                '_cls': 'Embedded',
                '_types': ['Embedded'],
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))

        self.assertEquals(doc._delta(), ({
            'embedded_field.list_field': ['1', 2, {
                '_cls': 'Embedded',
                '_types': ['Embedded'],
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))
        doc.save()
        doc.reload()

        self.assertEquals(doc.embedded_field.list_field[2]._changed_fields, [])
        self.assertEquals(doc.embedded_field.list_field[0], '1')
        self.assertEquals(doc.embedded_field.list_field[1], 2)
        for k in doc.embedded_field.list_field[2]._fields:
            self.assertEquals(doc.embedded_field.list_field[2][k], embedded_2[k])

        doc.embedded_field.list_field[2].string_field = 'world'
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field.2.string_field'])
        self.assertEquals(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {}))
        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {}))
        doc.save()
        doc.reload()
        self.assertEquals(doc.embedded_field.list_field[2].string_field, 'world')

        # Test multiple assignments
        doc.embedded_field.list_field[2].string_field = 'hello world'
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
        self.assertEquals(doc.embedded_field._delta(), ({
            'list_field': ['1', 2, {
                '_types': ['Embedded'],
                '_cls': 'Embedded',
                'string_field': 'hello world',
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
                'dict_field': {'hello': 'world'}}]}, {}))
        self.assertEquals(doc._delta(), ({
            'embedded_field.list_field': ['1', 2, {
                '_types': ['Embedded'],
                '_cls': 'Embedded',
                'string_field': 'hello world',
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
                'dict_field': {'hello': 'world'}}
            ]}, {}))
        doc.save()
        doc.reload()
        self.assertEquals(doc.embedded_field.list_field[2].string_field, 'hello world')

        # Test list native methods
        doc.embedded_field.list_field[2].list_field.pop(0)
        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {}))
        doc.save()
        doc.reload()

        doc.embedded_field.list_field[2].list_field.append(1)
        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {}))
        doc.save()
        doc.reload()
        self.assertEquals(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1])

        doc.embedded_field.list_field[2].list_field.sort()
        doc.save()
        doc.reload()
        self.assertEquals(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}])

        del(doc.embedded_field.list_field[2].list_field[2]['hello'])
        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {}))
        doc.save()
        doc.reload()

        del(doc.embedded_field.list_field[2].list_field)
        self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1}))

        doc.save()
        doc.reload()

        doc.dict_field = {'embedded': embedded_1}
        doc.save()
        doc.reload()

        doc.dict_field['embedded'].string_field = 'Hello World'
        self.assertEquals(doc._get_changed_fields(), ['dict_field.embedded.string_field'])
        self.assertEquals(doc._delta(), ({'dict_field.embedded.string_field': 'Hello World'}, {}))

    def test_indexes(self):
        """Ensure that indexes are used when meta[indexes] is specified.
        """
        class BlogPost(DynamicDocument):
            meta = {
                'indexes': [
                    '-date',
                    ('category', '-date')
                ],
            }

        BlogPost.drop_collection()

        info = BlogPost.objects._collection.index_information()
        # _id, '-date', ('cat', 'date')
        # NB: there is no index on _types by itself, since
        # the indices on -date and tags will both contain
        # _types as first element in the key
        self.assertEqual(len(info), 3)

        # Indexes are lazy so use list() to perform query
        list(BlogPost.objects)
        info = BlogPost.objects._collection.index_information()
        info = [value['key'] for key, value in info.iteritems()]
        self.assertTrue([('_types', 1), ('category', 1), ('date', -1)]
                        in info)
        self.assertTrue([('_types', 1), ('date', -1)] in info)
@@ -1,20 +1,29 @@
|
||||
import unittest
|
||||
import datetime
|
||||
from decimal import Decimal
|
||||
|
||||
import pymongo
|
||||
import os
|
||||
import unittest
|
||||
import uuid
|
||||
import StringIO
|
||||
import tempfile
|
||||
import gridfs
|
||||
|
||||
from decimal import Decimal
|
||||
|
||||
from mongoengine import *
|
||||
from mongoengine.connection import _get_db
|
||||
from mongoengine.connection import get_db
|
||||
from mongoengine.base import _document_registry, NotRegistered
|
||||
|
||||
TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png')
|
||||
|
||||
|
||||
class FieldTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
connect(db='mongoenginetest')
|
||||
self.db = _get_db()
|
||||
self.db = get_db()
|
||||
|
||||
def tearDown(self):
|
||||
self.db.drop_collection('fs.files')
|
||||
self.db.drop_collection('fs.chunks')
|
||||
|
||||
def test_default_values(self):
|
||||
"""Ensure that default field values are used when creating a document.
|
||||
@@ -44,6 +53,93 @@ class FieldTest(unittest.TestCase):
|
||||
person = Person(age=30)
|
||||
self.assertRaises(ValidationError, person.validate)
|
||||
|
||||
def test_not_required_handles_none_in_update(self):
|
||||
"""Ensure that every fields should accept None if required is False.
|
||||
"""
|
||||
|
||||
class HandleNoneFields(Document):
|
||||
str_fld = StringField()
|
||||
int_fld = IntField()
|
||||
flt_fld = FloatField()
|
||||
comp_dt_fld = ComplexDateTimeField()
|
||||
|
||||
HandleNoneFields.drop_collection()
|
||||
|
||||
doc = HandleNoneFields()
|
||||
doc.str_fld = u'spam ham egg'
|
||||
doc.int_fld = 42
|
||||
doc.flt_fld = 4.2
|
||||
doc.com_dt_fld = datetime.datetime.utcnow()
|
||||
doc.save()
|
||||
|
||||
res = HandleNoneFields.objects(id=doc.id).update(
|
||||
set__str_fld=None,
|
||||
set__int_fld=None,
|
||||
set__flt_fld=None,
|
||||
set__comp_dt_fld=None,
|
||||
)
|
||||
self.assertEqual(res, 1)
|
||||
|
||||
# Retrive data from db and verify it.
|
||||
ret = HandleNoneFields.objects.all()[0]
|
||||
self.assertEqual(ret.str_fld, None)
|
||||
self.assertEqual(ret.int_fld, None)
|
||||
self.assertEqual(ret.flt_fld, None)
|
||||
|
||||
# Return current time if retrived value is None.
|
||||
self.assertTrue(isinstance(ret.comp_dt_fld, datetime.datetime))
|
||||
|
||||
def test_not_required_handles_none_from_database(self):
|
||||
"""Ensure that every fields can handle null values from the database.
|
||||
"""
|
||||
|
||||
class HandleNoneFields(Document):
|
||||
str_fld = StringField(required=True)
|
||||
int_fld = IntField(required=True)
|
||||
flt_fld = FloatField(required=True)
|
||||
comp_dt_fld = ComplexDateTimeField(required=True)
|
||||
|
||||
HandleNoneFields.drop_collection()
|
||||
|
||||
doc = HandleNoneFields()
|
||||
doc.str_fld = u'spam ham egg'
|
||||
doc.int_fld = 42
|
||||
doc.flt_fld = 4.2
|
||||
doc.com_dt_fld = datetime.datetime.utcnow()
|
||||
doc.save()
|
||||
|
||||
collection = self.db[HandleNoneFields._get_collection_name()]
|
||||
obj = collection.update({"_id": doc.id}, {"$unset": {
|
||||
"str_fld": 1,
|
||||
"int_fld": 1,
|
||||
"flt_fld": 1,
|
||||
"comp_dt_fld": 1}
|
||||
})
|
||||
|
||||
# Retrive data from db and verify it.
|
||||
ret = HandleNoneFields.objects.all()[0]
|
||||
|
||||
self.assertEqual(ret.str_fld, None)
|
||||
self.assertEqual(ret.int_fld, None)
|
||||
self.assertEqual(ret.flt_fld, None)
|
||||
# Return current time if retrived value is None.
|
||||
self.assert_(isinstance(ret.comp_dt_fld, datetime.datetime))
|
||||
|
||||
self.assertRaises(ValidationError, ret.validate)
|
||||
|
||||
    def test_int_and_float_ne_operator(self):
        class TestDocument(Document):
            int_fld = IntField()
            float_fld = FloatField()

        TestDocument.drop_collection()

        TestDocument(int_fld=None, float_fld=None).save()
        TestDocument(int_fld=1, float_fld=1).save()

        self.assertEqual(1, TestDocument.objects(int_fld__ne=None).count())
        self.assertEqual(1, TestDocument.objects(float_fld__ne=None).count())

    def test_object_id_validation(self):
        """Ensure that invalid values cannot be assigned to ObjectId fields.
        """
@@ -175,6 +271,26 @@ class FieldTest(unittest.TestCase):
        person.admin = 'Yes'
        self.assertRaises(ValidationError, person.validate)

    def test_uuid_validation(self):
        """Ensure that invalid values cannot be assigned to UUID fields.
        """
        class Person(Document):
            api_key = UUIDField()

        person = Person()
        # any uuid type is valid
        person.api_key = uuid.uuid4()
        person.validate()
        person.api_key = uuid.uuid1()
        person.validate()

        # the trailing 'g' cannot belong to a hex number
        person.api_key = '9d159858-549b-4975-9f98-dd2f987c113g'
        self.assertRaises(ValidationError, person.validate)
        # short strings don't validate
        person.api_key = '9d159858-549b-4975-9f98-dd2f987c113'
        self.assertRaises(ValidationError, person.validate)

    def test_datetime_validation(self):
        """Ensure that invalid values cannot be assigned to datetime fields.
        """
@@ -242,24 +358,6 @@ class FieldTest(unittest.TestCase):
        self.assertNotEquals(log.date, d1)
        self.assertEquals(log.date, d2)

        # Pre-UTC microsecond values above 1000 are wonky.
        # log.date has an invalid microsecond value so I can't construct
        # a date to compare.
        #
        # However, the timedelta is predictable with pre-UTC timestamps:
        # it always adds 16 seconds and [777216-776217] microseconds
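        # (e.g. with i=1001 the reloaded value is d1 + 16 seconds plus
        # 777216 - (1001 % 1000) = 777215 microseconds, per the loop below)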
        for i in xrange(1001, 3113, 33):
            d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, i)
            log.date = d1
            log.save()
            log.reload()
            self.assertNotEquals(log.date, d1)

            delta = log.date - d1
            self.assertEquals(delta.seconds, 16)
            microseconds = 777216 - (i % 1000)
            self.assertEquals(delta.microseconds, microseconds)

        LogEntry.drop_collection()

    def test_complexdatetime_storage(self):
@@ -337,27 +435,27 @@ class FieldTest(unittest.TestCase):
        logs = LogEntry.objects.order_by("date")
        count = logs.count()
        i = 0
        while i < count - 1:
            self.assertTrue(logs[i].date <= logs[i + 1].date)
            i += 1

        logs = LogEntry.objects.order_by("-date")
        count = logs.count()
        i = 0
        while i < count - 1:
            self.assertTrue(logs[i].date >= logs[i + 1].date)
            i += 1

        # Test searching
        logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
        self.assertEqual(logs.count(), 30)

        logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1))
        self.assertEqual(logs.count(), 30)

        logs = LogEntry.objects.filter(
            date__lte=datetime.datetime(2011, 1, 1),
            date__gte=datetime.datetime(2000, 1, 1),
        )
        self.assertEqual(logs.count(), 10)

@@ -459,6 +557,31 @@ class FieldTest(unittest.TestCase):

        BlogPost.drop_collection()

    def test_reverse_list_sorting(self):
        '''Ensure that a reverse sorted list field properly sorts values'''

        class Category(EmbeddedDocument):
            count = IntField()
            name = StringField()

        class CategoryList(Document):
            categories = SortedListField(EmbeddedDocumentField(Category), ordering='count', reverse=True)
            name = StringField()

        catlist = CategoryList(name="Top categories")
        cat1 = Category(name='posts', count=10)
        cat2 = Category(name='food', count=100)
        cat3 = Category(name='drink', count=40)
        catlist.categories = [cat1, cat2, cat3]
        catlist.save()
        catlist.reload()

        self.assertEqual(catlist.categories[0].name, cat2.name)
        self.assertEqual(catlist.categories[1].name, cat3.name)
        self.assertEqual(catlist.categories[2].name, cat1.name)

        CategoryList.drop_collection()

    def test_list_field(self):
        """Ensure that list types work as expected.
        """
@@ -485,7 +608,6 @@ class FieldTest(unittest.TestCase):
        post.info = [{'test': 3}]
        post.save()

        self.assertEquals(BlogPost.objects.count(), 3)
        self.assertEquals(BlogPost.objects.filter(info__exact='test').count(), 1)
        self.assertEquals(BlogPost.objects.filter(info__0__test='test').count(), 1)
@@ -495,6 +617,21 @@ class FieldTest(unittest.TestCase):
        self.assertEquals(BlogPost.objects.filter(info__100__test__exact='test').count(), 0)
        BlogPost.drop_collection()

    def test_list_field_passed_in_value(self):
        class Foo(Document):
            bars = ListField(ReferenceField("Bar"))

        class Bar(Document):
            text = StringField()

        bar = Bar(text="hi")
        bar.save()

        foo = Foo(bars=[])
        foo.bars.append(bar)
        self.assertEquals(repr(foo.bars), '[<Bar: Bar object>]')

    def test_list_field_strict(self):
        """Ensure that list field handles validation if provided a strict field type."""
@@ -515,6 +652,39 @@ class FieldTest(unittest.TestCase):

        Simple.drop_collection()

    def test_list_field_rejects_strings(self):
        """Strings aren't valid list field data types"""

        class Simple(Document):
            mapping = ListField()

        Simple.drop_collection()
        e = Simple()
        e.mapping = 'hello world'

        self.assertRaises(ValidationError, e.save)

    def test_complex_field_required(self):
        """Ensure required complex fields can't be None / empty"""

        class Simple(Document):
            mapping = ListField(required=True)

        Simple.drop_collection()
        e = Simple()
        e.mapping = []

        self.assertRaises(ValidationError, e.save)

        class Simple(Document):
            mapping = DictField(required=True)

        Simple.drop_collection()
        e = Simple()
        e.mapping = {}

        self.assertRaises(ValidationError, e.save)

    def test_list_field_complex(self):
        """Ensure that the list fields can handle the complex types."""
@@ -582,6 +752,9 @@ class FieldTest(unittest.TestCase):
        post.info = {'the.title': 'test'}
        self.assertRaises(ValidationError, post.validate)

        post.info = {1: 'test'}
        self.assertRaises(ValidationError, post.validate)

        post.info = {'title': 'test'}
        post.save()

@@ -600,6 +773,13 @@ class FieldTest(unittest.TestCase):
        # Confirm it handles non-strings and non-existent keys
        self.assertEquals(BlogPost.objects.filter(info__details__test__exact=5).count(), 0)
        self.assertEquals(BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0)

        post = BlogPost.objects.create(info={'title': 'original'})
        post.info.update({'title': 'updated'})
        post.save()
        post.reload()
        self.assertEquals('updated', post.info['title'])

        BlogPost.drop_collection()

    def test_dictfield_strict(self):
@@ -727,6 +907,48 @@ class FieldTest(unittest.TestCase):

        Extensible.drop_collection()

    def test_embedded_mapfield_db_field(self):

        class Embedded(EmbeddedDocument):
            number = IntField(default=0, db_field='i')

        class Test(Document):
            my_map = MapField(field=EmbeddedDocumentField(Embedded), db_field='x')

        Test.drop_collection()

        test = Test()
        test.my_map['DICTIONARY_KEY'] = Embedded(number=1)
        test.save()

        Test.objects.update_one(inc__my_map__DICTIONARY_KEY__number=1)

        test = Test.objects.get()
        self.assertEqual(test.my_map['DICTIONARY_KEY'].number, 2)
        doc = self.db.test.find_one()
        self.assertEqual(doc['x']['DICTIONARY_KEY']['i'], 2)
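        # db_field only changes what is stored: the map is saved under 'x' and
        # each embedded 'number' under 'i', while the Python API keeps using
        # my_map and number.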

    def test_embedded_db_field(self):

        class Embedded(EmbeddedDocument):
            number = IntField(default=0, db_field='i')

        class Test(Document):
            embedded = EmbeddedDocumentField(Embedded, db_field='x')

        Test.drop_collection()

        test = Test()
        test.embedded = Embedded(number=1)
        test.save()

        Test.objects.update_one(inc__embedded__number=1)

        test = Test.objects.get()
        self.assertEqual(test.embedded.number, 2)
        doc = self.db.test.find_one()
        self.assertEqual(doc['x']['i'], 2)

    def test_embedded_document_validation(self):
        """Ensure that invalid embedded documents cannot be assigned to
        embedded document fields.
@@ -942,15 +1164,29 @@ class FieldTest(unittest.TestCase):
        class Company(Document):
            name = StringField()

        Product.drop_collection()
        Company.drop_collection()

        ten_gen = Company(name='10gen')
        ten_gen.save()
        mongodb = Product(name='MongoDB', company=ten_gen)
        mongodb.save()

        me = Product(name='MongoEngine')
        me.save()

        obj = Product.objects(company=ten_gen).first()
        self.assertEqual(obj, mongodb)
        self.assertEqual(obj.company, ten_gen)

        obj = Product.objects(company=None).first()
        self.assertEqual(obj, me)

        obj, created = Product.objects.get_or_create(company=None)

        self.assertEqual(created, False)
        self.assertEqual(obj, me)

    def test_reference_query_conversion(self):
        """Ensure that ReferenceFields can be queried using objects and values
        of the type of the primary key of the referenced object.
@@ -1062,7 +1298,6 @@ class FieldTest(unittest.TestCase):
        Post.drop_collection()
        User.drop_collection()

    def test_generic_reference_document_not_registered(self):
        """Ensure dereferencing out of the document registry throws a
        `NotRegistered` error.
@@ -1089,7 +1324,7 @@ class FieldTest(unittest.TestCase):
        user = User.objects.first()
        try:
            user.bookmarks
            raise AssertionError("Link was removed from the registry")
        except NotRegistered:
            pass

@@ -1108,6 +1343,74 @@ class FieldTest(unittest.TestCase):
        self.assertEquals(repr(Person.objects(city=None)),
                          "[<Person: Person object>]")

    def test_generic_reference_choices(self):
        """Ensure that a GenericReferenceField can handle choices
        """
        class Link(Document):
            title = StringField()

        class Post(Document):
            title = StringField()

        class Bookmark(Document):
            bookmark_object = GenericReferenceField(choices=(Post,))

        Link.drop_collection()
        Post.drop_collection()
        Bookmark.drop_collection()

        link_1 = Link(title="Pitchfork")
        link_1.save()

        post_1 = Post(title="Behind the Scenes of the Pavement Reunion")
        post_1.save()

        bm = Bookmark(bookmark_object=link_1)
        self.assertRaises(ValidationError, bm.validate)

        bm = Bookmark(bookmark_object=post_1)
        bm.save()

        bm = Bookmark.objects.first()
        self.assertEqual(bm.bookmark_object, post_1)

    def test_generic_reference_list_choices(self):
        """Ensure that a ListField properly dereferences generic references and
        respects choices.
        """
        class Link(Document):
            title = StringField()

        class Post(Document):
            title = StringField()

        class User(Document):
            bookmarks = ListField(GenericReferenceField(choices=(Post,)))

        Link.drop_collection()
        Post.drop_collection()
        User.drop_collection()

        link_1 = Link(title="Pitchfork")
        link_1.save()

        post_1 = Post(title="Behind the Scenes of the Pavement Reunion")
        post_1.save()

        user = User(bookmarks=[link_1])
        self.assertRaises(ValidationError, user.validate)

        user = User(bookmarks=[post_1])
        user.save()

        user = User.objects.first()
        self.assertEqual(user.bookmarks, [post_1])

        Link.drop_collection()
        Post.drop_collection()
        User.drop_collection()

    def test_binary_fields(self):
        """Ensure that binary fields can be stored and retrieved.
        """
@@ -1213,6 +1516,53 @@ class FieldTest(unittest.TestCase):

        Shirt.drop_collection()

    def test_simple_choices_validation(self):
        """Ensure that value is in a container of allowed values.
        """
        class Shirt(Document):
            size = StringField(max_length=3, choices=('S', 'M', 'L', 'XL', 'XXL'))

        Shirt.drop_collection()

        shirt = Shirt()
        shirt.validate()

        shirt.size = "S"
        shirt.validate()

        shirt.size = "XS"
        self.assertRaises(ValidationError, shirt.validate)

        Shirt.drop_collection()

    def test_simple_choices_get_field_display(self):
        """Test dynamic helper for returning the display value of a choices field.
        """
        class Shirt(Document):
            size = StringField(max_length=3, choices=('S', 'M', 'L', 'XL', 'XXL'))
            style = StringField(max_length=3, choices=('Small', 'Baggy', 'wide'), default='Small')

        Shirt.drop_collection()

        shirt = Shirt()

        self.assertEqual(shirt.get_size_display(), None)
        self.assertEqual(shirt.get_style_display(), 'Small')

        shirt.size = "XXL"
        shirt.style = "Baggy"
        self.assertEqual(shirt.get_size_display(), 'XXL')
        self.assertEqual(shirt.get_style_display(), 'Baggy')

        # Set as Z - an invalid choice
        shirt.size = "Z"
        shirt.style = "Z"
        self.assertEqual(shirt.get_size_display(), 'Z')
        self.assertEqual(shirt.get_style_display(), 'Z')
        self.assertRaises(ValidationError, shirt.validate)

        Shirt.drop_collection()

    def test_file_fields(self):
        """Ensure that file fields can be written to and their data retrieved
        """
@@ -1242,6 +1592,21 @@ class FieldTest(unittest.TestCase):
        self.assertEquals(result.file.read(), text)
        self.assertEquals(result.file.content_type, content_type)
        result.file.delete()  # Remove file from GridFS
        PutFile.objects.delete()

        # Ensure file-like objects are stored
        putfile = PutFile()
        putstring = StringIO.StringIO()
        putstring.write(text)
        putstring.seek(0)
        putfile.file.put(putstring, content_type=content_type)
        putfile.save()
        putfile.validate()
        result = PutFile.objects.first()
        self.assertTrue(putfile == result)
        self.assertEquals(result.file.read(), text)
        self.assertEquals(result.file.content_type, content_type)
        result.file.delete()

        streamfile = StreamFile()
        streamfile.file.new_file(content_type=content_type)
@@ -1289,7 +1654,50 @@ class FieldTest(unittest.TestCase):
        # Make sure FileField is optional and not required
        class DemoFile(Document):
            file = FileField()
        DemoFile.objects.create()

    def test_file_field_no_default(self):

        class GridDocument(Document):
            the_file = FileField()

        GridDocument.drop_collection()

        with tempfile.TemporaryFile() as f:
            f.write("Hello World!")
            f.flush()

            # Test without default
            doc_a = GridDocument()
            doc_a.save()

            doc_b = GridDocument.objects.with_id(doc_a.id)
            doc_b.the_file.replace(f, filename='doc_b')
            doc_b.save()
            self.assertNotEquals(doc_b.the_file.grid_id, None)

            # Test it matches
            doc_c = GridDocument.objects.with_id(doc_b.id)
            self.assertEquals(doc_b.the_file.grid_id, doc_c.the_file.grid_id)

            # Test with default
            doc_d = GridDocument(the_file='')
            doc_d.save()

            doc_e = GridDocument.objects.with_id(doc_d.id)
            self.assertEquals(doc_d.the_file.grid_id, doc_e.the_file.grid_id)

            doc_e.the_file.replace(f, filename='doc_e')
            doc_e.save()

            doc_f = GridDocument.objects.with_id(doc_e.id)
            self.assertEquals(doc_e.the_file.grid_id, doc_f.the_file.grid_id)

        db = GridDocument._get_db()
        grid_fs = gridfs.GridFS(db)
        self.assertEquals(['doc_b', 'doc_e'], grid_fs.list())

    def test_file_uniqueness(self):
        """Ensure that each instance of a FileField is unique
        """
@@ -1328,6 +1736,95 @@ class FieldTest(unittest.TestCase):

        TestFile.drop_collection()

    def test_image_field(self):

        class TestImage(Document):
            image = ImageField()

        TestImage.drop_collection()

        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'r'))
        t.save()

        t = TestImage.objects.first()

        self.assertEquals(t.image.format, 'PNG')

        w, h = t.image.size
        self.assertEquals(w, 371)
        self.assertEquals(h, 76)

        t.image.delete()

    def test_image_field_resize(self):

        class TestImage(Document):
            image = ImageField(size=(185, 37))

        TestImage.drop_collection()

        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'r'))
        t.save()

        t = TestImage.objects.first()

        self.assertEquals(t.image.format, 'PNG')
        w, h = t.image.size

        self.assertEquals(w, 185)
        self.assertEquals(h, 37)

        t.image.delete()

    def test_image_field_thumbnail(self):

        class TestImage(Document):
            image = ImageField(thumbnail_size=(92, 18))

        TestImage.drop_collection()

        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'r'))
        t.save()

        t = TestImage.objects.first()

        self.assertEquals(t.image.thumbnail.format, 'PNG')
        self.assertEquals(t.image.thumbnail.width, 92)
        self.assertEquals(t.image.thumbnail.height, 18)

        t.image.delete()

    def test_file_multidb(self):
        register_connection('testfiles', 'testfiles')

        class TestFile(Document):
            name = StringField()
            file = FileField(db_alias="testfiles",
                             collection_name="macumba")

        TestFile.drop_collection()

        # delete old filesystem
        get_db("testfiles").macumba.files.drop()
        get_db("testfiles").macumba.chunks.drop()

        # First instance
        testfile = TestFile()
        testfile.name = "Hello, World!"
        testfile.file.put('Hello, World!',
                          name="hello.txt")
        testfile.save()

        data = get_db("testfiles").macumba.files.find_one()
        self.assertEquals(data.get('name'), 'hello.txt')

        testfile = TestFile.objects.first()
        self.assertEquals(testfile.file.read(),
                          'Hello, World!')

    def test_geo_indexes(self):
        """Ensure that indexes are created automatically for GeoPointFields.
        """
@@ -1488,7 +1985,6 @@ class FieldTest(unittest.TestCase):
        c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'})
        self.assertEqual(c['next'], 10)

    def test_generic_embedded_document(self):
        class Car(EmbeddedDocument):
            name = StringField()
@@ -1501,6 +1997,8 @@ class FieldTest(unittest.TestCase):
            name = StringField()
            like = GenericEmbeddedDocumentField()

        Person.drop_collection()

        person = Person(name='Test User')
        person.like = Car(name='Fiat')
        person.save()
@@ -1514,5 +2012,103 @@ class FieldTest(unittest.TestCase):
        person = Person.objects.first()
        self.assertTrue(isinstance(person.like, Dish))

    def test_generic_embedded_document_choices(self):
        """Ensure you can limit GenericEmbeddedDocument choices
        """
        class Car(EmbeddedDocument):
            name = StringField()

        class Dish(EmbeddedDocument):
            food = StringField(required=True)
            number = IntField()

        class Person(Document):
            name = StringField()
            like = GenericEmbeddedDocumentField(choices=(Dish,))

        Person.drop_collection()

        person = Person(name='Test User')
        person.like = Car(name='Fiat')
        self.assertRaises(ValidationError, person.validate)

        person.like = Dish(food="arroz", number=15)
        person.save()

        person = Person.objects.first()
        self.assertTrue(isinstance(person.like, Dish))

    def test_generic_list_embedded_document_choices(self):
        """Ensure you can limit GenericEmbeddedDocument choices inside a list
        field
        """
        class Car(EmbeddedDocument):
            name = StringField()

        class Dish(EmbeddedDocument):
            food = StringField(required=True)
            number = IntField()

        class Person(Document):
            name = StringField()
            likes = ListField(GenericEmbeddedDocumentField(choices=(Dish,)))

        Person.drop_collection()

        person = Person(name='Test User')
        person.likes = [Car(name='Fiat')]
        self.assertRaises(ValidationError, person.validate)

        person.likes = [Dish(food="arroz", number=15)]
        person.save()

        person = Person.objects.first()
        self.assertTrue(isinstance(person.likes[0], Dish))

    def test_recursive_validation(self):
        """Ensure that a validation result to_dict is available.
        """
        class Author(EmbeddedDocument):
            name = StringField(required=True)

        class Comment(EmbeddedDocument):
            author = EmbeddedDocumentField(Author, required=True)
            content = StringField(required=True)

        class Post(Document):
            title = StringField(required=True)
            comments = ListField(EmbeddedDocumentField(Comment))

        bob = Author(name='Bob')
        post = Post(title='hello world')
        post.comments.append(Comment(content='hello', author=bob))
        post.comments.append(Comment(author=bob))

        try:
            post.validate()
        except ValidationError, error:
            pass

        # ValidationError.errors property
        self.assertTrue(hasattr(error, 'errors'))
        self.assertTrue(isinstance(error.errors, dict))
        self.assertTrue('comments' in error.errors)
        self.assertTrue(1 in error.errors['comments'])
        self.assertTrue(isinstance(error.errors['comments'][1]['content'],
                                   ValidationError))

        # ValidationError.to_dict method
        error_dict = error.to_dict()
        self.assertTrue(isinstance(error_dict, dict))
        self.assertTrue('comments' in error_dict)
        self.assertTrue(1 in error_dict['comments'])
        self.assertTrue('content' in error_dict['comments'][1])
        self.assertEquals(error_dict['comments'][1]['content'],
                          'Field is required')

        post.comments[1].content = 'here we go'
        post.validate()


if __name__ == '__main__':
    unittest.main()
@@ -1,13 +1,14 @@
# -*- coding: utf-8 -*-
import unittest
import pymongo
from bson import ObjectId
from datetime import datetime, timedelta

from mongoengine.queryset import (QuerySet, QuerySetManager,
                                  MultipleObjectsReturned, DoesNotExist,
                                  QueryFieldList)
from mongoengine import *
from mongoengine.connection import get_connection
from mongoengine.tests import query_counter


@@ -15,10 +16,11 @@ class QuerySetTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')

        class Person(Document):
            name = StringField()
            age = IntField()
            meta = {'allow_inheritance': True}
        self.Person = Person

    def test_initialisation(self):
@@ -59,8 +61,7 @@ class QuerySetTest(unittest.TestCase):
        self.assertEqual(len(people), 2)
        results = list(people)
        self.assertTrue(isinstance(results[0], self.Person))
        self.assertTrue(isinstance(results[0].id, (ObjectId, str, unicode)))
        self.assertEqual(results[0].name, "User A")
        self.assertEqual(results[0].age, 20)
        self.assertEqual(results[1].name, "User B")
@@ -110,6 +111,16 @@ class QuerySetTest(unittest.TestCase):
        people = list(self.Person.objects[80000:80001])
        self.assertEqual(len(people), 0)

        # Test larger slice __repr__
        self.Person.objects.delete()
        for i in xrange(55):
            self.Person(name='A%s' % i, age=i).save()

        self.assertEqual(len(self.Person.objects), 55)
        self.assertEqual("Person object", "%s" % self.Person.objects[0])
        self.assertEqual("[<Person: Person object>, <Person: Person object>]", "%s" % self.Person.objects[1:3])
        self.assertEqual("[<Person: Person object>, <Person: Person object>]", "%s" % self.Person.objects[51:53])

    def test_find_one(self):
        """Ensure that a query using find_one returns a valid result.
        """
@@ -144,6 +155,8 @@ class QuerySetTest(unittest.TestCase):
        person = self.Person.objects.with_id(person1.id)
        self.assertEqual(person.name, "User A")

        self.assertRaises(InvalidQueryError, self.Person.objects(name="User A").with_id, person1.id)

    def test_find_only_one(self):
        """Ensure that a query using ``get`` returns at most one result.
        """
@@ -316,11 +329,11 @@ class QuerySetTest(unittest.TestCase):

        BlogPost(title="ABC", comments=[c1, c2]).save()

        BlogPost.objects(comments__by="joe").update(inc__comments__S__votes=1)
        BlogPost.objects(comments__by="jane").update(inc__comments__S__votes=1)

        post = BlogPost.objects.first()
        self.assertEquals(post.comments[0].by, 'joe')
        self.assertEquals(post.comments[0].votes, 4)
        self.assertEquals(post.comments[1].by, 'jane')
        self.assertEquals(post.comments[1].votes, 8)

        # Currently the $ operator only applies to the first matched item in
        # the query
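        # Roughly, each update above translates to a raw positional update of
        # the form (sketch, assuming the default collection name):
        #   db.blog_post.update({'comments.by': 'joe'},
        #                       {'$inc': {'comments.$.votes': 1}})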
@@ -368,6 +381,34 @@ class QuerySetTest(unittest.TestCase):
        self.assertRaises(OperationError, update_nested)
        Simple.drop_collection()

    def test_update_using_positional_operator_embedded_document(self):
        """Ensure that the embedded documents can be updated using the positional
        operator."""

        class Vote(EmbeddedDocument):
            score = IntField()

        class Comment(EmbeddedDocument):
            by = StringField()
            votes = EmbeddedDocumentField(Vote)

        class BlogPost(Document):
            title = StringField()
            comments = ListField(EmbeddedDocumentField(Comment))

        BlogPost.drop_collection()

        c1 = Comment(by="joe", votes=Vote(score=3))
        c2 = Comment(by="jane", votes=Vote(score=7))

        BlogPost(title="ABC", comments=[c1, c2]).save()

        BlogPost.objects(comments__by="joe").update(set__comments__S__votes=Vote(score=4))

        post = BlogPost.objects.first()
        self.assertEquals(post.comments[0].by, 'joe')
        self.assertEquals(post.comments[0].votes.score, 4)

    def test_mapfield_update(self):
        """Ensure that the MapField can be updated."""
        class Member(EmbeddedDocument):
@@ -439,7 +480,7 @@ class QuerySetTest(unittest.TestCase):
        self.assertEqual(person.name, "User C")

    def test_bulk_insert(self):
        """Ensure that bulk insert works
        """

        class Comment(EmbeddedDocument):
@@ -449,12 +490,15 @@ class QuerySetTest(unittest.TestCase):
            comments = ListField(EmbeddedDocumentField(Comment))

        class Blog(Document):
            title = StringField(unique=True)
            tags = ListField(StringField())
            posts = ListField(EmbeddedDocumentField(Post))

        Blog.drop_collection()

        # Recreates the collection
        self.assertEqual(0, Blog.objects.count())

        with query_counter() as q:
            self.assertEqual(q, 0)

@@ -468,10 +512,10 @@ class QuerySetTest(unittest.TestCase):
            blogs.append(Blog(title="post %s" % i, posts=[post1, post2]))

            Blog.objects.insert(blogs, load_bulk=False)
            self.assertEqual(q, 1)  # 1 for the insert

            Blog.objects.insert(blogs)
            self.assertEqual(q, 3)  # 1 for insert, and 1 for in bulk fetch (3 in total)
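            # load_bulk=False sends only the insert; the default load_bulk=True
            # adds one more query to fetch the inserted documents back, which
            # accounts for the extra count asserted above.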

            Blog.drop_collection()

@@ -519,6 +563,23 @@ class QuerySetTest(unittest.TestCase):
        obj_id = Blog.objects.insert(blog1, load_bulk=False)
        self.assertEquals(obj_id.__class__.__name__, 'ObjectId')

        Blog.drop_collection()
        post3 = Post(comments=[comment1, comment1])
        blog1 = Blog(title="foo", posts=[post1, post2])
        blog2 = Blog(title="bar", posts=[post2, post3])
        blog3 = Blog(title="baz", posts=[post1, post2])
        Blog.objects.insert([blog1, blog2])

        def throw_operation_error_not_unique():
            Blog.objects.insert([blog2, blog3], safe=True)

        self.assertRaises(OperationError, throw_operation_error_not_unique)
        self.assertEqual(Blog.objects.count(), 2)

        Blog.objects.insert([blog2, blog3], write_options={'continue_on_error': True})
        self.assertEqual(Blog.objects.count(), 3)

    def test_slave_okay(self):
        """Ensures that a query can take slave_okay syntax
        """
@@ -567,19 +628,46 @@ class QuerySetTest(unittest.TestCase):
        people1 = [person for person in queryset]
        people2 = [person for person in queryset]

        # Check that it still works even if iteration is interrupted.
        for person in queryset:
            self.assertEquals('.. queryset mid-iteration ..', repr(queryset))
            break
        people3 = [person for person in queryset]

        self.assertEqual(people1, people2)
        self.assertEqual(people1, people3)

    def test_repr_iteration(self):
        """Ensure that QuerySet __repr__ can handle loops
        """
        self.Person(name='Person 1').save()
        self.Person(name='Person 2').save()

        queryset = self.Person.objects
        self.assertEquals('[<Person: Person object>, <Person: Person object>]', repr(queryset))

    def test_repr(self):
        """Test repr behavior isn't destructive"""

        class Doc(Document):
            number = IntField()

            def __repr__(self):
                return "<Doc: %s>" % self.number

        Doc.drop_collection()

        for i in xrange(1000):
            Doc(number=i).save()

        docs = Doc.objects.order_by('number')

        self.assertEquals(docs.count(), 1000)
        self.assertEquals(len(docs), 1000)

        docs_string = "%s" % docs
        self.assertTrue("Doc: 0" in docs_string)

        self.assertEquals(docs.count(), 1000)
        self.assertEquals(len(docs), 1000)

        # Limit and skip
        self.assertEquals('[<Doc: 1>, <Doc: 2>, <Doc: 3>]', "%s" % docs[1:4])
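        # Slicing applies skip/limit to the queryset itself, so count() and
        # len() below report the sliced size (3) rather than 1000.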

        self.assertEquals(docs.count(), 3)
        self.assertEquals(len(docs), 3)
        for doc in docs:
            self.assertEqual('.. queryset mid-iteration ..', repr(docs))

    def test_regex_query_shortcuts(self):
        """Ensure that contains, startswith, endswith, etc work.
        """
@@ -1277,6 +1365,37 @@ class QuerySetTest(unittest.TestCase):
        self.Person.objects(name='Test User').delete()
        self.assertEqual(1, BlogPost.objects.count())

    def test_reverse_delete_rule_cascade_self_referencing(self):
        """Ensure self-referencing CASCADE deletes do not result in infinite loop
        """
        class Category(Document):
            name = StringField()
            parent = ReferenceField('self', reverse_delete_rule=CASCADE)

        num_children = 3
        base = Category(name='Root')
        base.save()

        # Create a simple parent-child tree
        for i in range(num_children):
            child_name = 'Child-%i' % i
            child = Category(name=child_name, parent=base)
            child.save()

            for i in range(num_children):
                child_child_name = 'Child-Child-%i' % i
                child_child = Category(name=child_child_name, parent=child)
                child_child.save()

        tree_size = 1 + num_children + (num_children * num_children)
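        # i.e. 1 root + 3 children + 3 * 3 grandchildren = 13 documents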
        self.assertEquals(tree_size, Category.objects.count())
        self.assertEquals(num_children, Category.objects(parent=base).count())

        # The delete should effectively wipe out the Category collection
        # without resulting in infinite parent-child cascade recursion
        base.delete()
        self.assertEquals(0, Category.objects.count())

    def test_reverse_delete_rule_nullify(self):
        """Ensure nullification of references to deleted documents.
        """
@@ -1321,6 +1440,36 @@ class QuerySetTest(unittest.TestCase):

        self.assertRaises(OperationError, self.Person.objects.delete)

    def test_reverse_delete_rule_pull(self):
        """Ensure pulling of references to deleted documents.
        """
        class BlogPost(Document):
            content = StringField()
            authors = ListField(ReferenceField(self.Person,
                                               reverse_delete_rule=PULL))

        BlogPost.drop_collection()
        self.Person.drop_collection()

        me = self.Person(name='Test User')
        me.save()

        someoneelse = self.Person(name='Some-one Else')
        someoneelse.save()

        post = BlogPost(content='Watching TV', authors=[me, someoneelse])
        post.save()

        another = BlogPost(content='Chilling Out', authors=[someoneelse])
        another.save()

        someoneelse.delete()
        post.reload()
        another.reload()

        self.assertEqual(post.authors, [me])
        self.assertEqual(another.authors, [])
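        # The PULL rule removes the deleted reference from every authors list
        # (effectively a $pull on the array) rather than deleting or
        # nullifying the posts themselves.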

    def test_update(self):
        """Ensure that atomic updates work properly.
        """
@@ -1371,20 +1520,85 @@ class QuerySetTest(unittest.TestCase):

        BlogPost.drop_collection()

    def test_update_push_and_pull_add_to_set(self):
        """Ensure that the 'push', 'pull' and '$addToSet' update operations
        work correctly.
        """
        class BlogPost(Document):
            slug = StringField()
            tags = ListField(StringField())

        BlogPost.drop_collection()

        post = BlogPost(slug="test")
        post.save()

        BlogPost.objects.filter(id=post.id).update(push__tags="code")
        post.reload()
        self.assertEqual(post.tags, ["code"])

        BlogPost.objects.filter(id=post.id).update(push_all__tags=["mongodb", "code"])
        post.reload()
        self.assertEqual(post.tags, ["code", "mongodb", "code"])

        BlogPost.objects(slug="test").update(pull__tags="code")
        post.reload()
        self.assertTrue('code' not in post.tags)
        self.assertEqual(len(post.tags), 1)
        self.assertEqual(post.tags, ["mongodb"])

        BlogPost.objects(slug="test").update(pull_all__tags=["mongodb", "code"])
        post.reload()
        self.assertEqual(post.tags, [])

        BlogPost.objects(slug="test").update(__raw__={"$addToSet": {"tags": {"$each": ["code", "mongodb", "code"]}}})
        post.reload()
        self.assertEqual(post.tags, ["code", "mongodb"])

    def test_add_to_set_each(self):
        class Item(Document):
            name = StringField(required=True)
            description = StringField(max_length=50)
            parents = ListField(ReferenceField('self'))

        Item.drop_collection()

        item = Item(name='test item').save()
        parent_1 = Item(name='parent 1').save()
        parent_2 = Item(name='parent 2').save()

        item.update(add_to_set__parents=[parent_1, parent_2, parent_1])
        item.reload()

        self.assertEqual([parent_1, parent_2], item.parents)

    def test_pull_nested(self):

        class User(Document):
            name = StringField()

        class Collaborator(EmbeddedDocument):
            user = StringField()

            def __unicode__(self):
                return '%s' % self.user

        class Site(Document):
            name = StringField(max_length=75, unique=True, required=True)
            collaborators = ListField(EmbeddedDocumentField(Collaborator))

        Site.drop_collection()

        c = Collaborator(user='Esteban')
        s = Site(name="test", collaborators=[c])
        s.save()

        Site.objects(id=s.id).update_one(pull__collaborators__user='Esteban')
        self.assertEqual(Site.objects.first().collaborators, [])

        def pull_all():
            Site.objects(id=s.id).update_one(pull_all__collaborators__user=['Ross'])

        self.assertRaises(InvalidQueryError, pull_all)

    def test_update_one_pop_generic_reference(self):

@@ -1449,6 +1663,37 @@ class QuerySetTest(unittest.TestCase):

        BlogPost.drop_collection()

    def test_set_list_embedded_documents(self):

        class Author(EmbeddedDocument):
            name = StringField()

        class Message(Document):
            title = StringField()
            authors = ListField(EmbeddedDocumentField('Author'))

        Message.drop_collection()

        message = Message(title="hello", authors=[Author(name="Harry")])
        message.save()

        Message.objects(authors__name="Harry").update_one(
            set__authors__S=Author(name="Ross"))

        message = message.reload()
        self.assertEquals(message.authors[0].name, "Ross")

        Message.objects(authors__name="Ross").update_one(
            set__authors=[Author(name="Harry"),
                          Author(name="Ross"),
                          Author(name="Adam")])

        message = message.reload()
        self.assertEquals(message.authors[0].name, "Harry")
        self.assertEquals(message.authors[1].name, "Ross")
        self.assertEquals(message.authors[2].name, "Adam")

    def test_order_by(self):
        """Ensure that QuerySets may be ordered.
        """
@@ -1749,9 +1994,9 @@ class QuerySetTest(unittest.TestCase):

        # Check item_frequencies works for non-list fields
        def test_assertions(f):
            self.assertEqual(set([1, 2]), set(f.keys()))
            self.assertEqual(f[1], 1)
            self.assertEqual(f[2], 2)

        exec_js = BlogPost.objects.item_frequencies('hits')
        map_reduce = BlogPost.objects.item_frequencies('hits', map_reduce=True)
@@ -1840,6 +2085,80 @@ class QuerySetTest(unittest.TestCase):
        freq = Person.objects.item_frequencies('city', normalize=True, map_reduce=True)
        self.assertEquals(freq, {'CRB': 0.5, None: 0.5})

    def test_item_frequencies_with_null_embedded(self):
        class Data(EmbeddedDocument):
            name = StringField()

        class Extra(EmbeddedDocument):
            tag = StringField()

        class Person(Document):
            data = EmbeddedDocumentField(Data, required=True)
            extra = EmbeddedDocumentField(Extra)

        Person.drop_collection()

        p = Person()
        p.data = Data(name="Wilson Jr")
        p.save()

        p = Person()
        p.data = Data(name="Wesley")
        p.extra = Extra(tag="friend")
        p.save()

        ot = Person.objects.item_frequencies('extra.tag', map_reduce=False)
        self.assertEquals(ot, {None: 1.0, u'friend': 1.0})

        ot = Person.objects.item_frequencies('extra.tag', map_reduce=True)
        self.assertEquals(ot, {None: 1.0, u'friend': 1.0})

    def test_item_frequencies_with_0_values(self):
        class Test(Document):
            val = IntField()

        Test.drop_collection()
        t = Test()
        t.val = 0
        t.save()

        ot = Test.objects.item_frequencies('val', map_reduce=True)
        self.assertEquals(ot, {0: 1})
        ot = Test.objects.item_frequencies('val', map_reduce=False)
        self.assertEquals(ot, {0: 1})

    def test_item_frequencies_with_False_values(self):
        class Test(Document):
            val = BooleanField()

        Test.drop_collection()
        t = Test()
        t.val = False
        t.save()

        ot = Test.objects.item_frequencies('val', map_reduce=True)
        self.assertEquals(ot, {False: 1})
        ot = Test.objects.item_frequencies('val', map_reduce=False)
        self.assertEquals(ot, {False: 1})

    def test_item_frequencies_normalize(self):
        class Test(Document):
            val = IntField()

        Test.drop_collection()

        for i in xrange(50):
            Test(val=1).save()

        for i in xrange(20):
            Test(val=2).save()

        freqs = Test.objects.item_frequencies('val', map_reduce=False, normalize=True)
        self.assertEquals(freqs, {1: 50.0/70, 2: 20.0/70})
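        # normalize=True divides each raw count by the total number of values:
        # 50 of 70 ones -> 50.0/70 ~= 0.714, 20 of 70 twos -> 20.0/70 ~= 0.286.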

        freqs = Test.objects.item_frequencies('val', map_reduce=True, normalize=True)
        self.assertEquals(freqs, {1: 50.0/70, 2: 20.0/70})

    def test_average(self):
        """Ensure that a field can be averaged correctly.
        """
@@ -1882,6 +2201,24 @@ class QuerySetTest(unittest.TestCase):
        self.assertEqual(set(self.Person.objects(age=30).distinct('name')),
                         set(['Mr Orange', 'Mr Pink']))

    def test_distinct_handles_references(self):
        class Foo(Document):
            bar = ReferenceField("Bar")

        class Bar(Document):
            text = StringField()

        Bar.drop_collection()
        Foo.drop_collection()

        bar = Bar(text="hi")
        bar.save()

        foo = Foo(bar=bar)
        foo.save()

        self.assertEquals(Foo.objects.distinct("bar"), [bar])

    def test_custom_manager(self):
        """Ensure that custom QuerySetManager instances work as expected.
        """
@@ -2197,10 +2534,10 @@ class QuerySetTest(unittest.TestCase):
        events = Event.objects(location__within_box=box)
        self.assertEqual(events.count(), 1)
        self.assertEqual(events[0].id, event2.id)

        # check that polygon works for users who have a server >= 1.9
        server_version = tuple(
            get_connection().server_info()['version'].split('.')
        )
        required_version = tuple("1.9.0".split("."))
        if server_version >= required_version:
@@ -2214,7 +2551,7 @@ class QuerySetTest(unittest.TestCase):
            events = Event.objects(location__within_polygon=polygon)
            self.assertEqual(events.count(), 1)
            self.assertEqual(events[0].id, event1.id)

            polygon2 = [
                (54.033586, -1.742249),
                (52.792797, -1.225891),
@@ -2222,7 +2559,7 @@ class QuerySetTest(unittest.TestCase):
            ]
            events = Event.objects(location__within_polygon=polygon2)
            self.assertEqual(events.count(), 0)

        Event.drop_collection()

    def test_spherical_geospatial_operators(self):
@@ -2569,6 +2906,278 @@ class QuerySetTest(unittest.TestCase):

        self.assertRaises(TypeError, invalid_where)

    def test_scalar(self):

        class Organization(Document):
            id = ObjectIdField('_id')
            name = StringField()

        class User(Document):
            id = ObjectIdField('_id')
            name = StringField()
            organization = ObjectIdField()

        User.drop_collection()
        Organization.drop_collection()

        whitehouse = Organization(name="White House")
        whitehouse.save()
        User(name="Bob Dole", organization=whitehouse.id).save()

        # Efficient way to get all unique organization names for a given
        # set of users (Pretend this has additional filtering.)
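        # (scalar() reads just the requested field values off the cursor
        # instead of materialising full User documents.)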
        user_orgs = set(User.objects.scalar('organization'))
        orgs = Organization.objects(id__in=user_orgs).scalar('name')
        self.assertEqual(list(orgs), ['White House'])

        # Efficient for generating listings, too.
        orgs = Organization.objects.scalar('name').in_bulk(list(user_orgs))
        user_map = User.objects.scalar('name', 'organization')
        user_listing = [(user, orgs[org]) for user, org in user_map]
        self.assertEqual([("Bob Dole", "White House")], user_listing)

    def test_scalar_simple(self):
        class TestDoc(Document):
            x = IntField()
            y = BooleanField()

        TestDoc.drop_collection()

        TestDoc(x=10, y=True).save()
        TestDoc(x=20, y=False).save()
        TestDoc(x=30, y=True).save()

        plist = list(TestDoc.objects.scalar('x', 'y'))

        self.assertEqual(len(plist), 3)
        self.assertEqual(plist[0], (10, True))
        self.assertEqual(plist[1], (20, False))
        self.assertEqual(plist[2], (30, True))

        class UserDoc(Document):
            name = StringField()
            age = IntField()

        UserDoc.drop_collection()

        UserDoc(name="Wilson Jr", age=19).save()
        UserDoc(name="Wilson", age=43).save()
        UserDoc(name="Eliana", age=37).save()
        UserDoc(name="Tayza", age=15).save()

        ulist = list(UserDoc.objects.scalar('name', 'age'))

        self.assertEqual(ulist, [
            (u'Wilson Jr', 19),
            (u'Wilson', 43),
            (u'Eliana', 37),
            (u'Tayza', 15)])

        ulist = list(UserDoc.objects.scalar('name').order_by('age'))

        self.assertEqual(ulist, [
            u'Tayza',
            u'Wilson Jr',
            u'Eliana',
            u'Wilson'])

    def test_scalar_embedded(self):
        class Profile(EmbeddedDocument):
            name = StringField()
            age = IntField()

        class Locale(EmbeddedDocument):
            city = StringField()
            country = StringField()

        class Person(Document):
            profile = EmbeddedDocumentField(Profile)
            locale = EmbeddedDocumentField(Locale)

        Person.drop_collection()

        Person(profile=Profile(name="Wilson Jr", age=19),
               locale=Locale(city="Corumba-GO", country="Brazil")).save()

        Person(profile=Profile(name="Gabriel Falcao", age=23),
               locale=Locale(city="New York", country="USA")).save()

        Person(profile=Profile(name="Lincoln de souza", age=28),
               locale=Locale(city="Belo Horizonte", country="Brazil")).save()

        Person(profile=Profile(name="Walter cruz", age=30),
               locale=Locale(city="Brasilia", country="Brazil")).save()

        self.assertEqual(
            list(Person.objects.order_by('profile__age').scalar('profile__name')),
            [u'Wilson Jr', u'Gabriel Falcao', u'Lincoln de souza', u'Walter cruz'])

        ulist = list(Person.objects.order_by('locale.city')
                     .scalar('profile__name', 'profile__age', 'locale__city'))
        self.assertEqual(ulist,
                         [(u'Lincoln de souza', 28, u'Belo Horizonte'),
                          (u'Walter cruz', 30, u'Brasilia'),
                          (u'Wilson Jr', 19, u'Corumba-GO'),
                          (u'Gabriel Falcao', 23, u'New York')])

    def test_scalar_decimal(self):
        from decimal import Decimal

        class Person(Document):
            name = StringField()
            rating = DecimalField()

        Person.drop_collection()
        Person(name="Wilson Jr", rating=Decimal('1.0')).save()

        ulist = list(Person.objects.scalar('name', 'rating'))
        self.assertEqual(ulist, [(u'Wilson Jr', Decimal('1.0'))])

    def test_scalar_reference_field(self):
        class State(Document):
            name = StringField()

        class Person(Document):
            name = StringField()
            state = ReferenceField(State)

        State.drop_collection()
        Person.drop_collection()

        s1 = State(name="Goias")
        s1.save()

        Person(name="Wilson JR", state=s1).save()

        plist = list(Person.objects.scalar('name', 'state'))
        self.assertEqual(plist, [(u'Wilson JR', s1)])

    def test_scalar_generic_reference_field(self):
        class State(Document):
            name = StringField()

        class Person(Document):
            name = StringField()
            state = GenericReferenceField()

        State.drop_collection()
        Person.drop_collection()

        s1 = State(name="Goias")
        s1.save()

        Person(name="Wilson JR", state=s1).save()

        plist = list(Person.objects.scalar('name', 'state'))
        self.assertEqual(plist, [(u'Wilson JR', s1)])

    def test_scalar_db_field(self):

        class TestDoc(Document):
            x = IntField()
            y = BooleanField()

        TestDoc.drop_collection()

        TestDoc(x=10, y=True).save()
        TestDoc(x=20, y=False).save()
        TestDoc(x=30, y=True).save()

        plist = list(TestDoc.objects.scalar('x', 'y'))
        self.assertEqual(len(plist), 3)
        self.assertEqual(plist[0], (10, True))
        self.assertEqual(plist[1], (20, False))
        self.assertEqual(plist[2], (30, True))

    def test_scalar_primary_key(self):

        class SettingValue(Document):
            key = StringField(primary_key=True)
            value = StringField()

        SettingValue.drop_collection()
        s = SettingValue(key="test", value="test value")
        s.save()

        val = SettingValue.objects.scalar('key', 'value')
        self.assertEqual(list(val), [('test', 'test value')])

    def test_scalar_cursor_behaviour(self):
        """Ensure that a query returns a valid set of results.
        """
        person1 = self.Person(name="User A", age=20)
        person1.save()
        person2 = self.Person(name="User B", age=30)
        person2.save()

        # Find all people in the collection
        people = self.Person.objects.scalar('name')
        self.assertEqual(len(people), 2)
        results = list(people)
        self.assertEqual(results[0], "User A")
        self.assertEqual(results[1], "User B")

        # Use a query to filter the people found to just person1
        people = self.Person.objects(age=20).scalar('name')
        self.assertEqual(len(people), 1)
        person = people.next()
        self.assertEqual(person, "User A")

        # Test limit
        people = list(self.Person.objects.limit(1).scalar('name'))
        self.assertEqual(len(people), 1)
        self.assertEqual(people[0], 'User A')

        # Test skip
        people = list(self.Person.objects.skip(1).scalar('name'))
        self.assertEqual(len(people), 1)
        self.assertEqual(people[0], 'User B')

        person3 = self.Person(name="User C", age=40)
        person3.save()

        # Test slice limit
        people = list(self.Person.objects[:2].scalar('name'))
        self.assertEqual(len(people), 2)
        self.assertEqual(people[0], 'User A')
        self.assertEqual(people[1], 'User B')

        # Test slice skip
        people = list(self.Person.objects[1:].scalar('name'))
        self.assertEqual(len(people), 2)
        self.assertEqual(people[0], 'User B')
        self.assertEqual(people[1], 'User C')

        # Test slice limit and skip
        people = list(self.Person.objects[1:2].scalar('name'))
        self.assertEqual(len(people), 1)
        self.assertEqual(people[0], 'User B')

        people = list(self.Person.objects[1:1].scalar('name'))
        self.assertEqual(len(people), 0)

        # Test slice out of range
        people = list(self.Person.objects.scalar('name')[80000:80001])
        self.assertEqual(len(people), 0)

        # Test larger slice __repr__
        self.Person.objects.delete()
        for i in xrange(55):
            self.Person(name='A%s' % i, age=i).save()

        self.assertEqual(len(self.Person.objects.scalar('name')), 55)
        self.assertEqual("A0", "%s" % self.Person.objects.order_by('name').scalar('name').first())
        self.assertEqual("A0", "%s" % self.Person.objects.scalar('name').order_by('name')[0])
        self.assertEqual("[u'A1', u'A2']", "%s" % self.Person.objects.order_by('age').scalar('name')[1:3])
        self.assertEqual("[u'A51', u'A52']", "%s" % self.Person.objects.order_by('age').scalar('name')[51:53])

        # with_id and in_bulk
        person = self.Person.objects.order_by('name').first()
        self.assertEqual("A0", "%s" % self.Person.objects.scalar('name').with_id(person.id))

        pks = self.Person.objects.order_by('age').scalar('pk')[1:3]
        self.assertEqual("[u'A1', u'A2']", "%s" % sorted(self.Person.objects.scalar('name').in_bulk(list(pks)).values()))


class QTest(unittest.TestCase):

@@ -2790,6 +3399,30 @@ class QueryFieldListTest(unittest.TestCase):
        q += QueryFieldList(fields=['a'], value={"$slice": 5})
        self.assertEqual(q.as_dict(), {'a': {"$slice": 5}})

    def test_elem_match(self):
        class Foo(EmbeddedDocument):
            shape = StringField()
            color = StringField()
            thick = BooleanField()
            meta = {'allow_inheritance': False}

        class Bar(Document):
            foo = ListField(EmbeddedDocumentField(Foo))
            meta = {'allow_inheritance': False}

        Bar.drop_collection()

        b1 = Bar(foo=[Foo(shape="square", color="purple", thick=False),
                      Foo(shape="circle", color="red", thick=True)])
        b1.save()

        b2 = Bar(foo=[Foo(shape="square", color="red", thick=True),
                      Foo(shape="circle", color="purple", thick=False)])
        b2.save()

        ak = list(Bar.objects(foo__match={'shape': "square", "color": "purple"}))
        self.assertEqual([b1], ak)


if __name__ == '__main__':
    unittest.main()
32
tests/test_replicaset_connection.py
Normal file
@@ -0,0 +1,32 @@
import unittest
import pymongo
from pymongo import ReadPreference, ReplicaSetConnection

import mongoengine
from mongoengine import *
from mongoengine.connection import get_db, get_connection, ConnectionError


class ConnectionTest(unittest.TestCase):

    def tearDown(self):
        mongoengine.connection._connection_settings = {}
        mongoengine.connection._connections = {}
        mongoengine.connection._dbs = {}

    def test_replicaset_uri_passes_read_preference(self):
        """Requires a replica set called "rs" on port 27017
        """
        try:
            conn = connect(db='mongoenginetest',
                           host="mongodb://localhost/mongoenginetest?replicaSet=rs",
                           read_preference=ReadPreference.SECONDARY_ONLY)
        except ConnectionError, e:
            return

        if not isinstance(conn, ReplicaSetConnection):
            return

        self.assertEquals(conn.read_preference, ReadPreference.SECONDARY_ONLY)


if __name__ == '__main__':
    unittest.main()
@@ -56,6 +56,18 @@ class SignalTests(unittest.TestCase):
    @classmethod
    def post_delete(cls, sender, document, **kwargs):
        signal_output.append('post_delete signal, %s' % document)

    @classmethod
    def pre_bulk_insert(cls, sender, documents, **kwargs):
        signal_output.append('pre_bulk_insert signal, %s' % documents)

    @classmethod
    def post_bulk_insert(cls, sender, documents, **kwargs):
        signal_output.append('post_bulk_insert signal, %s' % documents)
        if kwargs.get('loaded', False):
            signal_output.append('Is loaded')
        else:
            signal_output.append('Not loaded')
        self.Author = Author

@@ -104,7 +116,9 @@ class SignalTests(unittest.TestCase):
            len(signals.pre_save.receivers),
            len(signals.post_save.receivers),
            len(signals.pre_delete.receivers),
            len(signals.post_delete.receivers),
            len(signals.pre_bulk_insert.receivers),
            len(signals.post_bulk_insert.receivers),
        )

        signals.pre_init.connect(Author.pre_init, sender=Author)
@@ -113,6 +127,8 @@ class SignalTests(unittest.TestCase):
        signals.post_save.connect(Author.post_save, sender=Author)
        signals.pre_delete.connect(Author.pre_delete, sender=Author)
        signals.post_delete.connect(Author.post_delete, sender=Author)
        signals.pre_bulk_insert.connect(Author.pre_bulk_insert, sender=Author)
        signals.post_bulk_insert.connect(Author.post_bulk_insert, sender=Author)

        signals.pre_init.connect(Another.pre_init, sender=Another)
        signals.post_init.connect(Another.post_init, sender=Another)
@@ -128,6 +144,8 @@ class SignalTests(unittest.TestCase):
        signals.pre_delete.disconnect(self.Author.pre_delete)
        signals.post_save.disconnect(self.Author.post_save)
        signals.pre_save.disconnect(self.Author.pre_save)
        signals.pre_bulk_insert.disconnect(self.Author.pre_bulk_insert)
        signals.post_bulk_insert.disconnect(self.Author.post_bulk_insert)

        signals.pre_init.disconnect(self.Another.pre_init)
        signals.post_init.disconnect(self.Another.post_init)
@@ -143,7 +161,9 @@ class SignalTests(unittest.TestCase):
            len(signals.pre_save.receivers),
            len(signals.post_save.receivers),
            len(signals.pre_delete.receivers),
            len(signals.post_delete.receivers),
            len(signals.pre_bulk_insert.receivers),
            len(signals.post_bulk_insert.receivers),
        )

        self.assertEqual(self.pre_signals, post_signals)
@@ -154,6 +174,14 @@ class SignalTests(unittest.TestCase):
        def create_author():
            a1 = self.Author(name='Bill Shakespeare')

        def bulk_create_author_with_load():
            a1 = self.Author(name='Bill Shakespeare')
            self.Author.objects.insert([a1], load_bulk=True)

        def bulk_create_author_without_load():
            a1 = self.Author(name='Bill Shakespeare')
            self.Author.objects.insert([a1], load_bulk=False)

        self.assertEqual(self.get_signal_output(create_author), [
            "pre_init signal, Author",
            "{'name': 'Bill Shakespeare'}",
@@ -178,4 +206,25 @@ class SignalTests(unittest.TestCase):
        self.assertEqual(self.get_signal_output(a1.delete), [
            'pre_delete signal, William Shakespeare',
            'post_delete signal, William Shakespeare',
        ])

        signal_output = self.get_signal_output(bulk_create_author_with_load)

        # The output of this signal is not entirely deterministic. The reloaded
        # object will have an object ID. Hence, we only check part of the output
        self.assertEquals(signal_output[3],
                          "pre_bulk_insert signal, [<Author: Bill Shakespeare>]")
        self.assertEquals(signal_output[-2:],
                          ["post_bulk_insert signal, [<Author: Bill Shakespeare>]",
                           "Is loaded"])

        self.assertEqual(self.get_signal_output(bulk_create_author_without_load), [
            "pre_init signal, Author",
            "{'name': 'Bill Shakespeare'}",
            "post_init signal, Bill Shakespeare",
            "pre_bulk_insert signal, [<Author: Bill Shakespeare>]",
            "post_bulk_insert signal, [<Author: Bill Shakespeare>]",
            "Not loaded",
        ])

        self.Author.objects.delete()