Compare commits
264 Commits
.gitignore (3 changed lines)

@@ -13,4 +13,5 @@ env/
 .settings
 .project
 .pydevproject
-tests/bugfix.py
+tests/test_bugfix.py
+htmlcov/
.travis.yml (new file, 15 lines)

@@ -0,0 +1,15 @@
+# http://travis-ci.org/#!/MongoEngine/mongoengine
+language: python
+python:
+    - 2.5
+    - 2.6
+    - 2.7
+    - 3.1
+    - 3.2
+install:
+    - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo apt-get install zlib1g zlib1g-dev; fi
+    - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo ln -s /usr/lib/i386-linux-gnu/libz.so /usr/lib/; fi
+    - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install PIL --use-mirrors ; true; fi
+    - python setup.py install
+script:
+    - python setup.py test
AUTHORS (22 changed lines)

@@ -8,6 +8,7 @@ Florian Schlachter <flori@n-schlachter.de>
 Steve Challis <steve@stevechallis.com>
 Wilson Júnior <wilsonpjunior@gmail.com>
 Dan Crosta https://github.com/dcrosta
+Laine Herron https://github.com/LaineHerron
 
 CONTRIBUTORS
 
@@ -100,3 +101,24 @@ that much better:
 * Jacob Peddicord
 * Nils Hasenbanck
 * mostlystatic
+* Greg Banks
+* swashbuckler
+* Adam Reeve
+* Anthony Nemitz
+* deignacio
+* shaunduncan
+* Meir Kriheli
+* Andrey Fedoseev
+* aparajita
+* Tristan Escalada
+* Alexander Koshelev
+* Jaime Irurzun
+* Alexandre González
+* Thomas Steinacher
+* Tommi Komulainen
+* Peter Landry
+* biszkoptwielki
+* Anton Kolechkin
+* Sergey Nikitin
+* psychogenic
+* Stefan Wójcik
LICENSE (2 changed lines)

@@ -1,4 +1,4 @@
-Copyright (c) 2009-2010 Harry Marr
+Copyright (c) 2009-2012 See AUTHORS
 
 Permission is hereby granted, free of charge, to any person
 obtaining a copy of this software and associated documentation
README.rst

@@ -2,9 +2,13 @@
 MongoEngine
 ===========
 :Info: MongoEngine is an ORM-like layer on top of PyMongo.
+:Repository: https://github.com/MongoEngine/mongoengine
 :Author: Harry Marr (http://github.com/hmarr)
 :Maintainer: Ross Lawley (http://github.com/rozza)
 
+.. image:: https://secure.travis-ci.org/MongoEngine/mongoengine.png?branch=master
+  :target: http://travis-ci.org/MongoEngine/mongoengine
+
 About
 =====
 MongoEngine is a Python Object-Document Mapper for working with MongoDB.
@@ -22,7 +26,7 @@ setup.py install``.
 
 Dependencies
 ============
-- pymongo 1.1+
+- pymongo 2.1.1+
 - sphinx (optional - for documentation generation)
 
 Examples
@@ -96,3 +100,4 @@ Contributing
 The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ - to
 contribute to the project, fork it on GitHub and send a pull request, all
 contributions and suggestions are welcome!
+
benchmark.py (49 changed lines)

@@ -28,47 +28,64 @@ def main():
 
 ----------------------------------------------------------------------------------------------------
 Creating 10000 dictionaries - Pymongo
-1.1141769886
+3.86744189262
 ----------------------------------------------------------------------------------------------------
 Creating 10000 dictionaries - MongoEngine
-2.37724113464
+6.23374891281
 ----------------------------------------------------------------------------------------------------
 Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
-1.92479610443
+5.33027005196
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
+pass - No Cascade
 
 0.5.X
 ----------------------------------------------------------------------------------------------------
 Creating 10000 dictionaries - Pymongo
-1.10552310944
+3.89597702026
 ----------------------------------------------------------------------------------------------------
 Creating 10000 dictionaries - MongoEngine
-16.5169169903
+21.7735359669
 ----------------------------------------------------------------------------------------------------
 Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
-14.9446101189
+19.8670389652
 ----------------------------------------------------------------------------------------------------
 Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
-14.912801981
+pass - No Cascade
-----------------------------------------------------------------------------------------------------
-Creating 10000 dictionaries - MongoEngine, force=True
-14.9617750645
 
-Performance
+0.6.X
 ----------------------------------------------------------------------------------------------------
 Creating 10000 dictionaries - Pymongo
-1.10072994232
+3.81559205055
 ----------------------------------------------------------------------------------------------------
 Creating 10000 dictionaries - MongoEngine
-5.27341103554
+10.0446798801
 ----------------------------------------------------------------------------------------------------
 Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
-4.49365401268
+9.51354718208
 ----------------------------------------------------------------------------------------------------
 Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
-4.43459296227
+9.02567505836
 ----------------------------------------------------------------------------------------------------
 Creating 10000 dictionaries - MongoEngine, force=True
-4.40114378929
+8.44933390617
 
+0.7.X
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - Pymongo
+3.78801012039
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine
+9.73050498962
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
+8.33456707001
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
+8.37778115273
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine, force=True
+8.36906409264
 """
 
 setup = """
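The timings in the docstring above are wall-clock seconds for creating 10,000 documents. A minimal sketch of the harness pattern, not the repository's benchmark.py verbatim (the `Noddy` document, field shapes, and a running local mongod are assumptions)::

    import time
    from pymongo import Connection
    from mongoengine import Document, DictField, connect

    Connection().drop_database('timeit_test')  # start from an empty database
    connect('timeit_test')

    class Noddy(Document):
        fields = DictField()

    start = time.time()
    for i in xrange(10000):
        noddy = Noddy()
        for j in range(20):
            noddy.fields["key" + str(j)] = "value " + str(j)
        # The option flags being compared in the table above:
        noddy.save(safe=False, validate=False, cascade=False)
    print "Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False"
    print time.time() - start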
docs/apireference.rst

@@ -31,6 +31,9 @@ Documents
 .. autoclass:: mongoengine.document.MapReduceDocument
    :members:
 
+.. autoclass:: mongoengine.ValidationError
+   :members:
+
 Querying
 ========
 
@@ -44,25 +47,28 @@ Querying
 Fields
 ======
 
-.. autoclass:: mongoengine.StringField
-.. autoclass:: mongoengine.URLField
-.. autoclass:: mongoengine.EmailField
-.. autoclass:: mongoengine.IntField
-.. autoclass:: mongoengine.FloatField
-.. autoclass:: mongoengine.DecimalField
-.. autoclass:: mongoengine.DateTimeField
+.. autoclass:: mongoengine.BinaryField
+.. autoclass:: mongoengine.BooleanField
 .. autoclass:: mongoengine.ComplexDateTimeField
-.. autoclass:: mongoengine.ListField
-.. autoclass:: mongoengine.SortedListField
+.. autoclass:: mongoengine.DateTimeField
+.. autoclass:: mongoengine.DecimalField
 .. autoclass:: mongoengine.DictField
+.. autoclass:: mongoengine.DynamicField
+.. autoclass:: mongoengine.EmailField
+.. autoclass:: mongoengine.EmbeddedDocumentField
+.. autoclass:: mongoengine.FileField
+.. autoclass:: mongoengine.FloatField
+.. autoclass:: mongoengine.GenericEmbeddedDocumentField
+.. autoclass:: mongoengine.GenericReferenceField
+.. autoclass:: mongoengine.GeoPointField
+.. autoclass:: mongoengine.ImageField
+.. autoclass:: mongoengine.IntField
+.. autoclass:: mongoengine.ListField
 .. autoclass:: mongoengine.MapField
 .. autoclass:: mongoengine.ObjectIdField
 .. autoclass:: mongoengine.ReferenceField
-.. autoclass:: mongoengine.GenericReferenceField
-.. autoclass:: mongoengine.EmbeddedDocumentField
-.. autoclass:: mongoengine.GenericEmbeddedDocumentField
-.. autoclass:: mongoengine.BooleanField
-.. autoclass:: mongoengine.FileField
-.. autoclass:: mongoengine.BinaryField
-.. autoclass:: mongoengine.GeoPointField
 .. autoclass:: mongoengine.SequenceField
+.. autoclass:: mongoengine.SortedListField
+.. autoclass:: mongoengine.StringField
+.. autoclass:: mongoengine.URLField
+.. autoclass:: mongoengine.UUIDField
docs/changelog.rst

@@ -2,11 +2,147 @@
 Changelog
 =========
 
-Changes in 0.6.X
+Changes in 0.7.X
 =================
+- Added NotUniqueError for duplicate keys (MongoEngine/mongoengine#62)
+- Added custom collection / sequence naming for SequenceFields (MongoEngine/mongoengine#92)
+- Fixed UnboundLocalError in composite index with pk field (MongoEngine/mongoengine#88)
+- Updated ReferenceField's to optionally store ObjectId strings
+  this will become the default in 0.8 (MongoEngine/mongoengine#89)
+- Added FutureWarning - save will default to `cascade=False` in 0.8
+- Added example of indexing embedded document fields (MongoEngine/mongoengine#75)
+- Fixed ImageField resizing when forcing size (MongoEngine/mongoengine#80)
+- Add flexibility for fields handling bad data (MongoEngine/mongoengine#78)
+- Embedded Documents no longer handle meta definitions
+- Use weakref proxies in base lists / dicts (MongoEngine/mongoengine#74)
+- Improved queryset filtering (hmarr/mongoengine#554)
+- Fixed Dynamic Documents and Embedded Documents (hmarr/mongoengine#561)
+- Fixed abstract classes and shard keys (MongoEngine/mongoengine#64)
+- Fixed Python 2.5 support
+- Added Python 3 support (thanks to Laine Herron)
+
+Changes in 0.6.20
+=================
+- Added support for distinct and db_alias (MongoEngine/mongoengine#59)
+- Improved support for chained querysets when constraining the same fields (hmarr/mongoengine#554)
+- Fixed BinaryField lookup re (MongoEngine/mongoengine#48)
+
+Changes in 0.6.19
+=================
+
+- Added Binary support to UUID (MongoEngine/mongoengine#47)
+- Fixed MapField lookup for fields without declared lookups (MongoEngine/mongoengine#46)
+- Fixed BinaryField python value issue (MongoEngine/mongoengine#48)
+- Fixed SequenceField non numeric value lookup (MongoEngine/mongoengine#41)
+- Fixed queryset manager issue (MongoEngine/mongoengine#52)
+- Fixed FileField comparision (hmarr/mongoengine#547)
+
+Changes in 0.6.18
+=================
+- Fixed recursion loading bug in _get_changed_fields
+
+Changes in 0.6.17
+=================
+- Fixed issue with custom queryset manager expecting explict variable names
+
+Changes in 0.6.16
+=================
+- Fixed issue where db_alias wasn't inherited
+
+Changes in 0.6.15
+=================
+- Updated validation error messages
+- Added support for null / zero / false values in item_frequencies
+- Fixed cascade save edge case
+- Fixed geo index creation through reference fields
+- Added support for args / kwargs when using @queryset_manager
+- Deref list custom id fix
+
+Changes in 0.6.14
+=================
+- Fixed error dict with nested validation
+- Fixed Int/Float fields and not equals None
+- Exclude tests from installation
+- Allow tuples for index meta
+- Fixed use of str in instance checks
+- Fixed unicode support in transform update
+- Added support for add_to_set and each
+
+Changes in 0.6.13
+=================
+- Fixed EmbeddedDocument db_field validation issue
+- Fixed StringField unicode issue
+- Fixes __repr__ modifying the cursor
+
+Changes in 0.6.12
+=================
+- Fixes scalar lookups for primary_key
+- Fixes error with _delta handling DBRefs
+
+Changes in 0.6.11
+==================
+- Fixed inconsistency handling None values field attrs
+- Fixed map_field embedded db_field issue
+- Fixed .save() _delta issue with DbRefs
+- Fixed Django TestCase
+- Added cmp to Embedded Document
+- Added PULL reverse_delete_rule
+- Fixed CASCADE delete bug
+- Fixed db_field data load error
+- Fixed recursive save with FileField
+
+Changes in 0.6.10
+=================
+- Fixed basedict / baselist to return super(..)
+- Promoted BaseDynamicField to DynamicField
+
+Changes in 0.6.9
+================
+- Fixed sparse indexes on inherited docs
+- Removed FileField auto deletion, needs more work maybe 0.7
+
+Changes in 0.6.8
+================
+- Fixed FileField losing reference when no default set
+- Removed possible race condition from FileField (grid_file)
+- Added assignment to save, can now do: `b = MyDoc(**kwargs).save()`
+- Added support for pull operations on nested EmbeddedDocuments
+- Added support for choices with GenericReferenceFields
+- Added support for choices with GenericEmbeddedDocumentFields
+- Fixed Django 1.4 sessions first save data loss
+- FileField now automatically delete files on .delete()
+- Fix for GenericReference to_mongo method
+- Fixed connection regression
+- Updated Django User document, now allows inheritance
+
+Changes in 0.6.7
+================
+- Fixed indexing on '_id' or 'pk' or 'id'
+- Invalid data from the DB now raises a InvalidDocumentError
+- Cleaned up the Validation Error - docs and code
+- Added meta `auto_create_index` so you can disable index creation
+- Added write concern options to inserts
+- Fixed typo in meta for index options
+- Bug fix Read preference now passed correctly
+- Added support for File like objects for GridFS
+- Fix for #473 - Dereferencing abstracts
+
+Changes in 0.6.6
+================
+- Django 1.4 fixed (finally)
+- Added tests for Django
+
+Changes in 0.6.5
+================
+- More Django updates
+
+Changes in 0.6.4
 ================
 
-- updated replicasetconnection - pop port if exists
-- bug fix for unknown connection alias error message
+- Refactored connection / fixed replicasetconnection
+- Bug fix for unknown connection alias error message
+- Sessions support Django 1.3 and Django 1.4
+- Minor fix for ReferenceField
 
 Changes in 0.6.3
 ================
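Of the 0.7 entries above, the new `NotUniqueError` is the most visible API addition. A minimal sketch of how it surfaces, assuming a local mongod (the `User` class is illustrative)::

    from mongoengine import Document, StringField, connect
    from mongoengine.queryset import NotUniqueError

    connect('example_db')

    class User(Document):
        username = StringField(required=True, unique=True)

    User.drop_collection()
    User(username='bob').save()
    try:
        User(username='bob').save()  # violates the unique index
    except NotUniqueError:
        print "username already taken"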
docs/guide/defining-documents.rst

@@ -62,28 +62,31 @@ not provided. Default values may optionally be a callable, which will be called
 to retrieve the value (such as in the above example). The field types available
 are as follows:
 
-* :class:`~mongoengine.StringField`
-* :class:`~mongoengine.URLField`
-* :class:`~mongoengine.EmailField`
-* :class:`~mongoengine.IntField`
-* :class:`~mongoengine.FloatField`
-* :class:`~mongoengine.DecimalField`
-* :class:`~mongoengine.DateTimeField`
+* :class:`~mongoengine.BinaryField`
+* :class:`~mongoengine.BooleanField`
 * :class:`~mongoengine.ComplexDateTimeField`
-* :class:`~mongoengine.ListField`
-* :class:`~mongoengine.SortedListField`
+* :class:`~mongoengine.DateTimeField`
+* :class:`~mongoengine.DecimalField`
 * :class:`~mongoengine.DictField`
+* :class:`~mongoengine.DynamicField`
+* :class:`~mongoengine.EmailField`
+* :class:`~mongoengine.EmbeddedDocumentField`
+* :class:`~mongoengine.FileField`
+* :class:`~mongoengine.FloatField`
+* :class:`~mongoengine.GenericEmbeddedDocumentField`
+* :class:`~mongoengine.GenericReferenceField`
+* :class:`~mongoengine.GeoPointField`
+* :class:`~mongoengine.ImageField`
+* :class:`~mongoengine.IntField`
+* :class:`~mongoengine.ListField`
 * :class:`~mongoengine.MapField`
 * :class:`~mongoengine.ObjectIdField`
 * :class:`~mongoengine.ReferenceField`
-* :class:`~mongoengine.GenericReferenceField`
-* :class:`~mongoengine.EmbeddedDocumentField`
-* :class:`~mongoengine.GenericEmbeddedDocumentField`
-* :class:`~mongoengine.BooleanField`
-* :class:`~mongoengine.FileField`
-* :class:`~mongoengine.BinaryField`
-* :class:`~mongoengine.GeoPointField`
 * :class:`~mongoengine.SequenceField`
+* :class:`~mongoengine.SortedListField`
+* :class:`~mongoengine.StringField`
+* :class:`~mongoengine.URLField`
+* :class:`~mongoengine.UUIDField`
 
 Field arguments
 ---------------
@@ -98,7 +101,7 @@ arguments can be set on all fields:
 
 :attr:`required` (Default: False)
     If set to True and the field is not set on the document instance, a
-    :class:`~mongoengine.base.ValidationError` will be raised when the document is
+    :class:`~mongoengine.ValidationError` will be raised when the document is
    validated.
 
 :attr:`default` (Default: None)
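A minimal sketch of the `required` behaviour described in the hunk above (the `Person` class is illustrative; validation runs on save because `validate=True` is the default)::

    from mongoengine import Document, StringField, ValidationError

    class Person(Document):
        name = StringField(required=True)

    try:
        Person().save()  # no name set
    except ValidationError:
        print "name is required"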
@@ -256,6 +259,35 @@ as the constructor's argument::
         content = StringField()
 
 
+.. _one-to-many-with-listfields:
+
+One to Many with ListFields
+'''''''''''''''''''''''''''
+
+If you are implementing a one to many relationship via a list of references,
+then the references are stored as DBRefs and to query you need to pass an
+instance of the object to the query::
+
+    class User(Document):
+        name = StringField()
+
+    class Page(Document):
+        content = StringField()
+        authors = ListField(ReferenceField(User))
+
+    bob = User(name="Bob Jones").save()
+    john = User(name="John Smith").save()
+
+    Page(content="Test Page", authors=[bob, john]).save()
+    Page(content="Another Page", authors=[john]).save()
+
+    # Find all pages Bob authored
+    Page.objects(authors__in=[bob])
+
+    # Find all pages that both Bob and John have authored
+    Page.objects(authors__all=[bob, john])
+
+
 Dealing with deletion of referred documents
 '''''''''''''''''''''''''''''''''''''''''''
 By default, MongoDB doesn't check the integrity of your data, so deleting
@@ -289,6 +321,10 @@ Its value can take any of the following constants:
 :const:`mongoengine.CASCADE`
   Any object containing fields that are refererring to the object being deleted
   are deleted first.
+:const:`mongoengine.PULL`
+  Removes the reference to the object (using MongoDB's "pull" operation)
+  from any object's fields of
+  :class:`~mongoengine.ListField` (:class:`~mongoengine.ReferenceField`).
 
 
 .. warning::
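A sketch of the new :const:`mongoengine.PULL` rule in use, assuming the `reverse_delete_rule` keyword on :class:`~mongoengine.ReferenceField` (the classes are illustrative)::

    from mongoengine import Document, StringField, ListField, ReferenceField, PULL

    class User(Document):
        name = StringField()

    class Page(Document):
        content = StringField()
        # Deleting a User pulls it out of `authors` instead of deleting the Page
        authors = ListField(ReferenceField(User, reverse_delete_rule=PULL))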
@@ -431,11 +467,16 @@ If a dictionary is passed then the following options are available:
 :attr:`unique` (Default: False)
     Whether the index should be sparse.
 
+.. note ::
+
+    To index embedded files / dictionary fields use 'dot' notation eg:
+    `rank.title`
+
 .. warning::
 
-    Inheritance adds extra indices.
-    If don't need inheritance for a document turn inheritance off - see :ref:`document-inheritance`.
+    Inheritance adds extra indices.
+    If don't need inheritance for a document turn inheritance off -
+    see :ref:`document-inheritance`.
 
 
 Geospatial indexes
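A sketch of the 'dot' notation note above (the `Rank` embedded document is illustrative)::

    from mongoengine import (Document, EmbeddedDocument,
                             EmbeddedDocumentField, StringField)

    class Rank(EmbeddedDocument):
        title = StringField()

    class Person(Document):
        rank = EmbeddedDocumentField(Rank)
        meta = {
            'indexes': ['rank.title'],  # reaches into the embedded document
        }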
docs/guide/document-instances.rst

@@ -91,5 +91,5 @@ is an alias to :attr:`id`::
 .. note::
 
    If you define your own primary key field, the field implicitly becomes
-   required, so a :class:`ValidationError` will be thrown if you don't provide
-   it.
+   required, so a :class:`~mongoengine.ValidationError` will be thrown if
+   you don't provide it.
docs/guide/gridfs.rst

@@ -65,7 +65,7 @@ Deleting stored files is achieved with the :func:`delete` method::
 
     marmot.photo.delete()
 
-.. note::
+.. warning::
 
     The FileField in a Document actually only stores the ID of a file in a
     separate GridFS collection. This means that deleting a document
docs/guide/querying.rst

@@ -232,7 +232,7 @@ custom manager methods as you like::
     BlogPost(title='test1', published=False).save()
     BlogPost(title='test2', published=True).save()
     assert len(BlogPost.objects) == 2
-    assert len(BlogPost.live_posts) == 1
+    assert len(BlogPost.live_posts()) == 1
 
 Custom QuerySets
 ================
@@ -243,11 +243,16 @@ a document, set ``queryset_class`` to the custom class in a
 :class:`~mongoengine.Document`\ s ``meta`` dictionary::
 
     class AwesomerQuerySet(QuerySet):
-        pass
+
+        def get_awesome(self):
+            return self.filter(awesome=True)
 
     class Page(Document):
         meta = {'queryset_class': AwesomerQuerySet}
 
+    # To call:
+    Page.objects.get_awesome()
+
 .. versionadded:: 0.4
 
 Aggregation
docs/upgrade.rst

@@ -2,18 +2,79 @@
 Upgrading
 =========
 
+0.6 to 0.7
+==========
+
+Cascade saves
+-------------
+
+Saves will raise a `FutureWarning` if they cascade and cascade hasn't been set
+to True. This is because in 0.8 it will default to False. If you require
+cascading saves then either set it in the `meta` or pass
+via `save` eg ::
+
+    # At the class level:
+    class Person(Document):
+        meta = {'cascade': True}
+
+    # Or in code:
+    my_document.save(cascade=True)
+
+.. note ::
+    Remember: cascading saves **do not** cascade through lists.
+
+ReferenceFields
+---------------
+
+ReferenceFields now can store references as ObjectId strings instead of DBRefs.
+This will become the default in 0.8 and if `dbref` is not set a `FutureWarning`
+will be raised.
+
+
+To explicitly continue to use DBRefs change the `dbref` flag
+to True ::
+
+    class Person(Document):
+        groups = ListField(ReferenceField(Group, dbref=True))
+
+To migrate to using strings instead of DBRefs you will have to manually
+migrate ::
+
+    # Step 1 - Migrate the model definition
+    class Group(Document):
+        author = ReferenceField(User, dbref=False)
+        members = ListField(ReferenceField(User, dbref=False))
+
+    # Step 2 - Migrate the data
+    for g in Group.objects():
+        g.author = g.author
+        g.members = g.members
+        g.save()
+
+
+item_frequencies
+----------------
+
+In the 0.6 series we added support for null / zero / false values in
+item_frequencies. A side effect was to return keys in the value they are
+stored in rather than as string representations. Your code may need to be
+updated to handle native types rather than strings keys for the results of
+item frequency queries.
+
 0.5 to 0.6
 ==========
 
-Embedded Documents - if you had a `pk` field you will have to rename it from `_id`
-to `pk` as pk is no longer a property of Embedded Documents.
+Embedded Documents - if you had a `pk` field you will have to rename it from
+`_id` to `pk` as pk is no longer a property of Embedded Documents.
 
 Reverse Delete Rules in Embedded Documents, MapFields and DictFields now throw
 an InvalidDocument error as they aren't currently supported.
 
-Document._get_subclasses - Is no longer used and the class method has been removed.
+Document._get_subclasses - Is no longer used and the class method has been
+removed.
 
-Document.objects.with_id - now raises an InvalidQueryError if used with a filter.
+Document.objects.with_id - now raises an InvalidQueryError if used with a
+filter.
 
 FutureWarning - A future warning has been added to all inherited classes that
 don't define `allow_inheritance` in their meta.
@@ -37,11 +98,11 @@ human-readable name for the option.
 PyMongo / MongoDB
 -----------------
 
-map reduce now requires pymongo 1.11+- The pymongo merge_output and reduce_output
-parameters, have been depreciated.
+map reduce now requires pymongo 1.11+- The pymongo `merge_output` and
+`reduce_output` parameters, have been depreciated.
 
-More methods now use map_reduce as db.eval is not supported for sharding as such
-the following have been changed:
+More methods now use map_reduce as db.eval is not supported for sharding as
+such the following have been changed:
 
 * :meth:`~mongoengine.queryset.QuerySet.sum`
 * :meth:`~mongoengine.queryset.QuerySet.average`
@@ -51,8 +112,8 @@ the following have been changed:
 Default collection naming
 -------------------------
 
-Previously it was just lowercase, its now much more pythonic and readable as its
-lowercase and underscores, previously ::
+Previously it was just lowercase, its now much more pythonic and readable as
+its lowercase and underscores, previously ::
 
     class MyAceDocument(Document):
         pass
@@ -88,7 +149,8 @@ Alternatively, you can rename your collections eg ::
 
     failure = False
 
-    collection_names = [d._get_collection_name() for d in _document_registry.values()]
+    collection_names = [d._get_collection_name()
+                        for d in _document_registry.values()]
 
     for new_style_name in collection_names:
         if not new_style_name:  # embedded documents don't have collections
@@ -106,7 +168,8 @@ Alternatively, you can rename your collections eg ::
                           old_style_name, new_style_name)
             else:
                 db[old_style_name].rename(new_style_name)
-                print "Renamed: %s to %s" % (old_style_name, new_style_name)
+                print "Renamed: %s to %s" % (old_style_name,
+                                             new_style_name)
 
     if failure:
         print "Upgrading collection names failed"
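A sketch of the item_frequencies change described in the upgrade notes (the `Post` class is illustrative)::

    from mongoengine import Document, IntField

    class Post(Document):
        rating = IntField()

    frequencies = Post.objects.item_frequencies('rating')
    # 0.5.x returned string keys:   {'1': 10, '2': 3, 'None': 1}
    # 0.6.15+ returns native keys:  {1: 10, 2: 3, None: 1}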
mongoengine/__init__.py

@@ -12,13 +12,12 @@ from signals import *
 __all__ = (document.__all__ + fields.__all__ + connection.__all__ +
            queryset.__all__ + signals.__all__)
 
-VERSION = (0, 6, 3)
+VERSION = (0, 7, 'rc1')
 
 
 def get_version():
-    version = '%s.%s' % (VERSION[0], VERSION[1])
-    if VERSION[2]:
-        version = '%s.%s' % (version, VERSION[2])
-    return version
+    if isinstance(VERSION[-1], basestring):
+        return '.'.join(map(str, VERSION[:-1])) + VERSION[-1]
+    return '.'.join(map(str, VERSION))
 
 __version__ = get_version()
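With a version tuple whose last element is a string, such as `(0, 7, 'rc1')`, the suffix is appended without a separating dot; a quick sketch of the expected behaviour::

    >>> import mongoengine
    >>> mongoengine.get_version()
    '0.7rc1'
    >>> mongoengine.__version__
    '0.7rc1'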
(One file diff suppressed because it is too large.)
mongoengine/connection.py

@@ -39,22 +39,7 @@ def register_connection(alias, name, host='localhost', port=27017,
     """
     global _connection_settings
 
-    # Handle uri style connections
-    if "://" in host:
-        uri_dict = uri_parser.parse_uri(host)
-        if uri_dict.get('database') is None:
-            raise ConnectionError("If using URI style connection include "\
-                                  "database name in string")
-        _connection_settings[alias] = {
-            'host': host,
-            'name': uri_dict.get('database'),
-            'username': uri_dict.get('username'),
-            'password': uri_dict.get('password')
-        }
-        _connection_settings[alias].update(kwargs)
-        return
-
-    _connection_settings[alias] = {
+    conn_settings = {
         'name': name,
         'host': host,
         'port': port,
@@ -64,7 +49,25 @@ def register_connection(alias, name, host='localhost', port=27017,
         'password': password,
         'read_preference': read_preference
     }
-    _connection_settings[alias].update(kwargs)
+
+    # Handle uri style connections
+    if "://" in host:
+        uri_dict = uri_parser.parse_uri(host)
+        if uri_dict.get('database') is None:
+            raise ConnectionError("If using URI style connection include "\
+                                  "database name in string")
+        conn_settings.update({
+            'host': host,
+            'name': uri_dict.get('database'),
+            'username': uri_dict.get('username'),
+            'password': uri_dict.get('password'),
+            'read_preference': read_preference,
+        })
+        if "replicaSet" in host:
+            conn_settings['replicaSet'] = True
+
+    conn_settings.update(kwargs)
+    _connection_settings[alias] = conn_settings
 
 
 def disconnect(alias=DEFAULT_CONNECTION_NAME):
@@ -112,7 +115,11 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
         conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
         # Discard port since it can't be used on ReplicaSetConnection
         conn_settings.pop('port', None)
+        # Discard replicaSet if not base string
+        if not isinstance(conn_settings['replicaSet'], basestring):
+            conn_settings.pop('replicaSet', None)
         connection_class = ReplicaSetConnection
 
     try:
         _connections[alias] = connection_class(**conn_settings)
     except Exception, e:
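The refactor above builds one `conn_settings` dict, layers URI-derived values on top, and still applies `kwargs` last. A usage sketch (host, credentials, and database names are illustrative)::

    from mongoengine import connect

    # Plain host/port
    connect('blog_db', host='localhost', port=27017)

    # URI style: the database name must appear in the URI, and a replicaSet
    # option in the host string routes get_connection() to ReplicaSetConnection
    connect('blog_db',
            host='mongodb://user:pass@localhost/blog_db?replicaSet=rs0')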
mongoengine/dereference.py

@@ -34,12 +34,31 @@ class DeReference(object):
 
         doc_type = None
         if instance and instance._fields:
-            doc_type = instance._fields[name].field
+            doc_type = instance._fields[name]
+            if hasattr(doc_type, 'field'):
+                doc_type = doc_type.field
 
         if isinstance(doc_type, ReferenceField):
+            field = doc_type
             doc_type = doc_type.document_type
-            if all([i.__class__ == doc_type for i in items]):
+            is_list = not hasattr(items, 'items')
+
+            if is_list and all([i.__class__ == doc_type for i in items]):
                 return items
+            elif not is_list and all([i.__class__ == doc_type
+                                      for i in items.values()]):
+                return items
+            elif not field.dbref:
+                if not hasattr(items, 'items'):
+                    items = [field.to_python(v)
+                             if not isinstance(v, (DBRef, Document)) else v
+                             for v in items]
+                else:
+                    items = dict([
+                        (k, field.to_python(v))
+                        if not isinstance(v, (DBRef, Document)) else (k, v)
+                        for k, v in items.iteritems()]
+                    )
 
         self.reference_map = self._find_references(items)
         self.object_map = self._fetch_objects(doc_type=doc_type)
@@ -112,6 +131,10 @@ class DeReference(object):
             for ref in references:
                 if '_cls' in ref:
                     doc = get_document(ref["_cls"])._from_son(ref)
+                elif doc_type is None:
+                    doc = get_document(
+                        ''.join(x.capitalize()
+                                for x in col.split('_')))._from_son(ref)
                 else:
                     doc = doc_type._from_son(ref)
                 object_map[doc.id] = doc
@@ -162,7 +185,7 @@ class DeReference(object):
                 else:
                     data[k] = v
 
-                if k in self.object_map:
+                if k in self.object_map and not is_list:
                     data[k] = self.object_map[k]
                 elif hasattr(v, '_fields'):
                     for field_name, field in v._fields.iteritems():
mongoengine/django/auth.py

@@ -1,11 +1,36 @@
+import datetime
+
 from mongoengine import *
+
 from django.utils.encoding import smart_str
 from django.contrib.auth.models import AnonymousUser
-from django.contrib.auth.hashers import check_password, make_password
 from django.utils.translation import ugettext_lazy as _
 
-import datetime
+try:
+    from django.contrib.auth.hashers import check_password, make_password
+except ImportError:
+    """Handle older versions of Django"""
+    from django.utils.hashcompat import md5_constructor, sha_constructor
+
+    def get_hexdigest(algorithm, salt, raw_password):
+        raw_password, salt = smart_str(raw_password), smart_str(salt)
+        if algorithm == 'md5':
+            return md5_constructor(salt + raw_password).hexdigest()
+        elif algorithm == 'sha1':
+            return sha_constructor(salt + raw_password).hexdigest()
+        raise ValueError('Got unknown password algorithm type in password')
+
+    def check_password(raw_password, password):
+        algo, salt, hash = password.split('$')
+        return hash == get_hexdigest(algo, salt, raw_password)
+
+    def make_password(raw_password):
+        from random import random
+        algo = 'sha1'
+        salt = get_hexdigest(algo, str(random()), str(random()))[:5]
+        hash = get_hexdigest(algo, salt, raw_password)
+        return '%s$%s$%s' % (algo, salt, hash)
+
 
 REDIRECT_FIELD_NAME = 'next'
 
@@ -41,6 +66,7 @@ class User(Document):
                                   verbose_name=_('date joined'))
 
     meta = {
+        'allow_inheritance': True,
        'indexes': [
            {'fields': ['username'], 'unique': True}
        ]
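Whichever branch of the try/except wins, the module exposes the same two entry points; a quick sketch of the round trip::

    from mongoengine.django.auth import make_password, check_password

    hashed = make_password('s3cret')  # 'sha1$salt$hash' under the fallback
    assert check_password('s3cret', hashed)
    assert not check_password('wrong', hashed)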
mongoengine/django/sessions.py

@@ -1,3 +1,6 @@
+from datetime import datetime
+
+from django.conf import settings
 from django.contrib.sessions.backends.base import SessionBase, CreateError
 from django.core.exceptions import SuspiciousOperation
 from django.utils.encoding import force_unicode
@@ -6,13 +9,13 @@ from mongoengine.document import Document
 from mongoengine import fields
 from mongoengine.queryset import OperationError
 from mongoengine.connection import DEFAULT_CONNECTION_NAME
-from django.conf import settings
-from datetime import datetime
 
 MONGOENGINE_SESSION_DB_ALIAS = getattr(
     settings, 'MONGOENGINE_SESSION_DB_ALIAS',
     DEFAULT_CONNECTION_NAME)
 
 
 class MongoSession(Document):
     session_key = fields.StringField(primary_key=True, max_length=40)
     session_data = fields.StringField()
@@ -51,9 +54,9 @@ class SessionStore(SessionBase):
             return
 
     def save(self, must_create=False):
-        if self._session_key is None:
-            self.create()
-        s = MongoSession(session_key=self._session_key)
+        if self.session_key is None:
+            self._session_key = self._get_new_session_key()
+        s = MongoSession(session_key=self.session_key)
         s.session_data = self.encode(self._get_session(no_load=must_create))
         s.expire_date = self.get_expiry_date()
         try:
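A sketch of wiring this backend into a Django project; the `SESSION_ENGINE` setting below is an assumption based on the module path, and the alias value is illustrative::

    # settings.py
    SESSION_ENGINE = 'mongoengine.django.sessions'
    MONGOENGINE_SESSION_DB_ALIAS = 'default'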
mongoengine/django/shortcuts.py

@@ -1,4 +1,3 @@
-from django.http import Http404
 from mongoengine.queryset import QuerySet
 from mongoengine.base import BaseDocument
 from mongoengine.base import ValidationError
@@ -27,6 +26,7 @@ def get_document_or_404(cls, *args, **kwargs):
     try:
         return queryset.get(*args, **kwargs)
     except (queryset._document.DoesNotExist, ValidationError):
+        from django.http import Http404
         raise Http404('No %s matches the given query.' % queryset._document._class_name)
 
 def get_list_or_404(cls, *args, **kwargs):
@@ -42,5 +42,6 @@ def get_list_or_404(cls, *args, **kwargs):
     queryset = _get_queryset(cls)
     obj_list = list(queryset.filter(*args, **kwargs))
     if not obj_list:
+        from django.http import Http404
        raise Http404('No %s matches the given query.' % queryset._document._class_name)
     return obj_list
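Usage mirrors Django's own shortcuts; a sketch (the `BlogPost` document and the filters are illustrative)::

    from mongoengine.django.shortcuts import get_document_or_404, get_list_or_404

    post = get_document_or_404(BlogPost, slug='hello-world')  # Http404 if absent
    posts = get_list_or_404(BlogPost, published=True)         # Http404 if empty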
mongoengine/django/tests.py

@@ -1,16 +1,34 @@
 #coding: utf-8
-from django.test import TestCase
-from django.conf import settings
+from nose.plugins.skip import SkipTest
 
+from mongoengine.python_support import PY3
 from mongoengine import connect
 
+try:
+    from django.test import TestCase
+    from django.conf import settings
+except Exception as err:
+    if PY3:
+        from unittest import TestCase
+        # Dummy value so no error
+        class settings:
+            MONGO_DATABASE_NAME = 'dummy'
+    else:
+        raise err
+
 
 class MongoTestCase(TestCase):
+
+    def setUp(self):
+        if PY3:
+            raise SkipTest('django does not have Python 3 support')
+
     """
     TestCase class that clear the collection between the tests
     """
     db_name = 'test_%s' % settings.MONGO_DATABASE_NAME
     def __init__(self, methodName='runtest'):
-        self.db = connect(self.db_name)
+        self.db = connect(self.db_name).get_db()
         super(MongoTestCase, self).__init__(methodName)
 
     def _post_teardown(self):
mongoengine/document.py

@@ -1,14 +1,19 @@
-import pymongo
-from bson.dbref import DBRef
+import warnings
+
+import pymongo
+import re
+
+from bson.dbref import DBRef
+
+from mongoengine import signals, queryset
 
-from mongoengine import signals
 from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument,
                   BaseDict, BaseList)
-from queryset import OperationError
+from queryset import OperationError, NotUniqueError
 from connection import get_db, DEFAULT_CONNECTION_NAME
 
 __all__ = ['Document', 'EmbeddedDocument', 'DynamicDocument',
-           'DynamicEmbeddedDocument', 'OperationError', 'InvalidCollectionError']
+           'DynamicEmbeddedDocument', 'OperationError',
+           'InvalidCollectionError', 'NotUniqueError']
 
 
 class InvalidCollectionError(Exception):
@@ -22,6 +27,9 @@ class EmbeddedDocument:
     :class:`~mongoengine.EmbeddedDocumentField` field type.
     """
 
+    # The __metaclass__ attribute is removed by 2to3 when running with Python3
+    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
+    my_metaclass = DocumentMetaclass
     __metaclass__ = DocumentMetaclass
 
     def __init__(self, *args, **kwargs):
@@ -39,6 +47,11 @@ class EmbeddedDocument:
         else:
             super(EmbeddedDocument, self).__delattr__(*args, **kwargs)
 
+    def __eq__(self, other):
+        if isinstance(other, self.__class__):
+            return self._data == other._data
+        return False
+
 
 class Document(BaseDocument):
     """The base class used for defining the structure and properties of
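The new `__eq__` compares embedded documents by their stored data rather than by identity; a sketch (the `Comment` class is illustrative, and only `==` is shown because `__ne__` is not defined here)::

    from mongoengine import EmbeddedDocument, StringField

    class Comment(EmbeddedDocument):
        content = StringField()

    assert Comment(content='hi') == Comment(content='hi')
    assert not (Comment(content='hi') == Comment(content='bye'))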
@@ -74,14 +87,23 @@ class Document(BaseDocument):
     names. Index direction may be specified by prefixing the field names with
     a **+** or **-** sign.
 
+    Automatic index creation can be disabled by specifying
+    attr:`auto_create_index` in the :attr:`meta` dictionary. If this is set to
+    False then indexes will not be created by MongoEngine.  This is useful in
+    production systems where index creation is performed as part of a deployment
+    system.
+
     By default, _types will be added to the start of every index (that
-    doesn't contain a list) if allow_inheritence is True.  This can be
+    doesn't contain a list) if allow_inheritance is True.  This can be
     disabled by either setting types to False on the specific index or
     by setting index_types to False on the meta dictionary for the document.
     """
 
+    # The __metaclass__ attribute is removed by 2to3 when running with Python3
+    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
+    my_metaclass = TopLevelDocumentMetaclass
     __metaclass__ = TopLevelDocumentMetaclass
 
-    @apply
     def pk():
         """Primary key alias
         """
@@ -90,6 +112,7 @@ class Document(BaseDocument):
         def fset(self, value):
             return setattr(self, self._meta['id_field'], value)
         return property(fget, fset)
+    pk = pk()
 
     @classmethod
     def _get_db(cls):
@@ -115,8 +138,9 @@ class Document(BaseDocument):
             options = cls._collection.options()
             if options.get('max') != max_documents or \
                options.get('size') != max_size:
-                msg = ('Cannot create collection "%s" as a capped '
-                       'collection as it already exists') % cls._collection
+                msg = (('Cannot create collection "%s" as a capped '
+                        'collection as it already exists')
+                       % cls._collection)
                 raise InvalidCollectionError(msg)
         else:
             # Create the collection as a capped collection
@@ -130,8 +154,9 @@ class Document(BaseDocument):
             cls._collection = db[collection_name]
         return cls._collection
 
-    def save(self, safe=True, force_insert=False, validate=True, write_options=None,
-            cascade=None, cascade_kwargs=None, _refs=None):
+    def save(self, safe=True, force_insert=False, validate=True,
+             write_options=None, cascade=None, cascade_kwargs=None,
+             _refs=None):
         """Save the :class:`~mongoengine.Document` to the database. If the
         document already exists, it will be updated, otherwise it will be
         created.
@@ -144,26 +169,30 @@ class Document(BaseDocument):
             updates of existing documents
         :param validate: validates the document; set to ``False`` to skip.
         :param write_options: Extra keyword arguments are passed down to
             :meth:`~pymongo.collection.Collection.save` OR
             :meth:`~pymongo.collection.Collection.insert`
-            which will be used as options for the resultant ``getLastError`` command.
-            For example, ``save(..., w=2, fsync=True)`` will wait until at least two servers
-            have recorded the write and will force an fsync on each server being written to.
-        :param cascade: Sets the flag for cascading saves. You can set a default by setting
-            "cascade" in the document __meta__
-        :param cascade_kwargs: optional kwargs dictionary to be passed throw to cascading saves
+            which will be used as options for the resultant
+            ``getLastError`` command.  For example,
+            ``save(..., write_options={w: 2, fsync: True}, ...)`` will
+            wait until at least two servers have recorded the write and
+            will force an fsync on the primary server.
+        :param cascade: Sets the flag for cascading saves.  You can set a
+            default by setting "cascade" in the document __meta__
+        :param cascade_kwargs: optional kwargs dictionary to be passed throw
+            to cascading saves
         :param _refs: A list of processed references used in cascading saves
 
         .. versionchanged:: 0.5
-            In existing documents it only saves changed fields using set / unset
-            Saves are cascaded and any :class:`~bson.dbref.DBRef` objects
-            that have changes are saved as well.
+            In existing documents it only saves changed fields using
+            set / unset.  Saves are cascaded and any
+            :class:`~bson.dbref.DBRef` objects that have changes are
+            saved as well.
         .. versionchanged:: 0.6
-            Cascade saves are optional = defaults to True, if you want fine grain
-            control then you can turn off using document meta['cascade'] = False
-            Also you can pass different kwargs to the cascade save using cascade_kwargs
-            which overwrites the existing kwargs with custom values
+            Cascade saves are optional = defaults to True, if you want
+            fine grain control then you can turn off using document
+            meta['cascade'] = False  Also you can pass different kwargs to
+            the cascade save using cascade_kwargs which overwrites the
+            existing kwargs with custom values
         """
         signals.pre_save.send(self.__class__, document=self)
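The reworded docstring clarifies that `write_options` is a dict passed through to pymongo's `getLastError` options, and that cascading can be tuned per call. A hedged usage sketch (the `Page` document and `author` reference are invented for illustration):

    page = Page(title='Hello', author=author)
    # Wait until two servers acknowledge the write and fsync the primary.
    page.save(write_options={'w': 2, 'fsync': True})
    # Turn cascading off for this save only, regardless of meta['cascade'].
    page.save(cascade=False)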
@@ -181,13 +210,14 @@ class Document(BaseDocument):
                 collection = self.__class__.objects._collection
                 if created:
                     if force_insert:
-                        object_id = collection.insert(doc, safe=safe, **write_options)
+                        object_id = collection.insert(doc, safe=safe,
+                                                      **write_options)
                     else:
-                        object_id = collection.save(doc, safe=safe, **write_options)
+                        object_id = collection.save(doc, safe=safe,
+                                                    **write_options)
                 else:
                     object_id = doc['_id']
                     updates, removals = self._delta()
 
                     # Need to add shard key to query, or you get an error
                     select_dict = {'_id': object_id}
                     shard_key = self.__class__._meta.get('shard_key', tuple())
@@ -197,11 +227,15 @@ class Document(BaseDocument):
 
                     upsert = self._created
                     if updates:
-                        collection.update(select_dict, {"$set": updates}, upsert=upsert, safe=safe, **write_options)
+                        collection.update(select_dict, {"$set": updates},
+                                          upsert=upsert, safe=safe, **write_options)
                     if removals:
-                        collection.update(select_dict, {"$unset": removals}, upsert=upsert, safe=safe, **write_options)
+                        collection.update(select_dict, {"$unset": removals},
+                                          upsert=upsert, safe=safe, **write_options)
 
-            cascade = self._meta.get('cascade', True) if cascade is None else cascade
+            warn_cascade = not cascade and 'cascade' not in self._meta
+            cascade = (self._meta.get('cascade', True)
+                       if cascade is None else cascade)
             if cascade:
                 kwargs = {
                     "safe": safe,
@@ -213,32 +247,49 @@ class Document(BaseDocument):
                 if cascade_kwargs:  # Allow granular control over cascades
                     kwargs.update(cascade_kwargs)
                 kwargs['_refs'] = _refs
-                self.cascade_save(**kwargs)
+                self.cascade_save(warn_cascade=warn_cascade, **kwargs)
 
         except pymongo.errors.OperationFailure, err:
             message = 'Could not save document (%s)'
-            if u'duplicate key' in unicode(err):
+            if re.match('^E1100[01] duplicate key', unicode(err)):
+                # E11000 - duplicate key error index
+                # E11001 - duplicate key on update
                 message = u'Tried to save duplicate unique keys (%s)'
+                raise NotUniqueError(message % unicode(err))
             raise OperationError(message % unicode(err))
         id_field = self._meta['id_field']
-        self[id_field] = self._fields[id_field].to_python(object_id)
+        if id_field not in self._meta.get('shard_key', []):
+            self[id_field] = self._fields[id_field].to_python(object_id)
 
         self._changed_fields = []
         self._created = False
         signals.post_save.send(self.__class__, document=self, created=created)
+        return self
 
-    def cascade_save(self, *args, **kwargs):
-        """Recursively saves any references / generic references on an object"""
-        from fields import ReferenceField, GenericReferenceField
+    def cascade_save(self, warn_cascade=None, *args, **kwargs):
+        """Recursively saves any references /
+        generic references on an objects"""
+        import fields
         _refs = kwargs.get('_refs', []) or []
 
         for name, cls in self._fields.items():
-            if not isinstance(cls, (ReferenceField, GenericReferenceField)):
+            if not isinstance(cls, (fields.ReferenceField,
+                                    fields.GenericReferenceField)):
                 continue
 
             ref = getattr(self, name)
-            if not ref:
+            if not ref or isinstance(ref, DBRef):
                 continue
+
+            if not getattr(ref, '_changed_fields', True):
+                continue
+
             ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data))
             if ref and ref_id not in _refs:
+                if warn_cascade:
+                    msg = ("Cascading saves will default to off in 0.8, "
+                           "please explicitly set `.save(cascade=True)`")
+                    warnings.warn(msg, FutureWarning)
                 _refs.append(ref_id)
                 kwargs["_refs"] = _refs
                 ref.save(**kwargs)
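Duplicate-key failures (MongoDB errors E11000/E11001) are now raised as the more specific `NotUniqueError`, which subclasses `OperationError`, so existing handlers keep working. A sketch, assuming a document with a unique field:

    from mongoengine.queryset import NotUniqueError

    class User(Document):
        email = StringField(unique=True)

    User(email='a@example.com').save()
    try:
        User(email='a@example.com').save()
    except NotUniqueError, err:
        # Code catching OperationError still works, since NotUniqueError
        # subclasses it.
        print 'duplicate:', err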
@@ -283,8 +334,8 @@ class Document(BaseDocument):
 
         .. versionadded:: 0.5
         """
-        from dereference import DeReference
-        self._data = DeReference()(self._data, max_depth)
+        import dereference
+        self._data = dereference.DeReference()(self._data, max_depth)
         return self
 
     def reload(self, max_depth=1):
@@ -332,17 +383,18 @@ class Document(BaseDocument):
         """This method registers the delete rules to apply when removing this
         object.
         """
-        cls._meta['delete_rules'][(document_cls, field_name)] = rule
+        delete_rules = cls._meta.get('delete_rules') or {}
+        delete_rules[(document_cls, field_name)] = rule
+        cls._meta['delete_rules'] = delete_rules
 
     @classmethod
     def drop_collection(cls):
         """Drops the entire collection associated with this
         :class:`~mongoengine.Document` type from the database.
         """
-        from mongoengine.queryset import QuerySet
         db = cls._get_db()
         db.drop_collection(cls._get_collection_name())
-        QuerySet._reset_already_indexed(cls)
+        queryset.QuerySet._reset_already_indexed(cls)
 
 
 class DynamicDocument(Document):
@@ -351,14 +403,19 @@ class DynamicDocument(Document):
     way as an ordinary document but has expando style properties.  Any data
     passed or set against the :class:`~mongoengine.DynamicDocument` that is
     not a field is automatically converted into a
-    :class:`~mongoengine.BaseDynamicField` and data can be attributed to that
+    :class:`~mongoengine.DynamicField` and data can be attributed to that
     field.
 
-    ..note::
+    .. note::
+
         There is one caveat on Dynamic Documents: fields cannot start with `_`
     """
 
+    # The __metaclass__ attribute is removed by 2to3 when running with Python3
+    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
+    my_metaclass = TopLevelDocumentMetaclass
     __metaclass__ = TopLevelDocumentMetaclass
 
     _dynamic = True
 
     def __delattr__(self, *args, **kwargs):
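For reference, the expando behaviour the corrected docstring describes: attributes that are not declared fields become dynamic fields on assignment. A small sketch (the `Person` class is invented for illustration):

    class Person(DynamicDocument):
        name = StringField()

    p = Person(name='Ross')
    p.age = 35   # not a declared field; handled via DynamicField
    p.save()
    # Per the caveat above, attribute names starting with `_` are not
    # treated as dynamic fields.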
@@ -377,7 +434,11 @@ class DynamicEmbeddedDocument(EmbeddedDocument):
     information about dynamic documents.
     """
 
+    # The __metaclass__ attribute is removed by 2to3 when running with Python3
+    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
+    my_metaclass = DocumentMetaclass
     __metaclass__ = DocumentMetaclass
 
     _dynamic = True
 
     def __delattr__(self, *args, **kwargs):
@@ -1,18 +1,22 @@
 import datetime
-import time
 import decimal
-import gridfs
 import re
+import time
 import uuid
+import warnings
+import itertools
+from operator import itemgetter
+
+import gridfs
 from bson import Binary, DBRef, SON, ObjectId
+
+from mongoengine.python_support import (PY3, bin_type, txt_type,
+                                        str_types, StringIO)
 from base import (BaseField, ComplexBaseField, ObjectIdField,
                   ValidationError, get_document, BaseDocument)
 from queryset import DO_NOTHING, QuerySet
 from document import Document, EmbeddedDocument
 from connection import get_db, DEFAULT_CONNECTION_NAME
-from operator import itemgetter
 
 
 try:
@@ -21,16 +25,10 @@ except ImportError:
     Image = None
     ImageOps = None
 
-try:
-    from cStringIO import StringIO
-except ImportError:
-    from StringIO import StringIO
-
-
 __all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField',
            'DateTimeField', 'EmbeddedDocumentField', 'ListField', 'DictField',
            'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField',
-           'DecimalField', 'ComplexDateTimeField', 'URLField',
+           'DecimalField', 'ComplexDateTimeField', 'URLField', 'DynamicField',
            'GenericReferenceField', 'FileField', 'BinaryField',
            'SortedListField', 'EmailField', 'GeoPointField', 'ImageField',
            'SequenceField', 'UUIDField', 'GenericEmbeddedDocumentField']
@@ -49,10 +47,16 @@ class StringField(BaseField):
         super(StringField, self).__init__(**kwargs)
 
     def to_python(self, value):
-        return unicode(value)
+        if isinstance(value, unicode):
+            return value
+        try:
+            value = value.decode('utf-8')
+        except:
+            pass
+        return value
 
     def validate(self, value):
-        if not isinstance(value, (str, unicode)):
+        if not isinstance(value, basestring):
             self.error('StringField only accepts string values')
 
         if self.max_length is not None and len(value) > self.max_length:
@@ -149,7 +153,11 @@ class IntField(BaseField):
         super(IntField, self).__init__(**kwargs)
 
     def to_python(self, value):
-        return int(value)
+        try:
+            value = int(value)
+        except ValueError:
+            pass
+        return value
 
     def validate(self, value):
         try:
@@ -164,6 +172,9 @@ class IntField(BaseField):
             self.error('Integer value is too large')
 
     def prepare_query_value(self, op, value):
+        if value is None:
+            return value
+
         return int(value)
@@ -176,13 +187,17 @@ class FloatField(BaseField):
         super(FloatField, self).__init__(**kwargs)
 
     def to_python(self, value):
-        return float(value)
+        try:
+            value = float(value)
+        except ValueError:
+            pass
+        return value
 
     def validate(self, value):
         if isinstance(value, int):
             value = float(value)
         if not isinstance(value, float):
-            self.error('FoatField only accepts float values')
+            self.error('FloatField only accepts float values')
 
         if self.min_value is not None and value < self.min_value:
             self.error('Float value is too small')
@@ -191,6 +206,9 @@ class FloatField(BaseField):
             self.error('Float value is too large')
 
     def prepare_query_value(self, op, value):
+        if value is None:
+            return value
+
         return float(value)
@@ -205,9 +223,14 @@ class DecimalField(BaseField):
         super(DecimalField, self).__init__(**kwargs)
 
     def to_python(self, value):
+        original_value = value
         if not isinstance(value, basestring):
             value = unicode(value)
-        return decimal.Decimal(value)
+        try:
+            value = decimal.Decimal(value)
+        except ValueError:
+            return original_value
+        return value
 
     def to_mongo(self, value):
         return unicode(value)
@@ -235,7 +258,11 @@ class BooleanField(BaseField):
     """
 
     def to_python(self, value):
-        return bool(value)
+        try:
+            value = bool(value)
+        except ValueError:
+            pass
+        return value
 
     def validate(self, value):
         if not isinstance(value, bool):
@@ -366,10 +393,12 @@ class ComplexDateTimeField(StringField):
         data = super(ComplexDateTimeField, self).__get__(instance, owner)
         if data == None:
             return datetime.datetime.now()
+        if isinstance(data, datetime.datetime):
+            return data
         return self._convert_from_string(data)
 
     def __set__(self, instance, value):
-        value = self._convert_from_datetime(value)
+        value = self._convert_from_datetime(value) if value else value
         return super(ComplexDateTimeField, self).__set__(instance, value)
 
     def validate(self, value):
@@ -378,7 +407,11 @@ class ComplexDateTimeField(StringField):
                        'ComplexDateTimeField')
 
     def to_python(self, value):
-        return self._convert_from_string(value)
+        original_value = value
+        try:
+            return self._convert_from_string(value)
+        except:
+            return original_value
 
     def to_mongo(self, value):
         return self._convert_from_datetime(value)
@@ -441,6 +474,10 @@ class GenericEmbeddedDocumentField(BaseField):
     :class:`~mongoengine.EmbeddedDocument` to be stored.
 
     Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`.
+
+    .. note ::
+        You can use the choices param to limit the acceptable
+        EmbeddedDocument types
     """
 
     def prepare_query_value(self, op, value):
@@ -470,10 +507,56 @@ class GenericEmbeddedDocumentField(BaseField):
         return data
 
 
+class DynamicField(BaseField):
+    """A truly dynamic field type capable of handling different and varying
+    types of data.
+
+    Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data"""
+
+    def to_mongo(self, value):
+        """Convert a Python type to a MongoDB compatible type.
+        """
+
+        if isinstance(value, basestring):
+            return value
+
+        if hasattr(value, 'to_mongo'):
+            return value.to_mongo()
+
+        if not isinstance(value, (dict, list, tuple)):
+            return value
+
+        is_list = False
+        if not hasattr(value, 'items'):
+            is_list = True
+            value = dict([(k, v) for k, v in enumerate(value)])
+
+        data = {}
+        for k, v in value.items():
+            data[k] = self.to_mongo(v)
+
+        if is_list:  # Convert back to a list
+            value = [v for k, v in sorted(data.items(), key=itemgetter(0))]
+        else:
+            value = data
+        return value
+
+    def lookup_member(self, member_name):
+        return member_name
+
+    def prepare_query_value(self, op, value):
+        if isinstance(value, basestring):
+            from mongoengine.fields import StringField
+            return StringField().prepare_query_value(op, value)
+        return self.to_mongo(value)
+
+
 class ListField(ComplexBaseField):
     """A list field that wraps a standard field, allowing multiple instances
     of the field to be used as a list in the database.
 
+    If using with ReferenceFields see: :ref:`one-to-many-with-listfields`
+
     .. note::
         Required means it cannot be empty - as the default for ListFields is []
     """
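The new `DynamicField.to_mongo` recurses through dicts, lists and tuples, converting each element; lists and tuples are round-tripped through a dict keyed by index so ordering survives, and anything with its own `to_mongo()` (such as an embedded document) is delegated to. A sketch of the output this would produce, under the patch above:

    field = DynamicField()
    field.to_mongo({'tags': ['a', 'b'], 'depth': (1, 2)})
    # => {'tags': ['a', 'b'], 'depth': [1, 2]}
    # Tuples come back as lists, since the index-keyed dict is
    # rebuilt with a list comprehension.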
@@ -612,13 +695,29 @@ class ReferenceField(BaseField):
       * NULLIFY     - Updates the reference to null.
       * CASCADE     - Deletes the documents associated with the reference.
       * DENY        - Prevent the deletion of the reference object.
+      * PULL        - Pull the reference from a :class:`~mongoengine.ListField`
+                      of references
+
+    Alternative syntax for registering delete rules (useful when implementing
+    bi-directional delete rules)
+
+    .. code-block:: python
+
+        class Bar(Document):
+            content = StringField()
+            foo = ReferenceField('Foo')
+
+        Bar.register_delete_rule(Foo, 'bar', NULLIFY)
 
     .. versionchanged:: 0.5 added `reverse_delete_rule`
     """
 
-    def __init__(self, document_type, reverse_delete_rule=DO_NOTHING, **kwargs):
+    def __init__(self, document_type, dbref=None,
+                 reverse_delete_rule=DO_NOTHING, **kwargs):
         """Initialises the Reference Field.
 
+        :param dbref:  Store the reference as :class:`~pymongo.dbref.DBRef`
+          or as the :class:`~pymongo.objectid.ObjectId`.id .
         :param reverse_delete_rule: Determines what to do when the referring
           object is deleted
         """
@@ -626,6 +725,13 @@ class ReferenceField(BaseField):
         if not issubclass(document_type, (Document, basestring)):
             self.error('Argument to ReferenceField constructor must be a '
                        'document class or a string')
+
+        if dbref is None:
+            msg = ("ReferenceFields will default to using ObjectId "
+                   " strings in 0.8, set DBRef=True if this isn't desired")
+            warnings.warn(msg, FutureWarning)
+
+        self.dbref = dbref if dbref is not None else True  # To change in 0.8
         self.document_type_obj = document_type
         self.reverse_delete_rule = reverse_delete_rule
         super(ReferenceField, self).__init__(**kwargs)
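With the new `dbref` flag, a reference can be stored either as a full `DBRef` (the current default, with a `FutureWarning` when the flag is left unset) or as the referenced document's id. A hedged sketch (model names invented):

    class Foo(Document):
        pass

    class Bar(Document):
        # Being explicit silences the FutureWarning; dbref=False stores
        # the referenced document's id instead of a DBRef.
        foo = ReferenceField(Foo, dbref=False)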
@@ -648,8 +754,9 @@ class ReferenceField(BaseField):
 
         # Get value from document instance if available
         value = instance._data.get(self.name)
+
         # Dereference DBRefs
-        if isinstance(value, (DBRef)):
+        if isinstance(value, DBRef):
             value = self.document_type._get_db().dereference(value)
             if value is not None:
                 instance._data[self.name] = self.document_type._from_son(value)
@@ -657,12 +764,19 @@ class ReferenceField(BaseField):
         return super(ReferenceField, self).__get__(instance, owner)
 
     def to_mongo(self, document):
+        if isinstance(document, DBRef):
+            if not self.dbref:
+                return "%s" % DBRef.id
+            return document
+        elif not self.dbref and isinstance(document, basestring):
+            return document
+
         id_field_name = self.document_type._meta['id_field']
         id_field = self.document_type._fields[id_field_name]
 
         if isinstance(document, Document):
             # We need the id from the saved object to create the DBRef
-            id_ = document.id
+            id_ = document.pk
             if id_ is None:
                 self.error('You can only reference documents once they have'
                            ' been saved to the database')
@@ -670,18 +784,30 @@ class ReferenceField(BaseField):
             id_ = document
 
         id_ = id_field.to_mongo(id_)
-        collection = self.document_type._get_collection_name()
-        return DBRef(collection, id_)
+        if self.dbref:
+            collection = self.document_type._get_collection_name()
+            return DBRef(collection, id_)
+
+        return "%s" % id_
+
+    def to_python(self, value):
+        """Convert a MongoDB-compatible type to a Python type.
+        """
+        if (not self.dbref and
+            not isinstance(value, (DBRef, Document, EmbeddedDocument))):
+            collection = self.document_type._get_collection_name()
+            value = DBRef(collection, self.document_type.id.to_python(value))
+        return value
 
     def prepare_query_value(self, op, value):
         if value is None:
             return None
 
         return self.to_mongo(value)
 
     def validate(self, value):
 
         if not isinstance(value, (self.document_type, DBRef)):
-            self.error('A ReferenceField only accepts DBRef')
+            self.error("A ReferenceField only accepts DBRef or documents")
 
         if isinstance(value, Document) and value.id is None:
             self.error('You can only reference documents once they have been '
@@ -695,8 +821,12 @@ class GenericReferenceField(BaseField):
     """A reference to *any* :class:`~mongoengine.document.Document` subclass
     that will be automatically dereferenced on access (lazily).
 
-    ..note :: Any documents used as a generic reference must be registered in the
-      document registry.  Importing the model will automatically register it.
+    .. note ::
+        * Any documents used as a generic reference must be registered in the
+          document registry.  Importing the model will automatically register
+          it.
+
+        * You can use the choices param to limit the acceptable Document types
 
     .. versionadded:: 0.3
     """
@@ -732,6 +862,9 @@ class GenericReferenceField(BaseField):
         if document is None:
             return None
 
+        if isinstance(document, (dict, SON)):
+            return document
+
         id_field_name = document.__class__._meta['id_field']
         id_field = document.__class__._fields[id_field_name]
@@ -764,16 +897,20 @@ class BinaryField(BaseField):
         self.max_bytes = max_bytes
         super(BinaryField, self).__init__(**kwargs)
 
+    def __set__(self, instance, value):
+        """Handle bytearrays in python 3.1"""
+        if PY3 and isinstance(value, bytearray):
+            value = bin_type(value)
+        return super(BinaryField, self).__set__(instance, value)
+
     def to_mongo(self, value):
         return Binary(value)
 
-    def to_python(self, value):
-        # Returns str not unicode as this is binary data
-        return str(value)
-
     def validate(self, value):
-        if not isinstance(value, str):
-            self.error('BinaryField only accepts string values')
+        if not isinstance(value, (bin_type, txt_type, Binary)):
+            self.error("BinaryField only accepts instances of "
+                       "(%s, %s, Binary)" % (
+                        bin_type.__name__, txt_type.__name__))
 
         if self.max_bytes is not None and len(value) > self.max_bytes:
             self.error('Binary value is too long')
@@ -826,6 +963,17 @@ class GridFSProxy(object):
         self_dict['_fs'] = None
         return self_dict
 
+    def __repr__(self):
+        return '<%s: %s>' % (self.__class__.__name__, self.grid_id)
+
+    def __eq__(self, other):
+        if isinstance(other, GridFSProxy):
+            return ((self.grid_id == other.grid_id) and
+                    (self.collection_name == other.collection_name) and
+                    (self.db_alias == other.db_alias))
+        else:
+            return False
+
     @property
     def fs(self):
         if not self._fs:
@@ -872,10 +1020,14 @@ class GridFSProxy(object):
             self.newfile.writelines(lines)
 
     def read(self, size=-1):
-        try:
-            return self.get().read(size)
-        except:
+        gridout = self.get()
+        if gridout is None:
             return None
+        else:
+            try:
+                return gridout.read(size)
+            except:
+                return ""
 
     def delete(self):
         # Delete file from GridFS, FileField still remains
@@ -920,19 +1072,21 @@ class FileField(BaseField):
 
         # Check if a file already exists for this model
         grid_file = instance._data.get(self.name)
-        self.grid_file = grid_file
-        if isinstance(self.grid_file, self.proxy_class):
-            if not self.grid_file.key:
-                self.grid_file.key = self.name
-                self.grid_file.instance = instance
-            return self.grid_file
-        return self.proxy_class(key=self.name, instance=instance,
-                                db_alias=self.db_alias,
-                                collection_name=self.collection_name)
+        if not isinstance(grid_file, self.proxy_class):
+            grid_file = self.proxy_class(key=self.name, instance=instance,
+                                         db_alias=self.db_alias,
+                                         collection_name=self.collection_name)
+            instance._data[self.name] = grid_file
+
+        if not grid_file.key:
+            grid_file.key = self.name
+            grid_file.instance = instance
+        return grid_file
 
     def __set__(self, instance, value):
         key = self.name
-        if isinstance(value, file) or isinstance(value, str):
+        if ((hasattr(value, 'read') and not
+             isinstance(value, GridFSProxy)) or isinstance(value, str_types)):
             # using "FileField() = file/string" notation
             grid_file = instance._data.get(self.name)
             # If a file already exists, delete it
@@ -988,6 +1142,7 @@ class ImageGridFsProxy(GridFSProxy):
 
         try:
             img = Image.open(file_obj)
+            img_format = img.format
         except:
             raise ValidationError('Invalid image')
 
@@ -1022,20 +1177,20 @@ class ImageGridFsProxy(GridFSProxy):
 
         if thumbnail:
             thumb_id = self._put_thumbnail(thumbnail,
-                                           img.format)
+                                           img_format)
         else:
             thumb_id = None
 
         w, h = img.size
 
         io = StringIO()
-        img.save(io, img.format)
+        img.save(io, img_format)
         io.seek(0)
 
         return super(ImageGridFsProxy, self).put(io,
                                                  width=w,
                                                  height=h,
-                                                 format=img.format,
+                                                 format=img_format,
                                                  thumbnail_id=thumb_id,
                                                  **kwargs)
 
@@ -1121,11 +1276,15 @@ class ImageField(FileField):
         params_size = ('width', 'height', 'force')
         extra_args = dict(size=size, thumbnail_size=thumbnail_size)
         for att_name, att in extra_args.items():
-            if att and (isinstance(att, tuple) or isinstance(att, list)):
-                setattr(self, att_name, dict(
-                        map(None, params_size, att)))
-            else:
-                setattr(self, att_name, None)
+            value = None
+            if isinstance(att, (tuple, list)):
+                if PY3:
+                    value = dict(itertools.zip_longest(params_size, att,
+                                                       fillvalue=None))
+                else:
+                    value = dict(map(None, params_size, att))
+
+            setattr(self, att_name, value)
 
         super(ImageField, self).__init__(
             collection_name=collection_name,
@@ -1167,18 +1326,19 @@ class SequenceField(IntField):
 
     .. versionadded:: 0.5
     """
-    def __init__(self, collection_name=None, db_alias = None, *args, **kwargs):
+    def __init__(self, collection_name=None, db_alias = None, sequence_name = None, *args, **kwargs):
         self.collection_name = collection_name or 'mongoengine.counters'
         self.db_alias = db_alias or DEFAULT_CONNECTION_NAME
+        self.sequence_name = sequence_name
         return super(SequenceField, self).__init__(*args, **kwargs)
 
     def generate_new_value(self):
         """
        Generate and Increment the counter
        """
-        sequence_id = "{0}.{1}".format(self.owner_document._get_collection_name(),
-                                       self.name)
-        collection = get_db(alias = self.db_alias )[self.collection_name]
+        sequence_name = self.sequence_name or self.owner_document._get_collection_name()
+        sequence_id = "%s.%s" % (sequence_name, self.name)
+        collection = get_db(alias=self.db_alias)[self.collection_name]
         counter = collection.find_and_modify(query={"_id": sequence_id},
                                              update={"$inc": {"next": 1}},
                                              new=True,
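The new `sequence_name` parameter lets several documents draw from one shared counter instead of the per-collection default. A sketch (names invented):

    class Invoice(Document):
        # Both classes increment the same counter, stored under the id
        # 'ticket.number' in the mongoengine.counters collection.
        number = SequenceField(sequence_name='ticket')

    class CreditNote(Document):
        number = SequenceField(sequence_name='ticket')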
@@ -1200,7 +1360,7 @@ class SequenceField(IntField):
         instance._data[self.name] = value
         instance._mark_as_changed(self.name)
 
-        return value
+        return int(value) if value else None
 
     def __set__(self, instance, value):
 
@@ -1220,17 +1380,44 @@ class UUIDField(BaseField):
 
     .. versionadded:: 0.6
     """
+    _binary = None
 
-    def __init__(self, **kwargs):
+    def __init__(self, binary=None, **kwargs):
+        """
+        Store UUID data in the database
+
+        :param binary: (optional) boolean store as binary.
+
+        .. versionchanged:: 0.6.19
+        """
+        if binary is None:
+            binary = False
+            msg = ("UUIDFields will soon default to store as binary, please "
+                   "configure binary=False if you wish to store as a string")
+            warnings.warn(msg, FutureWarning)
+        self._binary = binary
         super(UUIDField, self).__init__(**kwargs)
 
     def to_python(self, value):
-        if not isinstance(value, basestring):
-            value = unicode(value)
-        return uuid.UUID(value)
+        if not self._binary:
+            original_value = value
+            try:
+                if not isinstance(value, basestring):
+                    value = unicode(value)
+                return uuid.UUID(value)
+            except:
+                return original_value
+        return value
 
     def to_mongo(self, value):
-        return unicode(value)
+        if not self._binary:
+            return unicode(value)
+        return value
+
+    def prepare_query_value(self, op, value):
+        if value is None:
+            return None
+        return self.to_mongo(value)
 
     def validate(self, value):
         if not isinstance(value, uuid.UUID):
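The new `binary` flag controls whether UUIDs are stored as unicode strings (the historical behaviour) or natively; leaving it unset keeps string storage but emits a `FutureWarning`. A sketch (the `Session` class is invented):

    import uuid

    class Session(Document):
        # Explicit binary=False keeps the legacy unicode-string storage
        # and silences the FutureWarning.
        token = UUIDField(binary=False, default=uuid.uuid4)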
mongoengine/python_support.py (new file, 60 lines)
@@ -0,0 +1,60 @@
"""Helper functions and types to aid with Python 2.5 - 3 support."""
|
||||||
|
|
||||||
|
import sys
|
||||||
|
|
||||||
|
PY3 = sys.version_info[0] == 3
|
||||||
|
PY25 = sys.version_info[:2] == (2, 5)
|
||||||
|
|
||||||
|
if PY3:
|
||||||
|
import codecs
|
||||||
|
from io import BytesIO as StringIO
|
||||||
|
# return s converted to binary. b('test') should be equivalent to b'test'
|
||||||
|
def b(s):
|
||||||
|
return codecs.latin_1_encode(s)[0]
|
||||||
|
|
||||||
|
bin_type = bytes
|
||||||
|
txt_type = str
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
from cStringIO import StringIO
|
||||||
|
except ImportError:
|
||||||
|
from StringIO import StringIO
|
||||||
|
|
||||||
|
# Conversion to binary only necessary in Python 3
|
||||||
|
def b(s):
|
||||||
|
return s
|
||||||
|
|
||||||
|
bin_type = str
|
||||||
|
txt_type = unicode
|
||||||
|
|
||||||
|
str_types = (bin_type, txt_type)
|
||||||
|
|
||||||
|
if PY25:
|
||||||
|
def product(*args, **kwds):
|
||||||
|
pools = map(tuple, args) * kwds.get('repeat', 1)
|
||||||
|
result = [[]]
|
||||||
|
for pool in pools:
|
||||||
|
result = [x + [y] for x in result for y in pool]
|
||||||
|
for prod in result:
|
||||||
|
yield tuple(prod)
|
||||||
|
reduce = reduce
|
||||||
|
else:
|
||||||
|
from itertools import product
|
||||||
|
from functools import reduce
|
||||||
|
|
||||||
|
|
||||||
|
# For use with Python 2.5
|
||||||
|
# converts all keys from unicode to str for d and all nested dictionaries
|
||||||
|
def to_str_keys_recursive(d):
|
||||||
|
if isinstance(d, list):
|
||||||
|
for val in d:
|
||||||
|
if isinstance(val, (dict, list)):
|
||||||
|
to_str_keys_recursive(val)
|
||||||
|
elif isinstance(d, dict):
|
||||||
|
for key, val in d.items():
|
||||||
|
if isinstance(val, (dict, list)):
|
||||||
|
to_str_keys_recursive(val)
|
||||||
|
if isinstance(key, unicode):
|
||||||
|
d[str(key)] = d.pop(key)
|
||||||
|
else:
|
||||||
|
raise ValueError("non list/dict parameter not allowed")
|
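The new compatibility module centralises the 2.x/3.x shims the fields module now imports (`StringIO`, `b()`, `bin_type`/`txt_type`, and a `product`/`reduce` backport for Python 2.5). A usage sketch:

    from mongoengine.python_support import b, bin_type, str_types, product

    raw = b('binary payload')   # bytes on Python 3, str on Python 2
    assert isinstance(raw, bin_type)
    assert isinstance(u'text', str_types)
    # itertools.product on 2.6+, or the pure-Python 2.5 backport
    pairs = list(product([1, 2], 'ab'))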
@@ -4,13 +4,18 @@ import copy
 import itertools
 import operator
 
+from collections import defaultdict
+from functools import partial
+
+from mongoengine.python_support import product, reduce
+
 import pymongo
 from bson.code import Code
 
 from mongoengine import signals
 
 __all__ = ['queryset_manager', 'Q', 'InvalidQueryError',
-           'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY']
+           'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL']
 
 
 # The maximum number of items to display in a QuerySet.__repr__
@@ -21,6 +26,7 @@ DO_NOTHING = 0
 NULLIFY = 1
 CASCADE = 2
 DENY = 3
+PULL = 4
 
 
 class DoesNotExist(Exception):
@@ -39,6 +45,10 @@ class OperationError(Exception):
     pass
 
 
+class NotUniqueError(OperationError):
+    pass
+
+
 RE_TYPE = type(re.compile(''))
 
 
@@ -117,7 +127,7 @@ class QueryTreeTransformerVisitor(QNodeVisitor):
         # the necessary parts.  Then for each $or part, create a new query
         # that ANDs the necessary part with the $or part.
         clauses = []
-        for or_group in itertools.product(*or_groups):
+        for or_group in product(*or_groups):
             q_object = reduce(lambda a, b: a & b, and_parts, Q())
             q_object = reduce(lambda a, b: a & b, or_group, q_object)
             clauses.append(q_object)
@@ -326,6 +336,7 @@ class QuerySet(object):
     """
 
     __already_indexed = set()
+    __dereference = False
 
     def __init__(self, document, collection):
         self._document = document
@@ -340,11 +351,12 @@ class QuerySet(object):
         self._timeout = True
         self._class_check = True
         self._slave_okay = False
+        self._iter = False
         self._scalar = []
 
         # If inheritance is allowed, only return instances and instances of
         # subclasses of the class being used
-        if document._meta.get('allow_inheritance'):
+        if document._meta.get('allow_inheritance') != False:
             self._initial_query = {'_types': self._document._class_name}
             self._loaded_fields = QueryFieldList(always_include=['_cls'])
         self._cursor_obj = None
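Note the semantic shift in the `allow_inheritance` test: the `_types` filter is now applied whenever the flag is anything but an explicit `False`, so documents that never set it are treated as inheritable. A sketch of the two cases:

    class Animal(Document):
        meta = {'allow_inheritance': False}   # queries skip the _types filter

    class Vehicle(Document):
        pass   # flag unset: queries now include {'_types': 'Vehicle'}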
@@ -394,61 +406,6 @@ class QuerySet(object):
                 unique=index_spec.get('unique', False))
         return self
 
-    @classmethod
-    def _build_index_spec(cls, doc_cls, spec):
-        """Build a PyMongo index spec from a MongoEngine index spec.
-        """
-        if isinstance(spec, basestring):
-            spec = {'fields': [spec]}
-        if isinstance(spec, (list, tuple)):
-            spec = {'fields': spec}
-
-        index_list = []
-        use_types = doc_cls._meta.get('allow_inheritance', True)
-        for key in spec['fields']:
-            # Get ASCENDING direction from +, DESCENDING from -, and GEO2D from *
-            direction = pymongo.ASCENDING
-            if key.startswith("-"):
-                direction = pymongo.DESCENDING
-            elif key.startswith("*"):
-                direction = pymongo.GEO2D
-            if key.startswith(("+", "-", "*")):
-                key = key[1:]
-
-            # Use real field name, do it manually because we need field
-            # objects for the next part (list field checking)
-            parts = key.split('.')
-            fields = QuerySet._lookup_field(doc_cls, parts)
-            parts = [field.db_field for field in fields]
-            key = '.'.join(parts)
-            index_list.append((key, direction))
-
-            # Check if a list field is being used, don't use _types if it is
-            if use_types and not all(f._index_with_types for f in fields):
-                use_types = False
-
-        # If _types is being used, prepend it to every specified index
-        index_types = doc_cls._meta.get('index_types', True)
-        allow_inheritance = doc_cls._meta.get('allow_inheritance')
-        if spec.get('types', index_types) and allow_inheritance and use_types and direction is not pymongo.GEO2D:
-            index_list.insert(0, ('_types', 1))
-
-        spec['fields'] = index_list
-
-        if spec.get('sparse', False) and len(spec['fields']) > 1:
-            raise ValueError(
-                'Sparse indexes can only have one field in them. '
-                'See https://jira.mongodb.org/browse/SERVER-2193')
-
-        return spec
-
-    @classmethod
-    def _reset_already_indexed(cls, document=None):
-        """Helper to reset already indexed, can be useful for testing purposes"""
-        if document:
-            cls.__already_indexed.discard(document)
-        cls.__already_indexed.clear()
-
     def __call__(self, q_obj=None, class_check=True, slave_okay=False, **query):
         """Filter the selected documents by calling the
         :class:`~mongoengine.queryset.QuerySet` with a query.
@@ -481,13 +438,133 @@ class QuerySet(object):
|
|||||||
"""Returns all documents."""
|
"""Returns all documents."""
|
||||||
return self.__call__()
|
return self.__call__()
|
||||||
|
|
||||||
|
def _ensure_indexes(self):
|
||||||
|
"""Checks the document meta data and ensures all the indexes exist.
|
||||||
|
|
||||||
|
.. note:: You can disable automatic index creation by setting
|
||||||
|
`auto_create_index` to False in the documents meta data
|
||||||
|
"""
|
||||||
|
background = self._document._meta.get('index_background', False)
|
||||||
|
drop_dups = self._document._meta.get('index_drop_dups', False)
|
||||||
|
index_opts = self._document._meta.get('index_opts') or {}
|
||||||
|
index_types = self._document._meta.get('index_types', True)
|
||||||
|
|
||||||
|
# determine if an index which we are creating includes
|
||||||
|
# _type as its first field; if so, we can avoid creating
|
||||||
|
# an extra index on _type, as mongodb will use the existing
|
||||||
|
# index to service queries against _type
|
||||||
|
types_indexed = False
|
||||||
|
|
||||||
|
def includes_types(fields):
|
||||||
|
first_field = None
|
||||||
|
if len(fields):
|
||||||
|
if isinstance(fields[0], basestring):
|
||||||
|
first_field = fields[0]
|
||||||
|
elif isinstance(fields[0], (list, tuple)) and len(fields[0]):
|
||||||
|
first_field = fields[0][0]
|
||||||
|
return first_field == '_types'
|
||||||
|
|
||||||
|
# Ensure indexes created by uniqueness constraints
|
||||||
|
for index in self._document._meta['unique_indexes']:
|
||||||
|
types_indexed = types_indexed or includes_types(index)
|
||||||
|
self._collection.ensure_index(index, unique=True,
|
||||||
|
background=background, drop_dups=drop_dups, **index_opts)
|
||||||
|
|
||||||
|
# Ensure document-defined indexes are created
|
||||||
|
if self._document._meta['index_specs']:
|
||||||
|
for spec in self._document._meta['index_specs']:
|
||||||
|
types_indexed = types_indexed or includes_types(spec['fields'])
|
||||||
|
opts = index_opts.copy()
|
||||||
|
opts['unique'] = spec.get('unique', False)
|
||||||
|
opts['sparse'] = spec.get('sparse', False)
|
||||||
|
self._collection.ensure_index(spec['fields'],
|
||||||
|
background=background, **opts)
|
||||||
|
|
||||||
|
# If _types is being used (for polymorphism), it needs an index,
|
||||||
|
# only if another index doesn't begin with _types
|
||||||
|
if index_types and '_types' in self._query and not types_indexed:
|
||||||
|
self._collection.ensure_index('_types',
|
||||||
|
background=background, **index_opts)
|
||||||
|
|
||||||
|
# Add geo indicies
|
||||||
|
for field in self._document._geo_indices():
|
||||||
|
index_spec = [(field.db_field, pymongo.GEO2D)]
|
||||||
|
self._collection.ensure_index(index_spec,
|
||||||
|
background=background, **index_opts)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _build_index_spec(cls, doc_cls, spec):
|
||||||
|
"""Build a PyMongo index spec from a MongoEngine index spec.
|
||||||
|
"""
|
||||||
|
if isinstance(spec, basestring):
|
||||||
|
spec = {'fields': [spec]}
|
||||||
|
if isinstance(spec, (list, tuple)):
|
||||||
|
spec = {'fields': spec}
|
||||||
|
|
||||||
|
index_list = []
|
||||||
|
direction = None
|
||||||
|
|
||||||
|
allow_inheritance = doc_cls._meta.get('allow_inheritance') != False
|
||||||
|
|
||||||
|
# If sparse - dont include types
|
||||||
|
use_types = allow_inheritance and not spec.get('sparse', False)
|
||||||
|
|
||||||
|
for key in spec['fields']:
|
||||||
|
# Get ASCENDING direction from +, DESCENDING from -, and GEO2D from *
|
||||||
|
direction = pymongo.ASCENDING
|
||||||
|
if key.startswith("-"):
|
||||||
|
direction = pymongo.DESCENDING
|
||||||
|
elif key.startswith("*"):
|
||||||
|
direction = pymongo.GEO2D
|
||||||
|
if key.startswith(("+", "-", "*")):
|
||||||
|
key = key[1:]
|
||||||
|
|
||||||
|
# Use real field name, do it manually because we need field
|
||||||
|
# objects for the next part (list field checking)
|
||||||
|
parts = key.split('.')
|
||||||
|
if parts in (['pk'], ['id'], ['_id']):
|
||||||
|
key = '_id'
|
||||||
|
fields = []
|
||||||
|
else:
|
||||||
|
fields = QuerySet._lookup_field(doc_cls, parts)
|
||||||
|
parts = [field if field == '_id' else field.db_field
|
||||||
|
for field in fields]
|
||||||
|
key = '.'.join(parts)
|
||||||
|
index_list.append((key, direction))
|
||||||
|
|
||||||
|
# Check if a list field is being used, don't use _types if it is
|
||||||
|
if use_types and not all(f._index_with_types for f in fields):
|
||||||
|
use_types = False
|
||||||
|
|
||||||
|
# If _types is being used, prepend it to every specified index
|
||||||
|
index_types = doc_cls._meta.get('index_types', True)
|
||||||
|
|
||||||
|
if (spec.get('types', index_types) and use_types
|
||||||
|
and direction is not pymongo.GEO2D):
|
||||||
|
index_list.insert(0, ('_types', 1))
|
||||||
|
|
||||||
|
spec['fields'] = index_list
|
||||||
|
if spec.get('sparse', False) and len(spec['fields']) > 1:
|
||||||
|
raise ValueError(
|
||||||
|
'Sparse indexes can only have one field in them. '
|
||||||
|
'See https://jira.mongodb.org/browse/SERVER-2193')
|
||||||
|
|
||||||
|
return spec
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _reset_already_indexed(cls, document=None):
|
||||||
|
"""Helper to reset already indexed, can be useful for testing purposes"""
|
||||||
|
if document:
|
||||||
|
cls.__already_indexed.discard(document)
|
||||||
|
cls.__already_indexed.clear()
|
||||||
|
|
||||||
|
|
||||||
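For orientation, the index syntax `_build_index_spec` parses above uses a `+`, `-`, or `*` prefix to pick the direction. A minimal sketch of a document using it (the model and fields are illustrative, not part of this changeset):

from mongoengine import Document, StringField, GeoPointField

class Place(Document):
    name = StringField()
    category = StringField()
    location = GeoPointField()
    meta = {
        'indexes': [
            '+name',                # single field, ascending
            ('category', '-name'),  # compound: category asc, name desc
            '*location',            # 2D geo index (pymongo.GEO2D)
        ],
    }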
    @property
    def _collection(self):
        """Property that returns the collection object. This allows us to
        perform operations only if the collection is accessed.
        """
        if self._document not in QuerySet.__already_indexed:

            # Ensure collection exists
            db = self._document._get_db()
            if self._collection_obj.name not in db.collection_names():
@@ -496,52 +573,8 @@ class QuerySet(object):
             QuerySet.__already_indexed.add(self._document)

-            background = self._document._meta.get('index_background', False)
-            drop_dups = self._document._meta.get('index_drop_dups', False)
-            index_opts = self._document._meta.get('index_options', {})
-            index_types = self._document._meta.get('index_types', True)
-
-            # determine if an index which we are creating includes
-            # _type as its first field; if so, we can avoid creating
-            # an extra index on _type, as mongodb will use the existing
-            # index to service queries against _type
-            types_indexed = False
-
-            def includes_types(fields):
-                first_field = None
-                if len(fields):
-                    if isinstance(fields[0], basestring):
-                        first_field = fields[0]
-                    elif isinstance(fields[0], (list, tuple)) and len(fields[0]):
-                        first_field = fields[0][0]
-                return first_field == '_types'
-
-            # Ensure indexes created by uniqueness constraints
-            for index in self._document._meta['unique_indexes']:
-                types_indexed = types_indexed or includes_types(index)
-                self._collection.ensure_index(index, unique=True,
-                    background=background, drop_dups=drop_dups, **index_opts)
-
-            # Ensure document-defined indexes are created
-            if self._document._meta['indexes']:
-                for spec in self._document._meta['indexes']:
-                    types_indexed = types_indexed or includes_types(spec['fields'])
-                    opts = index_opts.copy()
-                    opts['unique'] = spec.get('unique', False)
-                    opts['sparse'] = spec.get('sparse', False)
-                    self._collection.ensure_index(spec['fields'],
-                        background=background, **opts)
-
-            # If _types is being used (for polymorphism), it needs an index,
-            # only if another index doesn't begin with _types
-            if index_types and '_types' in self._query and not types_indexed:
-                self._collection.ensure_index('_types',
-                    background=background, **index_opts)
-
-            # Add geo indicies
-            for field in self._document._geo_indices():
-                index_spec = [(field.db_field, pymongo.GEO2D)]
-                self._collection.ensure_index(index_spec,
-                    background=background, **index_opts)
+            if self._document._meta.get('auto_create_index', True):
+                self._ensure_indexes()

         return self._collection_obj
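This hunk moves index creation out of `_collection` and behind the new `auto_create_index` meta flag. A minimal sketch of opting out (document name is illustrative):

from mongoengine import Document, StringField

class LogEntry(Document):
    message = StringField()
    meta = {
        # Skip ensure_index calls on first collection access; indexes
        # are then expected to be managed out-of-band (e.g. ops scripts).
        'auto_create_index': False,
    }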
@@ -580,7 +613,6 @@ class QuerySet(object):
         if self._hint != -1:
             self._cursor_obj.hint(self._hint)

         return self._cursor_obj

     @classmethod
@@ -603,6 +635,7 @@ class QuerySet(object):
                         "Can't use index on unsubscriptable field (%s)" % err)
                 fields.append(field_name)
                 continue

             if field is None:
                 # Look up first field from the document
                 if field_name == 'pk':
@@ -611,8 +644,8 @@ class QuerySet(object):
                 if field_name in document._fields:
                     field = document._fields[field_name]
                 elif document._dynamic:
-                    from base import BaseDynamicField
-                    field = BaseDynamicField(db_field=field_name)
+                    from fields import DynamicField
+                    field = DynamicField(db_field=field_name)
                 else:
                     raise InvalidQueryError('Cannot resolve field "%s"'
                                             % field_name)
@@ -620,8 +653,11 @@ class QuerySet(object):
                 from mongoengine.fields import ReferenceField, GenericReferenceField
                 if isinstance(field, (ReferenceField, GenericReferenceField)):
                     raise InvalidQueryError('Cannot perform join in mongoDB: %s' % '__'.join(parts))
-                # Look up subfield on the previous field
-                new_field = field.lookup_member(field_name)
+                if hasattr(getattr(field, 'field', None), 'lookup_member'):
+                    new_field = field.field.lookup_member(field_name)
+                else:
+                    # Look up subfield on the previous field
+                    new_field = field.lookup_member(field_name)
                 from base import ComplexBaseField
                 if not new_field and isinstance(field, ComplexBaseField):
                     fields.append(field_name)
@@ -654,6 +690,7 @@ class QuerySet(object):
         custom_operators = ['match']

         mongo_query = {}
+        merge_query = defaultdict(list)
         for key, value in query.items():
             if key == "__raw__":
                 mongo_query.update(value)
@@ -680,7 +717,7 @@ class QuerySet(object):
         cleaned_fields = []
         for field in fields:
             append_field = True
-            if isinstance(field, str):
+            if isinstance(field, basestring):
                 parts.append(field)
                 append_field = False
             else:
@@ -741,8 +778,23 @@ class QuerySet(object):
                 key = '.'.join(parts)
                 if op is None or key not in mongo_query:
                     mongo_query[key] = value
-                elif key in mongo_query and isinstance(mongo_query[key], dict):
-                    mongo_query[key].update(value)
+                elif key in mongo_query:
+                    if key in mongo_query and isinstance(mongo_query[key], dict):
+                        mongo_query[key].update(value)
+                    else:
+                        # Store for manually merging later
+                        merge_query[key].append(value)
+
+        # The queryset has been filter in such a way we must manually merge
+        for k, v in merge_query.items():
+            merge_query[k].append(mongo_query[k])
+            del mongo_query[k]
+            if isinstance(v, list):
+                value = [{k: val} for val in v]
+                if '$and' in mongo_query.keys():
+                    mongo_query['$and'].append(value)
+                else:
+                    mongo_query['$and'] = value

         return mongo_query
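The new `merge_query` path handles repeated keys whose values cannot be folded into one dict; they are re-emitted under `$and`. A sketch of a query shape that should exercise it (illustrative model; the resulting raw query is shown approximately):

from mongoengine import Document, ListField, StringField
from mongoengine.queryset import Q

class Post(Document):
    tags = ListField(StringField())

# An equality and an operator condition on the same key can't share one
# dict value, so the transform falls back to roughly:
#   {'$and': [{'tags': {'$ne': 'mongo'}}, {'tags': 'db'}]}
qs = Post.objects(Q(tags='db') & Q(tags__ne='mongo'))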
@@ -781,15 +833,19 @@ class QuerySet(object):
         dictionary of default values for the new document may be provided as a
         keyword argument called :attr:`defaults`.

+        .. note:: This requires two separate operations and therefore a
+            race condition exists. Because there are no transactions in mongoDB
+            other approaches should be investigated, to ensure you don't
+            accidently duplicate data when using this method.
+
         :param write_options: optional extra keyword arguments used if we
             have to create a new document.
             Passes any write_options onto :meth:`~mongoengine.Document.save`

-        .. versionadded:: 0.3
-
         :param auto_save: if the object is to be saved automatically if not found.

-        .. versionadded:: 0.6
+        .. versionchanged:: 0.6 - added `auto_save`
+        .. versionadded:: 0.3
         """
         defaults = query.get('defaults', {})
         if 'defaults' in query:
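Usage, for context (illustrative model; the race-condition note above still applies, since this is a find followed by a save):

from mongoengine import Document, StringField, IntField

class Person(Document):
    name = StringField()
    age = IntField()

# Two operations, no lock: fetch first, save a new document if no match.
person, created = Person.objects.get_or_create(
    name='Ross', defaults={'age': 30}, auto_save=True)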
@@ -824,11 +880,21 @@ class QuerySet(object):
             result = None
         return result

-    def insert(self, doc_or_docs, load_bulk=True):
+    def insert(self, doc_or_docs, load_bulk=True, safe=False, write_options=None):
         """bulk insert documents

+        If ``safe=True`` and the operation is unsuccessful, an
+        :class:`~mongoengine.OperationError` will be raised.
+
         :param docs_or_doc: a document or list of documents to be inserted
         :param load_bulk (optional): If True returns the list of document instances
+        :param safe: check if the operation succeeded before returning
+        :param write_options: Extra keyword arguments are passed down to
+                :meth:`~pymongo.collection.Collection.insert`
+                which will be used as options for the resultant ``getLastError`` command.
+                For example, ``insert(..., {w: 2, fsync: True})`` will wait until at least two
+                servers have recorded the write and will force an fsync on each server being
+                written to.

         By default returns document instances, set ``load_bulk`` to False to
         return just ``ObjectIds``
@@ -837,6 +903,10 @@ class QuerySet(object):
         """
         from document import Document

+        if not write_options:
+            write_options = {}
+        write_options.update({'safe': safe})
+
         docs = doc_or_docs
         return_one = False
         if isinstance(docs, Document) or issubclass(docs.__class__, Document):
@@ -854,7 +924,16 @@ class QuerySet(object):
             raw.append(doc.to_mongo())

         signals.pre_bulk_insert.send(self._document, documents=docs)
-        ids = self._collection.insert(raw)
+        try:
+            ids = self._collection.insert(raw, **write_options)
+        except pymongo.errors.OperationFailure, err:
+            message = 'Could not save document (%s)'
+            if re.match('^E1100[01] duplicate key', unicode(err)):
+                # E11000 - duplicate key error index
+                # E11001 - duplicate key on update
+                message = u'Tried to save duplicate unique keys (%s)'
+                raise NotUniqueError(message % unicode(err))
+            raise OperationError(message % unicode(err))

         if not load_bulk:
             signals.post_bulk_insert.send(
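A sketch of the new error path from the caller's side (illustrative model; `NotUniqueError` subclasses `OperationError`, so catching the base covers both):

from mongoengine import Document, StringField
from mongoengine.queryset import OperationError

class User(Document):
    name = StringField(unique=True)

users = [User(name='u%d' % i) for i in range(3)]
try:
    # With safe=True a duplicate unique key now raises instead of failing
    # silently; write_options flows through to pymongo's getLastError.
    User.objects.insert(users, load_bulk=False, safe=True,
                        write_options={'w': 2, 'fsync': True})
except OperationError, err:
    print 'bulk insert failed: %s' % err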
@@ -907,6 +986,7 @@ class QuerySet(object):
     def next(self):
         """Wrap the result in a :class:`~mongoengine.Document` object.
         """
+        self._iter = True
         try:
             if self._limit == 0:
                 raise StopIteration
@@ -923,6 +1003,7 @@ class QuerySet(object):

         .. versionadded:: 0.3
         """
+        self._iter = False
         self._cursor.rewind()

     def count(self):
@@ -951,6 +1032,8 @@ class QuerySet(object):
                        :class:`~bson.code.Code` or string
         :param output: output collection name, if set to 'inline' will try to
                        use :class:`~pymongo.collection.Collection.inline_map_reduce`
+                       This can also be a dictionary containing output options
+                       see: http://docs.mongodb.org/manual/reference/commands/#mapReduce
         :param finalize_f: finalize function, an optional function that
                            performs any post-reduction processing.
         :param scope: values to insert into map/reduce global scope. Optional.
@@ -1102,9 +1185,10 @@ class QuerySet(object):

         .. versionadded:: 0.4
         .. versionchanged:: 0.5 - Fixed handling references
+        .. versionchanged:: 0.6 - Improved db_field refrence handling
         """
-        from dereference import DeReference
-        return DeReference()(self._cursor.distinct(field), 1)
+        return self._dereference(self._cursor.distinct(field), 1,
+                                 name=field, instance=self._document)

     def only(self, *fields):
         """Load only a subset of this document's fields. ::
@@ -1259,9 +1343,10 @@ class QuerySet(object):
         """
         doc = self._document

+        delete_rules = doc._meta.get('delete_rules') or {}
         # Check for DENY rules before actually deleting/nullifying any other
         # references
-        for rule_entry in doc._meta['delete_rules']:
+        for rule_entry in delete_rules:
             document_cls, field_name = rule_entry
             rule = doc._meta['delete_rules'][rule_entry]
             if rule == DENY and document_cls.objects(**{field_name + '__in': self}).count() > 0:
@@ -1269,15 +1354,23 @@ class QuerySet(object):
                   (document_cls.__name__, field_name)
             raise OperationError(msg)

-        for rule_entry in doc._meta['delete_rules']:
+        for rule_entry in delete_rules:
             document_cls, field_name = rule_entry
             rule = doc._meta['delete_rules'][rule_entry]
             if rule == CASCADE:
-                document_cls.objects(**{field_name + '__in': self}).delete(safe=safe)
+                ref_q = document_cls.objects(**{field_name + '__in': self})
+                ref_q_count = ref_q.count()
+                if (doc != document_cls and ref_q_count > 0
+                    or (doc == document_cls and ref_q_count > 0)):
+                    ref_q.delete(safe=safe)
             elif rule == NULLIFY:
                 document_cls.objects(**{field_name + '__in': self}).update(
                         safe_update=safe,
                         **{'unset__%s' % field_name: 1})
+            elif rule == PULL:
+                document_cls.objects(**{field_name + '__in': self}).update(
+                        safe_update=safe,
+                        **{'pull_all__%s' % field_name: self})

         self._collection.remove(self._query, safe=safe)
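The new `PULL` rule is registered the same way as the existing reverse delete rules; a minimal sketch (models are illustrative):

from mongoengine import Document, StringField, ListField, ReferenceField
from mongoengine.queryset import PULL

class User(Document):
    name = StringField()

class Group(Document):
    # Deleting a User now pulls its reference out of members rather than
    # cascading into, or nullifying, the whole Group document.
    members = ListField(ReferenceField(User, reverse_delete_rule=PULL))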
@@ -1318,7 +1411,7 @@ class QuerySet(object):
         cleaned_fields = []
         for field in fields:
             append_field = True
-            if isinstance(field, str):
+            if isinstance(field, basestring):
                 # Convert the S operator to $
                 if field == 'S':
                     field = '$'
@@ -1332,20 +1425,36 @@ class QuerySet(object):
             # Convert value to proper value
             field = cleaned_fields[-1]

-            if op in (None, 'set', 'push', 'pull', 'addToSet'):
+            if op in (None, 'set', 'push', 'pull'):
                 if field.required or value is not None:
                     value = field.prepare_query_value(op, value)
             elif op in ('pushAll', 'pullAll'):
                 value = [field.prepare_query_value(op, v) for v in value]
+            elif op == 'addToSet':
+                if isinstance(value, (list, tuple, set)):
+                    value = [field.prepare_query_value(op, v) for v in value]
+                elif field.required or value is not None:
+                    value = field.prepare_query_value(op, value)

             key = '.'.join(parts)

             if not op:
                 raise InvalidQueryError("Updates must supply an operation eg: set__FIELD=value")

-            if op:
+            if 'pull' in op and '.' in key:
+                # Dot operators don't work on pull operations
+                # it uses nested dict syntax
+                if op == 'pullAll':
+                    raise InvalidQueryError("pullAll operations only support a single field depth")
+
+                parts.reverse()
+                for key in parts:
+                    value = {key: value}
+            elif op == 'addToSet' and isinstance(value, list):
+                value = {key: {"$each": value}}
+            else:
                 value = {key: value}
-                key = '$' + op
+            key = '$' + op

             if key not in mongo_update:
                 mongo_update[key] = value
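With the `$each` wrapping above, a single `add_to_set` update can add several distinct values at once; a sketch (illustrative model):

from mongoengine import Document, ListField, StringField

class Post(Document):
    tags = ListField(StringField())

# Transformed into {'$addToSet': {'tags': {'$each': ['db', 'mongo']}}}
Post.objects.update(add_to_set__tags=['db', 'mongo'])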
@@ -1435,8 +1544,6 @@ class QuerySet(object):
         def lookup(obj, name):
             chunks = name.split('__')
             for chunk in chunks:
-                if hasattr(obj, '_db_field_map'):
-                    chunk = obj._db_field_map.get(chunk, chunk)
                 obj = getattr(obj, chunk)
             return obj
@@ -1648,10 +1755,11 @@ class QuerySet(object):
     def _item_frequencies_map_reduce(self, field, normalize=False):
         map_func = """
             function() {
-                path = '{{~%(field)s}}'.split('.');
-                field = this;
+                var path = '{{~%(field)s}}'.split('.');
+                var field = this;

                 for (p in path) {
-                    if (field)
+                    if (typeof field != 'undefined')
                        field = field[path[p]];
                     else
                        break;
@@ -1660,7 +1768,7 @@ class QuerySet(object):
                     field.forEach(function(item) {
                         emit(item, 1);
                     });
-                } else if (field) {
+                } else if (typeof field != 'undefined') {
                     emit(field, 1);
                 } else {
                     emit(null, 1);
@@ -1684,12 +1792,12 @@ class QuerySet(object):
             if isinstance(key, float):
                 if int(key) == key:
                     key = int(key)
-                key = str(key)
-            frequencies[key] = f.value
+            frequencies[key] = int(f.value)

         if normalize:
             count = sum(frequencies.values())
-            frequencies = dict([(k, v / count) for k, v in frequencies.items()])
+            frequencies = dict([(k, float(v) / count)
+                                for k, v in frequencies.items()])

         return frequencies
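Usage, for context (reusing the illustrative Post model from the earlier sketches):

freqs = Post.objects.item_frequencies('tags', normalize=True)
# e.g. {'db': 0.5, 'mongo': 0.25, 'python': 0.25} - float ratios summing
# to 1.0 with normalize=True, plain int counts otherwise.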
@@ -1697,31 +1805,28 @@ class QuerySet(object):
         """Uses exec_js to execute"""
         freq_func = """
             function(path) {
-                path = path.split('.');
+                var path = path.split('.');

-                if (options.normalize) {
-                    var total = 0.0;
-                    db[collection].find(query).forEach(function(doc) {
-                        field = doc;
-                        for (p in path) {
-                            if (field)
-                                field = field[path[p]];
-                            else
-                                break;
-                        }
-                        if (field && field.constructor == Array) {
-                            total += field.length;
-                        } else {
-                            total++;
-                        }
-                    });
-                }
+                var total = 0.0;
+                db[collection].find(query).forEach(function(doc) {
+                    var field = doc;
+                    for (p in path) {
+                        if (field)
+                            field = field[path[p]];
+                        else
+                            break;
+                    }
+                    if (field && field.constructor == Array) {
+                        total += field.length;
+                    } else {
+                        total++;
+                    }
+                });

                 var frequencies = {};
+                var types = {};
                 var inc = 1.0;
-                if (options.normalize) {
-                    inc /= total;
-                }
                 db[collection].find(query).forEach(function(doc) {
                     field = doc;
                     for (p in path) {
@@ -1736,34 +1841,48 @@ class QuerySet(object):
                     });
                 } else {
                     var item = field;
+                    types[item] = item;
                     frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]);
                 }
             });
-            return frequencies;
+            return [total, frequencies, types];
         }
         """
-        data = self.exec_js(freq_func, field, normalize=normalize)
-        if 'undefined' in data:
-            data[None] = data['undefined']
-            del(data['undefined'])
-        return data
+        total, data, types = self.exec_js(freq_func, field)
+        values = dict([(types.get(k), int(v)) for k, v in data.iteritems()])
+
+        if normalize:
+            values = dict([(k, float(v) / total) for k, v in values.items()])
+
+        frequencies = {}
+        for k, v in values.iteritems():
+            if isinstance(k, float):
+                if int(k) == k:
+                    k = int(k)
+
+            frequencies[k] = v
+
+        return frequencies

     def __repr__(self):
-        limit = REPR_OUTPUT_SIZE + 1
-        start = (0 if self._skip is None else self._skip)
-        if self._limit is None:
-            stop = start + limit
-        if self._limit is not None:
-            if self._limit - start > limit:
-                stop = start + limit
-            else:
-                stop = self._limit
-        try:
-            data = list(self[start:stop])
-        except pymongo.errors.InvalidOperation:
-            return ".. queryset mid-iteration .."
+        """Provides the string representation of the QuerySet
+
+        .. versionchanged:: 0.6.13 Now doesnt modify the cursor
+        """
+
+        if self._iter:
+            return '.. queryset mid-iteration ..'
+
+        data = []
+        for i in xrange(REPR_OUTPUT_SIZE + 1):
+            try:
+                data.append(self.next())
+            except StopIteration:
+                break
         if len(data) > REPR_OUTPUT_SIZE:
             data[-1] = "...(remaining elements truncated)..."
+
+        self.rewind()
         return repr(data)

     def select_related(self, max_depth=1):
@@ -1772,13 +1891,30 @@ class QuerySet(object):

         .. versionadded:: 0.5
         """
-        from dereference import DeReference
         # Make select related work the same for querysets
         max_depth += 1
-        return DeReference()(self, max_depth=max_depth)
+        return self._dereference(self, max_depth=max_depth)
+
+    @property
+    def _dereference(self):
+        if not self.__dereference:
+            from dereference import DeReference
+            self.__dereference = DeReference()  # Cached
+        return self.__dereference


 class QuerySetManager(object):
+    """
+    The default QuerySet Manager.
+
+    Custom QuerySet Manager functions can extend this class and users can
+    add extra queryset functionality.  Any custom manager methods must accept a
+    :class:`~mongoengine.Document` class as its first argument, and a
+    :class:`~mongoengine.queryset.QuerySet` as its second argument.
+
+    The method function should return a :class:`~mongoengine.queryset.QuerySet`
+    , probably the same one that was passed in, but modified in some way.
+    """

     get_queryset = None
@@ -1796,13 +1932,16 @@ class QuerySetManager(object):
             return self

         # owner is the document that contains the QuerySetManager
-        queryset_class = owner._meta['queryset_class'] or QuerySet
+        queryset_class = owner._meta.get('queryset_class') or QuerySet
         queryset = queryset_class(owner, owner._get_collection())
         if self.get_queryset:
-            if self.get_queryset.func_code.co_argcount == 1:
+            arg_count = self.get_queryset.func_code.co_argcount
+            if arg_count == 1:
                 queryset = self.get_queryset(queryset)
-            else:
+            elif arg_count == 2:
                 queryset = self.get_queryset(owner, queryset)
+            else:
+                queryset = partial(self.get_queryset, owner, queryset)
         return queryset
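For context, the two-argument form described in the docstring above is usually wired up with the `queryset_manager` decorator; a minimal sketch (model and filter are illustrative):

from mongoengine import Document, BooleanField
from mongoengine.queryset import queryset_manager

class Post(Document):
    is_published = BooleanField(default=False)

    @queryset_manager
    def live_posts(doc_cls, queryset):
        # doc_cls is the owning Document class, queryset the default QuerySet
        return queryset.filter(is_published=True)

# Post.live_posts then behaves like Post.objects, pre-filtered.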
@@ -5,7 +5,7 @@
 %define srcname mongoengine

 Name:           python-%{srcname}
-Version:        0.6.3
+Version:        0.7rc1
 Release:        1%{?dist}
 Summary:        A Python Document-Object Mapper for working with MongoDB
@@ -51,12 +51,4 @@ rm -rf $RPM_BUILD_ROOT
 # %{python_sitearch}/*

 %changelog
-* Mon Mar 05 2012 Ross Lawley <ross.lawley@gmail.com> 0.6
-- 0.6 released
-* Thu Oct 27 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.3-1
-- Update to latest dev version
-- Add PIL dependency for ImageField
-* Wed Oct 12 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.2-1
-- Update version
-* Fri Sep 23 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.0-1
-- Initial version
+* See: http://readthedocs.org/docs/mongoengine-odm/en/latest/changelog.html
setup.cfg (new file, 11 lines)
@@ -0,0 +1,11 @@
+[nosetests]
+verbosity = 3
+detailed-errors = 1
+#with-coverage = 1
+#cover-erase = 1
+#cover-html = 1
+#cover-html-dir = ../htmlcov
+#cover-package = mongoengine
+py3where = build
+where = tests
+#tests = test_bugfix.py
setup.py (47 lines changed)
@@ -1,5 +1,12 @@
-from setuptools import setup, find_packages
 import os
+import sys
+from setuptools import setup, find_packages
+
+# Hack to silence atexit traceback in newer python versions
+try:
+    import multiprocessing
+except ImportError:
+    pass

 DESCRIPTION = "A Python Document-Object Mapper for working with MongoDB"
@@ -9,19 +16,20 @@ try:
 except:
     pass


 def get_version(version_tuple):
-    version = '%s.%s' % (version_tuple[0], version_tuple[1])
-    if version_tuple[2]:
-        version = '%s.%s' % (version, version_tuple[2])
-    return version
+    if not isinstance(version_tuple[-1], int):
+        return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1]
+    return '.'.join(map(str, version_tuple))

 # Dirty hack to get version number from monogengine/__init__.py - we can't
 # import it as it depends on PyMongo and PyMongo isn't installed until this
 # file is read
 init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py')
-version_line = filter(lambda l: l.startswith('VERSION'), open(init))[0]
+version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0]

 VERSION = get_version(eval(version_line.split('=')[-1]))
-print VERSION
+print(VERSION)

 CLASSIFIERS = [
     'Development Status :: 4 - Beta',
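The rewritten `get_version` treats a trailing non-int element as a pre-release suffix, which is what allows the `0.7rc1` version above:

get_version((0, 6, 3))      # -> '0.6.3'
get_version((0, 7, 'rc1'))  # -> '0.7rc1'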
@@ -29,13 +37,32 @@ CLASSIFIERS = [
     'License :: OSI Approved :: MIT License',
     'Operating System :: OS Independent',
     'Programming Language :: Python',
+    "Programming Language :: Python :: 2",
+    "Programming Language :: Python :: 2.5",
+    "Programming Language :: Python :: 2.6",
+    "Programming Language :: Python :: 2.7",
+    "Programming Language :: Python :: 3",
+    "Programming Language :: Python :: 3.1",
+    "Programming Language :: Python :: 3.2",
+    "Programming Language :: Python :: Implementation :: CPython",
     'Topic :: Database',
     'Topic :: Software Development :: Libraries :: Python Modules',
 ]

+extra_opts = {}
+if sys.version_info[0] == 3:
+    extra_opts['use_2to3'] = True
+    extra_opts['tests_require'] = ['nose', 'coverage', 'blinker']
+    extra_opts['packages'] = find_packages(exclude=('tests',))
+    if "test" in sys.argv or "nosetests" in sys.argv:
+        extra_opts['packages'].append("tests")
+        extra_opts['package_data'] = {"tests": ["mongoengine.png"]}
+else:
+    extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.3', 'PIL']
+    extra_opts['packages'] = find_packages(exclude=('tests',))
+
 setup(name='mongoengine',
       version=VERSION,
-      packages=find_packages(),
       author='Harry Marr',
       author_email='harry.marr@{nospam}gmail.com',
       maintainer="Ross Lawley",
@@ -48,6 +75,6 @@ setup(name='mongoengine',
       platforms=['any'],
       classifiers=CLASSIFIERS,
       install_requires=['pymongo'],
-      test_suite='tests',
-      tests_require=['blinker', 'django>=1.3', 'PIL']
+      test_suite='nose.collector',
+      **extra_opts
       )
tests/test_all_warnings.py (new file, 96 lines)
@@ -0,0 +1,96 @@
+import unittest
+import warnings
+
+from mongoengine import *
+from mongoengine.tests import query_counter
+
+
+class TestWarnings(unittest.TestCase):
+
+    def setUp(self):
+        conn = connect(db='mongoenginetest')
+        self.warning_list = []
+        self.showwarning_default = warnings.showwarning
+        warnings.showwarning = self.append_to_warning_list
+
+    def append_to_warning_list(self, message, category, *args):
+        self.warning_list.append({"message": message,
+                                  "category": category})
+
+    def tearDown(self):
+        # restore default handling of warnings
+        warnings.showwarning = self.showwarning_default
+
+    def test_allow_inheritance_future_warning(self):
+        """Add FutureWarning for future allow_inhertiance default change.
+        """
+
+        class SimpleBase(Document):
+            a = IntField()
+
+        class InheritedClass(SimpleBase):
+            b = IntField()
+
+        InheritedClass()
+        self.assertEqual(len(self.warning_list), 1)
+        warning = self.warning_list[0]
+        self.assertEqual(FutureWarning, warning["category"])
+        self.assertTrue("InheritedClass" in str(warning["message"]))
+
+    def test_dbref_reference_field_future_warning(self):
+
+        class Person(Document):
+            name = StringField()
+            parent = ReferenceField('self')
+
+        Person.drop_collection()
+
+        p1 = Person()
+        p1.parent = None
+        p1.save()
+
+        p2 = Person(name="Wilson Jr")
+        p2.parent = p1
+        p2.save(cascade=False)
+
+        self.assertEqual(len(self.warning_list), 1)
+        warning = self.warning_list[0]
+        self.assertEqual(FutureWarning, warning["category"])
+        self.assertTrue("ReferenceFields will default to using ObjectId"
+                        in str(warning["message"]))
+
+    def test_document_save_cascade_future_warning(self):
+
+        class Person(Document):
+            name = StringField()
+            parent = ReferenceField('self')
+
+        Person.drop_collection()
+
+        p1 = Person(name="Wilson Snr")
+        p1.parent = None
+        p1.save()
+
+        p2 = Person(name="Wilson Jr")
+        p2.parent = p1
+        p2.parent.name = "Poppa Wilson"
+        p2.save()
+
+        self.assertEqual(len(self.warning_list), 1)
+        warning = self.warning_list[0]
+        self.assertEqual(FutureWarning, warning["category"])
+        self.assertTrue("Cascading saves will default to off in 0.8"
+                        in str(warning["message"]))
+
+    def test_document_collection_syntax_warning(self):
+
+        class NonAbstractBase(Document):
+            pass
+
+        class InheritedDocumentFailTest(NonAbstractBase):
+            meta = {'collection': 'fail'}
+
+        warning = self.warning_list[0]
+        self.assertEqual(SyntaxWarning, warning["category"])
+        self.assertEqual('non_abstract_base',
+                         InheritedDocumentFailTest._get_collection_name())
@@ -1,8 +1,11 @@
-import unittest
+import datetime
 import pymongo
+import unittest
+
 import mongoengine.connection

+from bson.tz_util import utc
+
 from mongoengine import *
 from mongoengine.connection import get_db, get_connection, ConnectionError
@@ -65,6 +68,31 @@ class ConnectionTest(unittest.TestCase):
         self.assertTrue(isinstance(db, pymongo.database.Database))
         self.assertEqual(db.name, 'mongoenginetest2')

+    def test_connection_kwargs(self):
+        """Ensure that connection kwargs get passed to pymongo.
+        """
+        connect('mongoenginetest', alias='t1', tz_aware=True)
+        conn = get_connection('t1')
+
+        self.assertTrue(conn.tz_aware)
+
+        connect('mongoenginetest2', alias='t2')
+        conn = get_connection('t2')
+        self.assertFalse(conn.tz_aware)
+
+    def test_datetime(self):
+        connect('mongoenginetest', tz_aware=True)
+        d = datetime.datetime(2010, 5, 5, tzinfo=utc)
+
+        class DateDoc(Document):
+            the_date = DateTimeField(required=True)
+
+        DateDoc.drop_collection()
+        DateDoc(the_date=d).save()
+
+        date_doc = DateDoc.objects.first()
+        self.assertEqual(d, date_doc.the_date)
+

 if __name__ == '__main__':
     unittest.main()
@@ -1,5 +1,8 @@
+from __future__ import with_statement
 import unittest
+
+from bson import DBRef
+
 from mongoengine import *
 from mongoengine.connection import get_db
 from mongoengine.tests import query_counter
@@ -63,6 +66,130 @@ class FieldTest(unittest.TestCase):
         User.drop_collection()
         Group.drop_collection()

+    def test_list_item_dereference_dref_false(self):
+        """Ensure that DBRef items in ListFields are dereferenced.
+        """
+        class User(Document):
+            name = StringField()
+
+        class Group(Document):
+            members = ListField(ReferenceField(User, dbref=False))
+
+        User.drop_collection()
+        Group.drop_collection()
+
+        for i in xrange(1, 51):
+            user = User(name='user %s' % i)
+            user.save()
+
+        group = Group(members=User.objects)
+        group.save()
+
+        with query_counter() as q:
+            self.assertEqual(q, 0)
+
+            group_obj = Group.objects.first()
+            self.assertEqual(q, 1)
+
+            [m for m in group_obj.members]
+            self.assertEqual(q, 2)
+
+        # Document select_related
+        with query_counter() as q:
+            self.assertEqual(q, 0)
+
+            group_obj = Group.objects.first().select_related()
+
+            self.assertEqual(q, 2)
+            [m for m in group_obj.members]
+            self.assertEqual(q, 2)
+
+        # Queryset select_related
+        with query_counter() as q:
+            self.assertEqual(q, 0)
+            group_objs = Group.objects.select_related()
+            self.assertEqual(q, 2)
+            for group_obj in group_objs:
+                [m for m in group_obj.members]
+                self.assertEqual(q, 2)
+
+        User.drop_collection()
+        Group.drop_collection()
+
+    def test_handle_old_style_references(self):
+        """Ensure that DBRef items in ListFields are dereferenced.
+        """
+        class User(Document):
+            name = StringField()
+
+        class Group(Document):
+            members = ListField(ReferenceField(User, dbref=True))
+
+        User.drop_collection()
+        Group.drop_collection()
+
+        for i in xrange(1, 26):
+            user = User(name='user %s' % i)
+            user.save()
+
+        group = Group(members=User.objects)
+        group.save()
+
+        group = Group._get_collection().find_one()
+
+        # Update the model to change the reference
+        class Group(Document):
+            members = ListField(ReferenceField(User, dbref=False))
+
+        group = Group.objects.first()
+        group.members.append(User(name="String!").save())
+        group.save()
+
+        group = Group.objects.first()
+        self.assertEqual(group.members[0].name, 'user 1')
+        self.assertEqual(group.members[-1].name, 'String!')
+
+    def test_migrate_references(self):
+        """Example of migrating ReferenceField storage
+        """
+
+        # Create some sample data
+        class User(Document):
+            name = StringField()
+
+        class Group(Document):
+            author = ReferenceField(User, dbref=True)
+            members = ListField(ReferenceField(User, dbref=True))
+
+        User.drop_collection()
+        Group.drop_collection()
+
+        user = User(name="Ross").save()
+        group = Group(author=user, members=[user]).save()
+
+        raw_data = Group._get_collection().find_one()
+        self.assertTrue(isinstance(raw_data['author'], DBRef))
+        self.assertTrue(isinstance(raw_data['members'][0], DBRef))
+
+        # Migrate the model definition
+        class Group(Document):
+            author = ReferenceField(User, dbref=False)
+            members = ListField(ReferenceField(User, dbref=False))
+
+        # Migrate the data
+        for g in Group.objects():
+            g.author = g.author
+            g.members = g.members
+            g.save()
+
+        group = Group.objects.first()
+        self.assertEqual(group.author, user)
+        self.assertEqual(group.members, [user])
+
+        raw_data = Group._get_collection().find_one()
+        self.assertTrue(isinstance(raw_data['author'], basestring))
+        self.assertTrue(isinstance(raw_data['members'][0], basestring))
+
     def test_recursive_reference(self):
         """Ensure that ReferenceFields can reference their own documents.
         """
@@ -109,10 +236,10 @@ class FieldTest(unittest.TestCase):
             peter = Employee.objects.with_id(peter.id).select_related()
             self.assertEqual(q, 2)

-            self.assertEquals(peter.boss, bill)
+            self.assertEqual(peter.boss, bill)
             self.assertEqual(q, 2)

-            self.assertEquals(peter.friends, friends)
+            self.assertEqual(peter.friends, friends)
             self.assertEqual(q, 2)

         # Queryset select_related
@@ -123,10 +250,10 @@ class FieldTest(unittest.TestCase):
             self.assertEqual(q, 2)

             for employee in employees:
-                self.assertEquals(employee.boss, bill)
+                self.assertEqual(employee.boss, bill)
                 self.assertEqual(q, 2)

-                self.assertEquals(employee.friends, friends)
+                self.assertEqual(employee.friends, friends)
                 self.assertEqual(q, 2)

     def test_circular_reference(self):
@@ -160,7 +287,7 @@ class FieldTest(unittest.TestCase):
         daughter.relations.append(self_rel)
         daughter.save()

-        self.assertEquals("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects())
+        self.assertEqual("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects())

     def test_circular_reference_on_self(self):
         """Ensure you can handle circular references
@@ -186,7 +313,7 @@ class FieldTest(unittest.TestCase):
         daughter.relations.append(daughter)
         daughter.save()

-        self.assertEquals("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects())
+        self.assertEqual("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects())

     def test_circular_tree_reference(self):
         """Ensure you can handle circular references with more than one level
@@ -228,7 +355,7 @@ class FieldTest(unittest.TestCase):
         anna.other.name = "Anna's friends"
         anna.save()

-        self.assertEquals(
+        self.assertEqual(
             "[<Person: Paul>, <Person: Maria>, <Person: Julia>, <Person: Anna>]",
             "%s" % Person.objects()
         )
@@ -781,8 +908,8 @@ class FieldTest(unittest.TestCase):
         root.save()

         root = root.reload()
-        self.assertEquals(root.children, [company])
-        self.assertEquals(company.parents, [root])
+        self.assertEqual(root.children, [company])
+        self.assertEqual(company.parents, [root])

     def test_dict_in_dbref_instance(self):
@@ -808,5 +935,58 @@ class FieldTest(unittest.TestCase):
         room_101.save()

         room = Room.objects.first().select_related()
-        self.assertEquals(room.staffs_with_position[0]['staff'], sarah)
-        self.assertEquals(room.staffs_with_position[1]['staff'], bob)
+        self.assertEqual(room.staffs_with_position[0]['staff'], sarah)
+        self.assertEqual(room.staffs_with_position[1]['staff'], bob)
+
+    def test_document_reload_no_inheritance(self):
+        class Foo(Document):
+            meta = {'allow_inheritance': False}
+            bar = ReferenceField('Bar')
+            baz = ReferenceField('Baz')
+
+        class Bar(Document):
+            meta = {'allow_inheritance': False}
+            msg = StringField(required=True, default='Blammo!')
+
+        class Baz(Document):
+            meta = {'allow_inheritance': False}
+            msg = StringField(required=True, default='Kaboom!')
+
+        Foo.drop_collection()
+        Bar.drop_collection()
+        Baz.drop_collection()
+
+        bar = Bar()
+        bar.save()
+        baz = Baz()
+        baz.save()
+        foo = Foo()
+        foo.bar = bar
+        foo.baz = baz
+        foo.save()
+        foo.reload()
+
+        self.assertEqual(type(foo.bar), Bar)
+        self.assertEqual(type(foo.baz), Baz)
+
+    def test_list_lookup_not_checked_in_map(self):
+        """Ensure we dereference list data correctly
+        """
+        class Comment(Document):
+            id = IntField(primary_key=True)
+            text = StringField()
+
+        class Message(Document):
+            id = IntField(primary_key=True)
+            comments = ListField(ReferenceField(Comment))
+
+        Comment.drop_collection()
+        Message.drop_collection()
+
+        c1 = Comment(id=0, text='zero').save()
+        c2 = Comment(id=1, text='one').save()
+        Message(id=1, comments=[c1, c2]).save()
+
+        msg = Message.objects.get(id=1)
+        self.assertEqual(0, msg.comments[0].id)
+        self.assertEqual(1, msg.comments[1].id)
@@ -1,20 +1,34 @@
-# -*- coding: utf-8 -*-
+from __future__ import with_statement
 import unittest
+
+from nose.plugins.skip import SkipTest
+from mongoengine.python_support import PY3
 from mongoengine import *
-from mongoengine.django.shortcuts import get_document_or_404

-from django.http import Http404
-from django.template import Context, Template
-from django.conf import settings
-from django.core.paginator import Paginator
-
-settings.configure()
+try:
+    from mongoengine.django.shortcuts import get_document_or_404
+
+    from django.http import Http404
+    from django.template import Context, Template
+    from django.conf import settings
+    from django.core.paginator import Paginator
+
+    settings.configure()
+
+    from django.contrib.sessions.tests import SessionTestsMixin
+    from mongoengine.django.sessions import SessionStore, MongoSession
+except Exception, err:
+    if PY3:
+        SessionTestsMixin = type  # dummy value so no error
+        SessionStore = None  # dummy value so no error
+    else:
+        raise err


 class QuerySetTest(unittest.TestCase):

     def setUp(self):
+        if PY3:
+            raise SkipTest('django does not have Python 3 support')
         connect(db='mongoenginetest')

         class Person(Document):
@@ -88,3 +102,21 @@ class QuerySetTest(unittest.TestCase):
             end = p * 2
             start = end - 1
             self.assertEqual(t.render(Context(d)), u'%d:%d:' % (start, end))
+
+
+class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase):
+    backend = SessionStore
+
+    def setUp(self):
+        if PY3:
+            raise SkipTest('django does not have Python 3 support')
+        connect(db='mongoenginetest')
+        MongoSession.drop_collection()
+        super(MongoDBSessionTest, self).setUp()
+
+    def test_first_save(self):
+        session = SessionStore()
+        session['test'] = True
+        session.save()
+        self.assertTrue('test' in session)
(File diff suppressed because it is too large)
@@ -25,14 +25,14 @@ class DynamicDocTest(unittest.TestCase):
         p.name = "James"
         p.age = 34

-        self.assertEquals(p.to_mongo(),
+        self.assertEqual(p.to_mongo(),
                          {"_types": ["Person"], "_cls": "Person",
                           "name": "James", "age": 34}
                          )

         p.save()

-        self.assertEquals(self.Person.objects.first().age, 34)
+        self.assertEqual(self.Person.objects.first().age, 34)

         # Confirm no changes to self.Person
         self.assertFalse(hasattr(self.Person, 'age'))
@@ -40,11 +40,11 @@ class DynamicDocTest(unittest.TestCase):
     def test_dynamic_document_delta(self):
         """Ensures simple dynamic documents can delta correctly"""
         p = self.Person(name="James", age=34)
-        self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {}))
+        self.assertEqual(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {}))

         p.doc = 123
         del(p.doc)
-        self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {'doc': 1}))
+        self.assertEqual(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {'doc': 1}))

     def test_change_scope_of_variable(self):
         """Test changing the scope of a dynamic field has no adverse effects"""
@@ -58,7 +58,7 @@ class DynamicDocTest(unittest.TestCase):
         p.save()

         p = self.Person.objects.get()
-        self.assertEquals(p.misc, {'hello': 'world'})
+        self.assertEqual(p.misc, {'hello': 'world'})

     def test_delete_dynamic_field(self):
         """Test deleting a dynamic field works"""
@@ -73,10 +73,10 @@ class DynamicDocTest(unittest.TestCase):
         p.save()

         p = self.Person.objects.get()
-        self.assertEquals(p.misc, {'hello': 'world'})
+        self.assertEqual(p.misc, {'hello': 'world'})
         collection = self.db[self.Person._get_collection_name()]
         obj = collection.find_one()
-        self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'misc', 'name'])
+        self.assertEqual(sorted(obj.keys()), ['_cls', '_id', '_types', 'misc', 'name'])

         del(p.misc)
         p.save()
@@ -85,7 +85,7 @@ class DynamicDocTest(unittest.TestCase):
         self.assertFalse(hasattr(p, 'misc'))

         obj = collection.find_one()
-        self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'name'])
+        self.assertEqual(sorted(obj.keys()), ['_cls', '_id', '_types', 'name'])

     def test_dynamic_document_queries(self):
         """Ensure we can query dynamic fields"""
@@ -94,10 +94,10 @@ class DynamicDocTest(unittest.TestCase):
         p.age = 22
         p.save()

-        self.assertEquals(1, self.Person.objects(age=22).count())
+        self.assertEqual(1, self.Person.objects(age=22).count())
         p = self.Person.objects(age=22)
         p = p.get()
-        self.assertEquals(22, p.age)
+        self.assertEqual(22, p.age)

     def test_complex_dynamic_document_queries(self):
         class Person(DynamicDocument):
@@ -117,8 +117,8 @@ class DynamicDocTest(unittest.TestCase):
         p2.age = 10
         p2.save()

-        self.assertEquals(Person.objects(age__icontains='ten').count(), 2)
-        self.assertEquals(Person.objects(age__gte=10).count(), 1)
+        self.assertEqual(Person.objects(age__icontains='ten').count(), 2)
+        self.assertEqual(Person.objects(age__gte=10).count(), 1)

     def test_complex_data_lookups(self):
         """Ensure you can query dynamic document dynamic fields"""
@@ -126,7 +126,7 @@ class DynamicDocTest(unittest.TestCase):
         p.misc = {'hello': 'world'}
         p.save()

-        self.assertEquals(1, self.Person.objects(misc__hello='world').count())
+        self.assertEqual(1, self.Person.objects(misc__hello='world').count())

     def test_inheritance(self):
         """Ensure that dynamic document plays nice with inheritance"""
@@ -146,8 +146,8 @@ class DynamicDocTest(unittest.TestCase):
         joe_bloggs.age = 20
         joe_bloggs.save()

-        self.assertEquals(1, self.Person.objects(age=20).count())
-        self.assertEquals(1, Employee.objects(age=20).count())
+        self.assertEqual(1, self.Person.objects(age=20).count())
+        self.assertEqual(1, Employee.objects(age=20).count())

         joe_bloggs = self.Person.objects.first()
         self.assertTrue(isinstance(joe_bloggs, Employee))
@@ -170,7 +170,7 @@ class DynamicDocTest(unittest.TestCase):
         embedded_1.list_field = ['1', 2, {'hello': 'world'}]
         doc.embedded_field = embedded_1

-        self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
+        self.assertEqual(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
             "embedded_field": {
                 "_types": ['Embedded'], "_cls": "Embedded",
                 "string_field": "hello",
@@ -182,11 +182,11 @@ class DynamicDocTest(unittest.TestCase):
         doc.save()

         doc = Doc.objects.first()
-        self.assertEquals(doc.embedded_field.__class__, Embedded)
-        self.assertEquals(doc.embedded_field.string_field, "hello")
-        self.assertEquals(doc.embedded_field.int_field, 1)
-        self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'})
-        self.assertEquals(doc.embedded_field.list_field, ['1', 2, {'hello': 'world'}])
+        self.assertEqual(doc.embedded_field.__class__, Embedded)
+        self.assertEqual(doc.embedded_field.string_field, "hello")
+        self.assertEqual(doc.embedded_field.int_field, 1)
+        self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
+        self.assertEqual(doc.embedded_field.list_field, ['1', 2, {'hello': 'world'}])

     def test_complex_embedded_documents(self):
         """Test complex dynamic embedded documents setups"""
@@ -213,7 +213,7 @@ class DynamicDocTest(unittest.TestCase):
         embedded_1.list_field = ['1', 2, embedded_2]
         doc.embedded_field = embedded_1

-        self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
+        self.assertEqual(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
             "embedded_field": {
                 "_types": ['Embedded'], "_cls": "Embedded",
                 "string_field": "hello",
@@ -230,20 +230,20 @@ class DynamicDocTest(unittest.TestCase):
         })
         doc.save()
         doc = Doc.objects.first()
-        self.assertEquals(doc.embedded_field.__class__, Embedded)
-        self.assertEquals(doc.embedded_field.string_field, "hello")
-        self.assertEquals(doc.embedded_field.int_field, 1)
-        self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'})
-        self.assertEquals(doc.embedded_field.list_field[0], '1')
-        self.assertEquals(doc.embedded_field.list_field[1], 2)
+        self.assertEqual(doc.embedded_field.__class__, Embedded)
+        self.assertEqual(doc.embedded_field.string_field, "hello")
+        self.assertEqual(doc.embedded_field.int_field, 1)
+        self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
+        self.assertEqual(doc.embedded_field.list_field[0], '1')
+        self.assertEqual(doc.embedded_field.list_field[1], 2)

         embedded_field = doc.embedded_field.list_field[2]

-        self.assertEquals(embedded_field.__class__, Embedded)
-        self.assertEquals(embedded_field.string_field, "hello")
-        self.assertEquals(embedded_field.int_field, 1)
-        self.assertEquals(embedded_field.dict_field, {'hello': 'world'})
-        self.assertEquals(embedded_field.list_field, ['1', 2, {'hello': 'world'}])
+        self.assertEqual(embedded_field.__class__, Embedded)
+        self.assertEqual(embedded_field.string_field, "hello")
+        self.assertEqual(embedded_field.int_field, 1)
+        self.assertEqual(embedded_field.dict_field, {'hello': 'world'})
+        self.assertEqual(embedded_field.list_field, ['1', 2, {'hello': 'world'}])

     def test_delta_for_dynamic_documents(self):
         p = self.Person()
@@ -252,18 +252,18 @@ class DynamicDocTest(unittest.TestCase):
         p.save()

         p.age = 24
-        self.assertEquals(p.age, 24)
-        self.assertEquals(p._get_changed_fields(), ['age'])
-        self.assertEquals(p._delta(), ({'age': 24}, {}))
+        self.assertEqual(p.age, 24)
+        self.assertEqual(p._get_changed_fields(), ['age'])
+        self.assertEqual(p._delta(), ({'age': 24}, {}))

         p = self.Person.objects(age=22).get()
         p.age = 24
-        self.assertEquals(p.age, 24)
-        self.assertEquals(p._get_changed_fields(), ['age'])
-        self.assertEquals(p._delta(), ({'age': 24}, {}))
+        self.assertEqual(p.age, 24)
+        self.assertEqual(p._get_changed_fields(), ['age'])
+        self.assertEqual(p._delta(), ({'age': 24}, {}))

         p.save()
-        self.assertEquals(1, self.Person.objects(age=24).count())
+        self.assertEqual(1, self.Person.objects(age=24).count())

     def test_delta(self):

@@ -275,40 +275,40 @@ class DynamicDocTest(unittest.TestCase):
         doc.save()

         doc = Doc.objects.first()
-        self.assertEquals(doc._get_changed_fields(), [])
-        self.assertEquals(doc._delta(), ({}, {}))
+        self.assertEqual(doc._get_changed_fields(), [])
+        self.assertEqual(doc._delta(), ({}, {}))

         doc.string_field = 'hello'
-        self.assertEquals(doc._get_changed_fields(), ['string_field'])
-        self.assertEquals(doc._delta(), ({'string_field': 'hello'}, {}))
+        self.assertEqual(doc._get_changed_fields(), ['string_field'])
+        self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {}))

         doc._changed_fields = []
         doc.int_field = 1
-        self.assertEquals(doc._get_changed_fields(), ['int_field'])
-        self.assertEquals(doc._delta(), ({'int_field': 1}, {}))
+        self.assertEqual(doc._get_changed_fields(), ['int_field'])
+        self.assertEqual(doc._delta(), ({'int_field': 1}, {}))

         doc._changed_fields = []
         dict_value = {'hello': 'world', 'ping': 'pong'}
         doc.dict_field = dict_value
-        self.assertEquals(doc._get_changed_fields(), ['dict_field'])
-        self.assertEquals(doc._delta(), ({'dict_field': dict_value}, {}))
+        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
+        self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {}))

         doc._changed_fields = []
         list_value = ['1', 2, {'hello': 'world'}]
         doc.list_field = list_value
-        self.assertEquals(doc._get_changed_fields(), ['list_field'])
-        self.assertEquals(doc._delta(), ({'list_field': list_value}, {}))
+        self.assertEqual(doc._get_changed_fields(), ['list_field'])
+        self.assertEqual(doc._delta(), ({'list_field': list_value}, {}))

         # Test unsetting
         doc._changed_fields = []
         doc.dict_field = {}
-        self.assertEquals(doc._get_changed_fields(), ['dict_field'])
-        self.assertEquals(doc._delta(), ({}, {'dict_field': 1}))
+        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
+        self.assertEqual(doc._delta(), ({}, {'dict_field': 1}))

         doc._changed_fields = []
         doc.list_field = []
-        self.assertEquals(doc._get_changed_fields(), ['list_field'])
-        self.assertEquals(doc._delta(), ({}, {'list_field': 1}))
+        self.assertEqual(doc._get_changed_fields(), ['list_field'])
+        self.assertEqual(doc._delta(), ({}, {'list_field': 1}))

     def test_delta_recursive(self):
         """Testing deltaing works with dynamic documents"""
@@ -323,8 +323,8 @@ class DynamicDocTest(unittest.TestCase):
         doc.save()

         doc = Doc.objects.first()
-        self.assertEquals(doc._get_changed_fields(), [])
-        self.assertEquals(doc._delta(), ({}, {}))
+        self.assertEqual(doc._get_changed_fields(), [])
+        self.assertEqual(doc._delta(), ({}, {}))

         embedded_1 = Embedded()
         embedded_1.string_field = 'hello'
@@ -333,7 +333,7 @@ class DynamicDocTest(unittest.TestCase):
         embedded_1.list_field = ['1', 2, {'hello': 'world'}]
         doc.embedded_field = embedded_1

-        self.assertEquals(doc._get_changed_fields(), ['embedded_field'])
+        self.assertEqual(doc._get_changed_fields(), ['embedded_field'])

         embedded_delta = {
             'string_field': 'hello',
@@ -341,28 +341,28 @@ class DynamicDocTest(unittest.TestCase):
             'dict_field': {'hello': 'world'},
             'list_field': ['1', 2, {'hello': 'world'}]
         }
-        self.assertEquals(doc.embedded_field._delta(), (embedded_delta, {}))
+        self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
         embedded_delta.update({
             '_types': ['Embedded'],
             '_cls': 'Embedded',
         })
-        self.assertEquals(doc._delta(), ({'embedded_field': embedded_delta}, {}))
+        self.assertEqual(doc._delta(), ({'embedded_field': embedded_delta}, {}))

         doc.save()
         doc.reload()

         doc.embedded_field.dict_field = {}
-        self.assertEquals(doc._get_changed_fields(), ['embedded_field.dict_field'])
-        self.assertEquals(doc.embedded_field._delta(), ({}, {'dict_field': 1}))
+        self.assertEqual(doc._get_changed_fields(), ['embedded_field.dict_field'])
+        self.assertEqual(doc.embedded_field._delta(), ({}, {'dict_field': 1}))

-        self.assertEquals(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
+        self.assertEqual(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
         doc.save()
         doc.reload()

         doc.embedded_field.list_field = []
-        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
-        self.assertEquals(doc.embedded_field._delta(), ({}, {'list_field': 1}))
-        self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field': 1}))
+        self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field'])
+        self.assertEqual(doc.embedded_field._delta(), ({}, {'list_field': 1}))
+        self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field': 1}))
         doc.save()
         doc.reload()

@@ -373,8 +373,8 @@ class DynamicDocTest(unittest.TestCase):
         embedded_2.list_field = ['1', 2, {'hello': 'world'}]

         doc.embedded_field.list_field = ['1', 2, embedded_2]
-        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
-        self.assertEquals(doc.embedded_field._delta(), ({
+        self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field'])
+        self.assertEqual(doc.embedded_field._delta(), ({
             'list_field': ['1', 2, {
                 '_cls': 'Embedded',
                 '_types': ['Embedded'],
@@ -385,7 +385,7 @@ class DynamicDocTest(unittest.TestCase):
             }]
         }, {}))

-        self.assertEquals(doc._delta(), ({
+        self.assertEqual(doc._delta(), ({
             'embedded_field.list_field': ['1', 2, {
                 '_cls': 'Embedded',
                 '_types': ['Embedded'],
@@ -398,25 +398,25 @@ class DynamicDocTest(unittest.TestCase):
         doc.save()
         doc.reload()

-        self.assertEquals(doc.embedded_field.list_field[2]._changed_fields, [])
-        self.assertEquals(doc.embedded_field.list_field[0], '1')
-        self.assertEquals(doc.embedded_field.list_field[1], 2)
+        self.assertEqual(doc.embedded_field.list_field[2]._changed_fields, [])
+        self.assertEqual(doc.embedded_field.list_field[0], '1')
+        self.assertEqual(doc.embedded_field.list_field[1], 2)
         for k in doc.embedded_field.list_field[2]._fields:
-            self.assertEquals(doc.embedded_field.list_field[2][k], embedded_2[k])
+            self.assertEqual(doc.embedded_field.list_field[2][k], embedded_2[k])

         doc.embedded_field.list_field[2].string_field = 'world'
-        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field.2.string_field'])
-        self.assertEquals(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {}))
-        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {}))
+        self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field.2.string_field'])
+        self.assertEqual(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {}))
+        self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {}))
         doc.save()
         doc.reload()
-        self.assertEquals(doc.embedded_field.list_field[2].string_field, 'world')
+        self.assertEqual(doc.embedded_field.list_field[2].string_field, 'world')

         # Test multiple assignments
         doc.embedded_field.list_field[2].string_field = 'hello world'
         doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
-        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
-        self.assertEquals(doc.embedded_field._delta(), ({
+        self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field'])
+        self.assertEqual(doc.embedded_field._delta(), ({
             'list_field': ['1', 2, {
                 '_types': ['Embedded'],
                 '_cls': 'Embedded',
@@ -424,7 +424,7 @@ class DynamicDocTest(unittest.TestCase):
                 'int_field': 1,
                 'list_field': ['1', 2, {'hello': 'world'}],
                 'dict_field': {'hello': 'world'}}]}, {}))
-        self.assertEquals(doc._delta(), ({
+        self.assertEqual(doc._delta(), ({
             'embedded_field.list_field': ['1', 2, {
                 '_types': ['Embedded'],
                 '_cls': 'Embedded',
@@ -435,32 +435,32 @@ class DynamicDocTest(unittest.TestCase):
             ]}, {}))
         doc.save()
         doc.reload()
-        self.assertEquals(doc.embedded_field.list_field[2].string_field, 'hello world')
+        self.assertEqual(doc.embedded_field.list_field[2].string_field, 'hello world')

         # Test list native methods
         doc.embedded_field.list_field[2].list_field.pop(0)
-        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {}))
+        self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {}))
         doc.save()
         doc.reload()

         doc.embedded_field.list_field[2].list_field.append(1)
-        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {}))
+        self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {}))
         doc.save()
         doc.reload()
-        self.assertEquals(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1])
+        self.assertEqual(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1])

-        doc.embedded_field.list_field[2].list_field.sort()
+        doc.embedded_field.list_field[2].list_field.sort(key=str)  # use str as a key to allow comparing incomparable types
         doc.save()
         doc.reload()
-        self.assertEquals(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}])
+        self.assertEqual(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}])

         del(doc.embedded_field.list_field[2].list_field[2]['hello'])
-        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {}))
+        self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {}))
         doc.save()
         doc.reload()

         del(doc.embedded_field.list_field[2].list_field)
-        self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1}))
+        self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1}))

         doc.save()
         doc.reload()
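Note on the sort(key=str) change above: it is part of this branch's Python 3 compatibility work. Python 3 refuses to order values of unrelated types (here ints and dicts), where Python 2 silently used an arbitrary cross-type ordering, so an explicit key is required. A minimal standalone sketch, plain Python with no MongoEngine involved:

    mixed = [2, {'hello': 'world'}, 1]
    # mixed.sort() raises TypeError on Python 3:
    # '<' not supported between instances of 'dict' and 'int'
    mixed.sort(key=str)  # compare string representations instead
    assert mixed == [1, 2, {'hello': 'world'}]

Digits sort before '{' lexicographically, which is why the test expects [1, 2, {'hello': 'world'}].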
@@ -470,8 +470,8 @@ class DynamicDocTest(unittest.TestCase):
         doc.reload()

         doc.dict_field['embedded'].string_field = 'Hello World'
-        self.assertEquals(doc._get_changed_fields(), ['dict_field.embedded.string_field'])
-        self.assertEquals(doc._delta(), ({'dict_field.embedded.string_field': 'Hello World'}, {}))
+        self.assertEqual(doc._get_changed_fields(), ['dict_field.embedded.string_field'])
+        self.assertEqual(doc._delta(), ({'dict_field.embedded.string_field': 'Hello World'}, {}))

     def test_indexes(self):
         """Ensure that indexes are used when meta[indexes] is specified.
@@ -500,3 +500,34 @@ class DynamicDocTest(unittest.TestCase):
         self.assertTrue([('_types', 1), ('category', 1), ('date', -1)]
                         in info)
         self.assertTrue([('_types', 1), ('date', -1)] in info)
+
+    def test_dynamic_and_embedded(self):
+        """Ensure embedded documents play nicely"""
+
+        class Address(EmbeddedDocument):
+            city = StringField()
+
+        class Person(DynamicDocument):
+            name = StringField()
+            meta = {'allow_inheritance': True}
+
+        Person.drop_collection()
+
+        Person(name="Ross", address=Address(city="London")).save()
+
+        person = Person.objects.first()
+        person.address.city = "Lundenne"
+        person.save()
+
+        self.assertEqual(Person.objects.first().address.city, "Lundenne")
+
+        person = Person.objects.first()
+        person.address = Address(city="Londinium")
+        person.save()
+
+        self.assertEqual(Person.objects.first().address.city, "Londinium")
+
+        person = Person.objects.first()
+        person.age = 35
+        person.save()
+        self.assertEqual(Person.objects.first().age, 35)
File diff suppressed because it is too large
@@ -1,16 +1,19 @@
-# -*- coding: utf-8 -*-
+from __future__ import with_statement
 import unittest
-import pymongo
-from bson import ObjectId
 from datetime import datetime, timedelta

+import pymongo
+
+from bson import ObjectId
+
+from mongoengine import *
+from mongoengine.connection import get_connection
+from mongoengine.python_support import PY3
+from mongoengine.tests import query_counter
 from mongoengine.queryset import (QuerySet, QuerySetManager,
                                   MultipleObjectsReturned, DoesNotExist,
                                   QueryFieldList)
-from mongoengine import *
-from mongoengine.connection import get_connection
-from mongoengine.tests import query_counter


 class QuerySetTest(unittest.TestCase):

@@ -21,6 +24,8 @@ class QuerySetTest(unittest.TestCase):
             name = StringField()
             age = IntField()
             meta = {'allow_inheritance': True}
+
+        Person.drop_collection()
         self.Person = Person

     def test_initialisation(self):
@@ -239,11 +244,11 @@ class QuerySetTest(unittest.TestCase):
         self.Person.objects.update(set__name='Ross', write_options=write_options)

         author = self.Person.objects.first()
-        self.assertEquals(author.name, 'Ross')
+        self.assertEqual(author.name, 'Ross')

         self.Person.objects.update_one(set__name='Test User', write_options=write_options)
         author = self.Person.objects.first()
-        self.assertEquals(author.name, 'Test User')
+        self.assertEqual(author.name, 'Test User')

     def test_update_update_has_a_value(self):
         """Test to ensure that update is passed a value to update to"""
@@ -332,8 +337,8 @@ class QuerySetTest(unittest.TestCase):
         BlogPost.objects(comments__by="jane").update(inc__comments__S__votes=1)

         post = BlogPost.objects.first()
-        self.assertEquals(post.comments[1].by, 'jane')
-        self.assertEquals(post.comments[1].votes, 8)
+        self.assertEqual(post.comments[1].by, 'jane')
+        self.assertEqual(post.comments[1].votes, 8)

         # Currently the $ operator only applies to the first matched item in
         # the query
@@ -346,7 +351,7 @@ class QuerySetTest(unittest.TestCase):
         Simple.objects(x=2).update(inc__x__S=1)

         simple = Simple.objects.first()
-        self.assertEquals(simple.x, [1, 3, 3, 2])
+        self.assertEqual(simple.x, [1, 3, 3, 2])
         Simple.drop_collection()

         # You can set multiples
@@ -358,10 +363,10 @@ class QuerySetTest(unittest.TestCase):
         Simple.objects(x=3).update(set__x__S=0)

         s = Simple.objects()
-        self.assertEquals(s[0].x, [1, 2, 0, 4])
-        self.assertEquals(s[1].x, [2, 0, 4, 5])
-        self.assertEquals(s[2].x, [0, 4, 5, 6])
-        self.assertEquals(s[3].x, [4, 5, 6, 7])
+        self.assertEqual(s[0].x, [1, 2, 0, 4])
+        self.assertEqual(s[1].x, [2, 0, 4, 5])
+        self.assertEqual(s[2].x, [0, 4, 5, 6])
+        self.assertEqual(s[3].x, [4, 5, 6, 7])

         # Using "$unset" with an expression like this "array.$" will result in
         # the array item becoming None, not being removed.
@@ -369,14 +374,14 @@ class QuerySetTest(unittest.TestCase):
         Simple(x=[1, 2, 3, 4, 3, 2, 3, 4]).save()
         Simple.objects(x=3).update(unset__x__S=1)
         simple = Simple.objects.first()
-        self.assertEquals(simple.x, [1, 2, None, 4, 3, 2, 3, 4])
+        self.assertEqual(simple.x, [1, 2, None, 4, 3, 2, 3, 4])

         # Nested updates aren't supported yet..
         def update_nested():
             Simple.drop_collection()
             Simple(x=[{'test': [1, 2, 3, 4]}]).save()
             Simple.objects(x__test=2).update(set__x__S__test__S=3)
-            self.assertEquals(simple.x, [1, 2, 3, 4])
+            self.assertEqual(simple.x, [1, 2, 3, 4])

         self.assertRaises(OperationError, update_nested)
         Simple.drop_collection()
@@ -406,8 +411,8 @@ class QuerySetTest(unittest.TestCase):
         BlogPost.objects(comments__by="joe").update(set__comments__S__votes=Vote(score=4))

         post = BlogPost.objects.first()
-        self.assertEquals(post.comments[0].by, 'joe')
-        self.assertEquals(post.comments[0].votes.score, 4)
+        self.assertEqual(post.comments[0].by, 'joe')
+        self.assertEqual(post.comments[0].votes.score, 4)

     def test_mapfield_update(self):
         """Ensure that the MapField can be updated."""
@@ -480,7 +485,7 @@ class QuerySetTest(unittest.TestCase):
         self.assertEqual(person.name, "User C")

     def test_bulk_insert(self):
-        """Ensure that query by array position works.
+        """Ensure that bulk insert works
         """

         class Comment(EmbeddedDocument):
@@ -490,13 +495,13 @@ class QuerySetTest(unittest.TestCase):
             comments = ListField(EmbeddedDocumentField(Comment))

         class Blog(Document):
-            title = StringField()
+            title = StringField(unique=True)
             tags = ListField(StringField())
             posts = ListField(EmbeddedDocumentField(Post))

         Blog.drop_collection()

         # Recreates the collection
         self.assertEqual(0, Blog.objects.count())

         with query_counter() as q:
@@ -561,7 +566,82 @@ class QuerySetTest(unittest.TestCase):
         Blog.drop_collection()
         blog1 = Blog(title="code", posts=[post1, post2])
         obj_id = Blog.objects.insert(blog1, load_bulk=False)
-        self.assertEquals(obj_id.__class__.__name__, 'ObjectId')
+        self.assertEqual(obj_id.__class__.__name__, 'ObjectId')

+        Blog.drop_collection()
+        post3 = Post(comments=[comment1, comment1])
+        blog1 = Blog(title="foo", posts=[post1, post2])
+        blog2 = Blog(title="bar", posts=[post2, post3])
+        blog3 = Blog(title="baz", posts=[post1, post2])
+        Blog.objects.insert([blog1, blog2])
+
+        def throw_operation_error_not_unique():
+            Blog.objects.insert([blog2, blog3], safe=True)
+
+        self.assertRaises(NotUniqueError, throw_operation_error_not_unique)
+        self.assertEqual(Blog.objects.count(), 2)
+
+        Blog.objects.insert([blog2, blog3], write_options={'continue_on_error': True})
+        self.assertEqual(Blog.objects.count(), 3)
+
+    def test_get_changed_fields_query_count(self):
+
+        class Person(Document):
+            name = StringField()
+            owns = ListField(ReferenceField('Organization'))
+            projects = ListField(ReferenceField('Project'))
+
+        class Organization(Document):
+            name = StringField()
+            owner = ReferenceField('Person')
+            employees = ListField(ReferenceField('Person'))
+
+        class Project(Document):
+            name = StringField()
+
+        Person.drop_collection()
+        Organization.drop_collection()
+        Project.drop_collection()
+
+        r1 = Project(name="r1").save()
+        r2 = Project(name="r2").save()
+        r3 = Project(name="r3").save()
+        p1 = Person(name="p1", projects=[r1, r2]).save()
+        p2 = Person(name="p2", projects=[r2]).save()
+        o1 = Organization(name="o1", employees=[p1]).save()
+
+        with query_counter() as q:
+            self.assertEqual(q, 0)
+
+            fresh_o1 = Organization.objects.get(id=o1.id)
+            self.assertEqual(1, q)
+            fresh_o1._get_changed_fields()
+            self.assertEqual(1, q)
+
+        with query_counter() as q:
+            self.assertEqual(q, 0)
+
+            fresh_o1 = Organization.objects.get(id=o1.id)
+            fresh_o1.save()
+
+            self.assertEqual(q, 2)
+
+        with query_counter() as q:
+            self.assertEqual(q, 0)
+
+            fresh_o1 = Organization.objects.get(id=o1.id)
+            fresh_o1.save(cascade=False)
+
+            self.assertEqual(q, 2)
+
+        with query_counter() as q:
+            self.assertEqual(q, 0)
+
+            fresh_o1 = Organization.objects.get(id=o1.id)
+            fresh_o1.employees.append(p2)
+            fresh_o1.save(cascade=False)
+
+            self.assertEqual(q, 3)
+
     def test_slave_okay(self):
         """Ensures that a query can take slave_okay syntax
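Note on the bulk-insert additions above: because title is now declared unique=True, re-inserting [blog2, blog3] with safe=True hits the duplicate blog2 and raises NotUniqueError, leaving the count at 2; with write_options={'continue_on_error': True} the duplicate is skipped but the new blog3 is still written, which is why the count rises from 2 to 3.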
@@ -619,17 +699,38 @@ class QuerySetTest(unittest.TestCase):
         self.assertEqual(people1, people2)
         self.assertEqual(people1, people3)

-    def test_repr_iteration(self):
-        """Ensure that QuerySet __repr__ can handle loops
-        """
-        self.Person(name='Person 1').save()
-        self.Person(name='Person 2').save()
-
-        queryset = self.Person.objects
-        self.assertEquals('[<Person: Person object>, <Person: Person object>]', repr(queryset))
-        for person in queryset:
-            self.assertEquals('.. queryset mid-iteration ..', repr(queryset))
+    def test_repr(self):
+        """Test repr behavior isn't destructive"""
+
+        class Doc(Document):
+            number = IntField()
+
+            def __repr__(self):
+                return "<Doc: %s>" % self.number
+
+        Doc.drop_collection()
+
+        for i in xrange(1000):
+            Doc(number=i).save()
+
+        docs = Doc.objects.order_by('number')
+
+        self.assertEqual(docs.count(), 1000)
+        self.assertEqual(len(docs), 1000)
+
+        docs_string = "%s" % docs
+        self.assertTrue("Doc: 0" in docs_string)
+
+        self.assertEqual(docs.count(), 1000)
+        self.assertEqual(len(docs), 1000)
+
+        # Limit and skip
+        self.assertEqual('[<Doc: 1>, <Doc: 2>, <Doc: 3>]', "%s" % docs[1:4])
+
+        self.assertEqual(docs.count(), 3)
+        self.assertEqual(len(docs), 3)
+        for doc in docs:
+            self.assertEqual('.. queryset mid-iteration ..', repr(docs))

     def test_regex_query_shortcuts(self):
         """Ensure that contains, startswith, endswith, etc work.
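The rewritten test_repr above pins down that rendering a QuerySet must not be destructive: after "%s" % docs, count() and len() are unchanged, and only an explicit slice applies limit/skip. A toy sketch of the underlying idea, that a repr should peek at a clone of the cursor rather than the live iterator (the names here are illustrative, not MongoEngine's internals):

    import itertools

    class Cursor(object):
        """Toy one-shot cursor: normal iteration consumes it."""
        def __init__(self, source):
            self._it = iter(source)

        def __iter__(self):
            return self._it

        def clone(self):
            # Duplicate the remaining stream so peeking is side-effect free.
            self._it, copy = itertools.tee(self._it)
            return Cursor(copy)

        def __repr__(self):
            # Peek at (up to) the first three items of the clone only.
            return repr(list(itertools.islice(iter(self.clone()), 3)))

    c = Cursor(range(5))
    repr(c)                             # "[0, 1, 2]"
    assert list(c) == [0, 1, 2, 3, 4]   # the live iterator was not consumed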
@@ -731,7 +832,11 @@ class QuerySetTest(unittest.TestCase):
     def test_filter_chaining(self):
         """Ensure filters can be chained together.
         """
+        class Blog(Document):
+            id = StringField(unique=True, primary_key=True)
+
         class BlogPost(Document):
+            blog = ReferenceField(Blog)
             title = StringField()
             is_published = BooleanField()
             published_date = DateTimeField()
@@ -740,13 +845,24 @@ class QuerySetTest(unittest.TestCase):
         def published(doc_cls, queryset):
             return queryset(is_published=True)

-        blog_post_1 = BlogPost(title="Blog Post #1",
+        Blog.drop_collection()
+        BlogPost.drop_collection()
+
+        blog_1 = Blog(id="1")
+        blog_2 = Blog(id="2")
+        blog_3 = Blog(id="3")
+
+        blog_1.save()
+        blog_2.save()
+        blog_3.save()
+
+        blog_post_1 = BlogPost(blog=blog_1, title="Blog Post #1",
                                is_published = True,
                                published_date=datetime(2010, 1, 5, 0, 0 ,0))
-        blog_post_2 = BlogPost(title="Blog Post #2",
+        blog_post_2 = BlogPost(blog=blog_2, title="Blog Post #2",
                                is_published = True,
                                published_date=datetime(2010, 1, 6, 0, 0 ,0))
-        blog_post_3 = BlogPost(title="Blog Post #3",
+        blog_post_3 = BlogPost(blog=blog_3, title="Blog Post #3",
                                is_published = True,
                                published_date=datetime(2010, 1, 7, 0, 0 ,0))

@@ -760,7 +876,29 @@ class QuerySetTest(unittest.TestCase):
                                              published_date__lt=datetime(2010, 1, 7, 0, 0 ,0))
         self.assertEqual(published_posts.count(), 2)

+
+        blog_posts = BlogPost.objects
+        blog_posts = blog_posts.filter(blog__in=[blog_1, blog_2])
+        blog_posts = blog_posts.filter(blog=blog_3)
+        self.assertEqual(blog_posts.count(), 0)
+
         BlogPost.drop_collection()
+        Blog.drop_collection()
+
+    def test_raw_and_merging(self):
+        class Doc(Document):
+            pass
+
+        raw_query = Doc.objects(__raw__={'deleted': False,
+                                         'scraped': 'yes',
+                                         '$nor': [{'views.extracted': 'no'},
+                                                  {'attachments.views.extracted':'no'}]
+                                         })._query
+
+        expected = {'deleted': False, '_types': 'Doc', 'scraped': 'yes',
+                    '$nor': [{'views.extracted': 'no'},
+                             {'attachments.views.extracted': 'no'}]}
+        self.assertEqual(expected, raw_query)

     def test_ordering(self):
         """Ensure default ordering is applied and can be overridden.
@@ -986,27 +1124,27 @@ class QuerySetTest(unittest.TestCase):

         # first three
         numbers = Numbers.objects.fields(slice__n=3).get()
-        self.assertEquals(numbers.n, [0, 1, 2])
+        self.assertEqual(numbers.n, [0, 1, 2])

         # last three
         numbers = Numbers.objects.fields(slice__n=-3).get()
-        self.assertEquals(numbers.n, [-3, -2, -1])
+        self.assertEqual(numbers.n, [-3, -2, -1])

         # skip 2, limit 3
         numbers = Numbers.objects.fields(slice__n=[2, 3]).get()
-        self.assertEquals(numbers.n, [2, 3, 4])
+        self.assertEqual(numbers.n, [2, 3, 4])

         # skip to fifth from last, limit 4
         numbers = Numbers.objects.fields(slice__n=[-5, 4]).get()
-        self.assertEquals(numbers.n, [-5, -4, -3, -2])
+        self.assertEqual(numbers.n, [-5, -4, -3, -2])

         # skip to fifth from last, limit 10
         numbers = Numbers.objects.fields(slice__n=[-5, 10]).get()
-        self.assertEquals(numbers.n, [-5, -4, -3, -2, -1])
+        self.assertEqual(numbers.n, [-5, -4, -3, -2, -1])

         # skip to fifth from last, limit 10 dict method
         numbers = Numbers.objects.fields(n={"$slice": [-5, 10]}).get()
-        self.assertEquals(numbers.n, [-5, -4, -3, -2, -1])
+        self.assertEqual(numbers.n, [-5, -4, -3, -2, -1])

     def test_slicing_nested_fields(self):
         """Ensure that query slicing an embedded array works.
@@ -1026,27 +1164,27 @@ class QuerySetTest(unittest.TestCase):

         # first three
         numbers = Numbers.objects.fields(slice__embedded__n=3).get()
-        self.assertEquals(numbers.embedded.n, [0, 1, 2])
+        self.assertEqual(numbers.embedded.n, [0, 1, 2])

         # last three
         numbers = Numbers.objects.fields(slice__embedded__n=-3).get()
-        self.assertEquals(numbers.embedded.n, [-3, -2, -1])
+        self.assertEqual(numbers.embedded.n, [-3, -2, -1])

         # skip 2, limit 3
         numbers = Numbers.objects.fields(slice__embedded__n=[2, 3]).get()
-        self.assertEquals(numbers.embedded.n, [2, 3, 4])
+        self.assertEqual(numbers.embedded.n, [2, 3, 4])

         # skip to fifth from last, limit 4
         numbers = Numbers.objects.fields(slice__embedded__n=[-5, 4]).get()
-        self.assertEquals(numbers.embedded.n, [-5, -4, -3, -2])
+        self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2])

         # skip to fifth from last, limit 10
         numbers = Numbers.objects.fields(slice__embedded__n=[-5, 10]).get()
-        self.assertEquals(numbers.embedded.n, [-5, -4, -3, -2, -1])
+        self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1])

         # skip to fifth from last, limit 10 dict method
         numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get()
-        self.assertEquals(numbers.embedded.n, [-5, -4, -3, -2, -1])
+        self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1])

     def test_find_embedded(self):
         """Ensure that an embedded document is properly returned from a query.
@@ -1130,7 +1268,6 @@ class QuerySetTest(unittest.TestCase):
         published_posts = (post1, post2, post3, post5, post6)
         self.assertTrue(all(obj.id in posts for obj in published_posts))

-
         # Check Q object combination
         date = datetime(2010, 1, 10)
         q = BlogPost.objects(Q(publish_date__lte=date) | Q(published=True))
@@ -1189,6 +1326,27 @@ class QuerySetTest(unittest.TestCase):

         BlogPost.drop_collection()

+    def test_raw_query_and_Q_objects(self):
+        """
+        Test raw plays nicely
+        """
+        class Foo(Document):
+            name = StringField()
+            a = StringField()
+            b = StringField()
+            c = StringField()
+
+            meta = {
+                'allow_inheritance': False
+            }
+
+        query = Foo.objects(__raw__={'$nor': [{'name': 'bar'}]})._query
+        self.assertEqual(query, {'$nor': [{'name': 'bar'}]})
+
+        q1 = {'$or': [{'a': 1}, {'b': 1}]}
+        query = Foo.objects(Q(__raw__=q1) & Q(c=1))._query
+        self.assertEqual(query, {'$or': [{'a': 1}, {'b': 1}], 'c': 1})
+
     def test_exec_js_query(self):
         """Ensure that queries are properly formed for use in exec_js.
         """
@@ -1287,7 +1445,7 @@ class QuerySetTest(unittest.TestCase):
         # Test template style
         code = "{{~comments.content}}"
         sub_code = BlogPost.objects._sub_js_fields(code)
-        self.assertEquals("cmnts.body", sub_code)
+        self.assertEqual("cmnts.body", sub_code)

         BlogPost.drop_collection()

@@ -1327,6 +1485,39 @@ class QuerySetTest(unittest.TestCase):
         self.Person.objects(name='Test User').delete()
         self.assertEqual(1, BlogPost.objects.count())

+    def test_reverse_delete_rule_cascade_self_referencing(self):
+        """Ensure self-referencing CASCADE deletes do not result in infinite loop
+        """
+        class Category(Document):
+            name = StringField()
+            parent = ReferenceField('self', reverse_delete_rule=CASCADE)
+
+        Category.drop_collection()
+
+        num_children = 3
+        base = Category(name='Root')
+        base.save()
+
+        # Create a simple parent-child tree
+        for i in range(num_children):
+            child_name = 'Child-%i' % i
+            child = Category(name=child_name, parent=base)
+            child.save()
+
+            for i in range(num_children):
+                child_child_name = 'Child-Child-%i' % i
+                child_child = Category(name=child_child_name, parent=child)
+                child_child.save()
+
+        tree_size = 1 + num_children + (num_children * num_children)
+        self.assertEqual(tree_size, Category.objects.count())
+        self.assertEqual(num_children, Category.objects(parent=base).count())
+
+        # The delete should effectively wipe out the Category collection
+        # without resulting in infinite parent-child cascade recursion
+        base.delete()
+        self.assertEqual(0, Category.objects.count())
+
     def test_reverse_delete_rule_nullify(self):
         """Ensure nullification of references to deleted documents.
         """
@@ -1371,6 +1562,36 @@ class QuerySetTest(unittest.TestCase):

         self.assertRaises(OperationError, self.Person.objects.delete)

+    def test_reverse_delete_rule_pull(self):
+        """Ensure pulling of references to deleted documents.
+        """
+        class BlogPost(Document):
+            content = StringField()
+            authors = ListField(ReferenceField(self.Person,
+                                               reverse_delete_rule=PULL))
+
+        BlogPost.drop_collection()
+        self.Person.drop_collection()
+
+        me = self.Person(name='Test User')
+        me.save()
+
+        someoneelse = self.Person(name='Some-one Else')
+        someoneelse.save()
+
+        post = BlogPost(content='Watching TV', authors=[me, someoneelse])
+        post.save()
+
+        another = BlogPost(content='Chilling Out', authors=[someoneelse])
+        another.save()
+
+        someoneelse.delete()
+        post.reload()
+        another.reload()
+
+        self.assertEqual(post.authors, [me])
+        self.assertEqual(another.authors, [])
+
     def test_update(self):
         """Ensure that atomic updates work properly.
         """
@@ -1421,7 +1642,7 @@ class QuerySetTest(unittest.TestCase):

         BlogPost.drop_collection()

-    def test_update_push_and_pull(self):
+    def test_update_push_and_pull_add_to_set(self):
         """Ensure that the 'pull' update operation works correctly.
         """
         class BlogPost(Document):
@@ -1454,6 +1675,52 @@ class QuerySetTest(unittest.TestCase):
         post.reload()
         self.assertEqual(post.tags, ["code", "mongodb"])

+    def test_add_to_set_each(self):
+        class Item(Document):
+            name = StringField(required=True)
+            description = StringField(max_length=50)
+            parents = ListField(ReferenceField('self'))
+
+        Item.drop_collection()
+
+        item = Item(name='test item').save()
+        parent_1 = Item(name='parent 1').save()
+        parent_2 = Item(name='parent 2').save()
+
+        item.update(add_to_set__parents=[parent_1, parent_2, parent_1])
+        item.reload()
+
+        self.assertEqual([parent_1, parent_2], item.parents)
+
+    def test_pull_nested(self):
+
+        class User(Document):
+            name = StringField()
+
+        class Collaborator(EmbeddedDocument):
+            user = StringField()
+
+            def __unicode__(self):
+                return '%s' % self.user
+
+        class Site(Document):
+            name = StringField(max_length=75, unique=True, required=True)
+            collaborators = ListField(EmbeddedDocumentField(Collaborator))
+
+
+        Site.drop_collection()
+
+        c = Collaborator(user='Esteban')
+        s = Site(name="test", collaborators=[c])
+        s.save()
+
+        Site.objects(id=s.id).update_one(pull__collaborators__user='Esteban')
+        self.assertEqual(Site.objects.first().collaborators, [])
+
+        def pull_all():
+            Site.objects(id=s.id).update_one(pull_all__collaborators__user=['Ross'])
+
+        self.assertRaises(InvalidQueryError, pull_all)
+
     def test_update_one_pop_generic_reference(self):

@@ -1510,7 +1777,7 @@ class QuerySetTest(unittest.TestCase):

         BlogPost.objects(slug="test-2").update_one(set__tags__0__name="python")
         post.reload()
-        self.assertEquals(post.tags[0].name, 'python')
+        self.assertEqual(post.tags[0].name, 'python')

         BlogPost.objects(slug="test-2").update_one(pop__tags=-1)
         post.reload()
@@ -1537,7 +1804,7 @@ class QuerySetTest(unittest.TestCase):
             set__authors__S=Author(name="Ross"))

         message = message.reload()
-        self.assertEquals(message.authors[0].name, "Ross")
+        self.assertEqual(message.authors[0].name, "Ross")

         Message.objects(authors__name="Ross").update_one(
             set__authors=[Author(name="Harry"),
@@ -1545,9 +1812,9 @@ class QuerySetTest(unittest.TestCase):
                           Author(name="Adam")])

         message = message.reload()
-        self.assertEquals(message.authors[0].name, "Harry")
-        self.assertEquals(message.authors[1].name, "Ross")
-        self.assertEquals(message.authors[2].name, "Adam")
+        self.assertEqual(message.authors[0].name, "Harry")
+        self.assertEqual(message.authors[1].name, "Ross")
+        self.assertEqual(message.authors[2].name, "Adam")

     def test_order_by(self):
         """Ensure that QuerySets may be ordered.
@@ -1627,10 +1894,10 @@ class QuerySetTest(unittest.TestCase):
         results = list(results)
         self.assertEqual(len(results), 4)

-        music = filter(lambda r: r.key == "music", results)[0]
+        music = list(filter(lambda r: r.key == "music", results))[0]
         self.assertEqual(music.value, 2)

-        film = filter(lambda r: r.key == "film", results)[0]
+        film = list(filter(lambda r: r.key == "film", results))[0]
         self.assertEqual(film.value, 3)

         BlogPost.drop_collection()
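The list(filter(...)) wrappers above are another Python 2/3 compatibility fix: on Python 3, filter() returns a lazy iterator rather than a list, so its result cannot be indexed directly. A quick illustration:

    results = [('music', 2), ('film', 3)]
    matches = filter(lambda r: r[0] == 'music', results)
    # On Python 3, matches[0] raises TypeError: 'filter' object is not subscriptable
    music = list(matches)[0]
    assert music == ('music', 2)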
@@ -1849,9 +2116,9 @@ class QuerySetTest(unittest.TestCase):
|
|||||||
|
|
||||||
# Check item_frequencies works for non-list fields
|
# Check item_frequencies works for non-list fields
|
||||||
def test_assertions(f):
|
def test_assertions(f):
|
||||||
self.assertEqual(set(['1', '2']), set(f.keys()))
|
self.assertEqual(set([1, 2]), set(f.keys()))
|
||||||
self.assertEqual(f['1'], 1)
|
self.assertEqual(f[1], 1)
|
||||||
self.assertEqual(f['2'], 2)
|
self.assertEqual(f[2], 2)
|
||||||
|
|
||||||
exec_js = BlogPost.objects.item_frequencies('hits')
|
exec_js = BlogPost.objects.item_frequencies('hits')
|
||||||
map_reduce = BlogPost.objects.item_frequencies('hits', map_reduce=True)
|
map_reduce = BlogPost.objects.item_frequencies('hits', map_reduce=True)
|
||||||
@@ -1930,15 +2197,15 @@ class QuerySetTest(unittest.TestCase):
         Person(name="Wilson Jr").save()
 
         freq = Person.objects.item_frequencies('city')
-        self.assertEquals(freq, {'CRB': 1.0, None: 1.0})
+        self.assertEqual(freq, {'CRB': 1.0, None: 1.0})
         freq = Person.objects.item_frequencies('city', normalize=True)
-        self.assertEquals(freq, {'CRB': 0.5, None: 0.5})
+        self.assertEqual(freq, {'CRB': 0.5, None: 0.5})
 
 
         freq = Person.objects.item_frequencies('city', map_reduce=True)
-        self.assertEquals(freq, {'CRB': 1.0, None: 1.0})
+        self.assertEqual(freq, {'CRB': 1.0, None: 1.0})
         freq = Person.objects.item_frequencies('city', normalize=True, map_reduce=True)
-        self.assertEquals(freq, {'CRB': 0.5, None: 0.5})
+        self.assertEqual(freq, {'CRB': 0.5, None: 0.5})
 
     def test_item_frequencies_with_null_embedded(self):
         class Data(EmbeddedDocument):
@@ -1951,7 +2218,6 @@ class QuerySetTest(unittest.TestCase):
             data = EmbeddedDocumentField(Data, required=True)
             extra = EmbeddedDocumentField(Extra)
 
-
         Person.drop_collection()
 
         p = Person()
@@ -1964,10 +2230,56 @@ class QuerySetTest(unittest.TestCase):
         p.save()
 
         ot = Person.objects.item_frequencies('extra.tag', map_reduce=False)
-        self.assertEquals(ot, {None: 1.0, u'friend': 1.0})
+        self.assertEqual(ot, {None: 1.0, u'friend': 1.0})
 
         ot = Person.objects.item_frequencies('extra.tag', map_reduce=True)
-        self.assertEquals(ot, {None: 1.0, u'friend': 1.0})
+        self.assertEqual(ot, {None: 1.0, u'friend': 1.0})
 
+    def test_item_frequencies_with_0_values(self):
+        class Test(Document):
+            val = IntField()
+
+        Test.drop_collection()
+        t = Test()
+        t.val = 0
+        t.save()
+
+        ot = Test.objects.item_frequencies('val', map_reduce=True)
+        self.assertEqual(ot, {0: 1})
+        ot = Test.objects.item_frequencies('val', map_reduce=False)
+        self.assertEqual(ot, {0: 1})
+
+    def test_item_frequencies_with_False_values(self):
+        class Test(Document):
+            val = BooleanField()
+
+        Test.drop_collection()
+        t = Test()
+        t.val = False
+        t.save()
+
+        ot = Test.objects.item_frequencies('val', map_reduce=True)
+        self.assertEqual(ot, {False: 1})
+        ot = Test.objects.item_frequencies('val', map_reduce=False)
+        self.assertEqual(ot, {False: 1})
+
+    def test_item_frequencies_normalize(self):
+        class Test(Document):
+            val = IntField()
+
+        Test.drop_collection()
+
+        for i in xrange(50):
+            Test(val=1).save()
+
+        for i in xrange(20):
+            Test(val=2).save()
+
+        freqs = Test.objects.item_frequencies('val', map_reduce=False, normalize=True)
+        self.assertEqual(freqs, {1: 50.0/70, 2: 20.0/70})
+
+        freqs = Test.objects.item_frequencies('val', map_reduce=True, normalize=True)
+        self.assertEqual(freqs, {1: 50.0/70, 2: 20.0/70})
+
     def test_average(self):
         """Ensure that field can be averaged correctly.
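The three tests added above guard item_frequencies against a classic falsy-value bug: 0 and False are falsy in both Python and the JavaScript used by the map-reduce path, so a truthiness check silently drops them along with genuinely missing values. (The new tests still use xrange, which exists only on Python 2; presumably the suite shims it when run under Python 3.) A short standalone sketch of the pitfall, illustrative only and not mongoengine internals:

    values = [0, False, None, 1]

    # A naive guard drops 0 and False together with the missing value:
    kept_naive = [v for v in values if v]

    # Comparing against None explicitly keeps legitimate falsy values:
    kept_correct = [v for v in values if v is not None]

    assert kept_naive == [1]
    assert kept_correct == [0, False, 1]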
@@ -2027,7 +2339,29 @@ class QuerySetTest(unittest.TestCase):
         foo = Foo(bar=bar)
         foo.save()
 
-        self.assertEquals(Foo.objects.distinct("bar"), [bar])
+        self.assertEqual(Foo.objects.distinct("bar"), [bar])
 
+    def test_distinct_handles_references_to_alias(self):
+        register_connection('testdb', 'mongoenginetest2')
+
+        class Foo(Document):
+            bar = ReferenceField("Bar")
+            meta = {'db_alias': 'testdb'}
+
+        class Bar(Document):
+            text = StringField()
+            meta = {'db_alias': 'testdb'}
+
+        Bar.drop_collection()
+        Foo.drop_collection()
+
+        bar = Bar(text="hi")
+        bar.save()
+
+        foo = Foo(bar=bar)
+        foo.save()
+
+        self.assertEqual(Foo.objects.distinct("bar"), [bar])
+
     def test_custom_manager(self):
         """Ensure that custom QuerySetManager instances work as expected.
@@ -2038,28 +2372,29 @@ class QuerySetTest(unittest.TestCase):
             date = DateTimeField(default=datetime.now)
 
             @queryset_manager
-            def objects(doc_cls, queryset):
-                return queryset(deleted=False)
+            def objects(cls, qryset):
+                opts = {"deleted": False}
+                return qryset(**opts)
 
             @queryset_manager
-            def music_posts(doc_cls, queryset):
-                return queryset(tags='music', deleted=False).order_by('-date')
+            def music_posts(doc_cls, queryset, deleted=False):
+                return queryset(tags='music',
+                                deleted=deleted).order_by('date')
 
         BlogPost.drop_collection()
 
-        post1 = BlogPost(tags=['music', 'film'])
-        post1.save()
-        post2 = BlogPost(tags=['music'])
-        post2.save()
-        post3 = BlogPost(tags=['film', 'actors'])
-        post3.save()
-        post4 = BlogPost(tags=['film', 'actors'], deleted=True)
-        post4.save()
+        post1 = BlogPost(tags=['music', 'film']).save()
+        post2 = BlogPost(tags=['music']).save()
+        post3 = BlogPost(tags=['film', 'actors']).save()
+        post4 = BlogPost(tags=['film', 'actors', 'music'], deleted=True).save()
 
-        self.assertEqual([p.id for p in BlogPost.objects],
+        self.assertEqual([p.id for p in BlogPost.objects()],
                          [post1.id, post2.id, post3.id])
-        self.assertEqual([p.id for p in BlogPost.music_posts],
-                         [post2.id, post1.id])
+        self.assertEqual([p.id for p in BlogPost.music_posts()],
+                         [post1.id, post2.id])
+
+        self.assertEqual([p.id for p in BlogPost.music_posts(True)],
+                         [post4.id])
 
         BlogPost.drop_collection()
 
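The manager rewrite above changes the calling convention: a @queryset_manager method receives the document class and a base queryset, and any extra positional arguments supplied by the caller (as in BlogPost.music_posts(True)) are passed through after those two. A toy re-implementation of the decorator idea, to make that convention concrete — a sketch under assumptions, not mongoengine's actual code:

    class queryset_manager(object):
        # Toy descriptor: hand the owning class and a fresh "queryset"
        # (here just a list) to the decorated function, plus caller args.
        def __init__(self, func):
            self.func = func

        def __get__(self, instance, owner):
            def manager(*args, **kwargs):
                return self.func(owner, list(owner.data), *args, **kwargs)
            return manager

    class BlogPostish(object):
        data = [1, 2, 3, 4]

        @queryset_manager
        def evens(cls, queryset, keep_odd=False):
            return queryset if keep_odd else [n for n in queryset if n % 2 == 0]

    print(BlogPostish.evens())      # [2, 4]
    print(BlogPostish.evens(True))  # [1, 2, 3, 4]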
@@ -2209,6 +2544,24 @@ class QuerySetTest(unittest.TestCase):
 
         BlogPost.drop_collection()
 
+    def test_types_index_with_pk(self):
+
+        class Comment(EmbeddedDocument):
+            comment_id = IntField(required=True)
+
+        try:
+            class BlogPost(Document):
+                comments = EmbeddedDocumentField(Comment)
+                meta = {'indexes': [{'fields': ['pk', 'comments.comment_id'],
+                                     'unique': True}]}
+        except UnboundLocalError:
+            self.fail('Unbound local error at types index + pk definition')
+
+        info = BlogPost.objects._collection.index_information()
+        info = [value['key'] for key, value in info.iteritems()]
+        index_item = [(u'_types', 1), (u'_id', 1), (u'comments.comment_id', 1)]
+        self.assertTrue(index_item in info)
+
     def test_dict_with_custom_baseclass(self):
         """Ensure DictField working with custom base clases.
         """
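For readers skimming the new test above: mongoengine exposes the primary key under the alias pk but stores it as _id, and at this point in the project's history documents with inheritance enabled get an implicit leading _types key on their indexes. Both translations are visible in the expected index spec; restating the mapping from the test itself:

    # Index fields as declared in the document's meta:
    declared = ['pk', 'comments.comment_id']

    # What the test expects MongoDB to report back:
    #   'pk' -> '_id' (primary-key alias resolved), plus an implicit
    #   leading '_types' key for inheritance-aware documents.
    expected = [(u'_types', 1), (u'_id', 1), (u'comments.comment_id', 1)]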
@@ -2486,8 +2839,8 @@ class QuerySetTest(unittest.TestCase):
 
         Post().save()
         Post(is_published=True).save()
-        self.assertEquals(Post.objects.count(), 2)
-        self.assertEquals(Post.published.count(), 1)
+        self.assertEqual(Post.objects.count(), 2)
+        self.assertEqual(Post.published.count(), 1)
 
         Post.drop_collection()
 
@@ -2653,10 +3006,10 @@ class QuerySetTest(unittest.TestCase):
         Number(n=3).save()
 
         numbers = [n.n for n in Number.objects.order_by('-n')]
-        self.assertEquals([3, 2, 1], numbers)
+        self.assertEqual([3, 2, 1], numbers)
 
         numbers = [n.n for n in Number.objects.order_by('+n')]
-        self.assertEquals([1, 2, 3], numbers)
+        self.assertEqual([1, 2, 3], numbers)
         Number.drop_collection()
 
 
@@ -2899,6 +3252,19 @@ class QuerySetTest(unittest.TestCase):
         self.assertEqual(plist[1], (20, False))
         self.assertEqual(plist[2], (30, True))
 
+    def test_scalar_primary_key(self):
+
+        class SettingValue(Document):
+            key = StringField(primary_key=True)
+            value = StringField()
+
+        SettingValue.drop_collection()
+        s = SettingValue(key="test", value="test value")
+        s.save()
+
+        val = SettingValue.objects.scalar('key', 'value')
+        self.assertEqual(list(val), [('test', 'test value')])
+
     def test_scalar_cursor_behaviour(self):
         """Ensure that a query returns a valid set of results.
         """
@@ -2965,15 +3331,22 @@ class QuerySetTest(unittest.TestCase):
         self.assertEqual(len(self.Person.objects.scalar('name')), 55)
         self.assertEqual("A0", "%s" % self.Person.objects.order_by('name').scalar('name').first())
         self.assertEqual("A0", "%s" % self.Person.objects.scalar('name').order_by('name')[0])
-        self.assertEqual("[u'A1', u'A2']", "%s" % self.Person.objects.order_by('age').scalar('name')[1:3])
-        self.assertEqual("[u'A51', u'A52']", "%s" % self.Person.objects.order_by('age').scalar('name')[51:53])
+        if PY3:
+            self.assertEqual("['A1', 'A2']", "%s" % self.Person.objects.order_by('age').scalar('name')[1:3])
+            self.assertEqual("['A51', 'A52']", "%s" % self.Person.objects.order_by('age').scalar('name')[51:53])
+        else:
+            self.assertEqual("[u'A1', u'A2']", "%s" % self.Person.objects.order_by('age').scalar('name')[1:3])
+            self.assertEqual("[u'A51', u'A52']", "%s" % self.Person.objects.order_by('age').scalar('name')[51:53])
 
         # with_id and in_bulk
         person = self.Person.objects.order_by('name').first()
         self.assertEqual("A0", "%s" % self.Person.objects.scalar('name').with_id(person.id))
 
         pks = self.Person.objects.order_by('age').scalar('pk')[1:3]
-        self.assertEqual("[u'A1', u'A2']", "%s" % sorted(self.Person.objects.scalar('name').in_bulk(list(pks)).values()))
+        if PY3:
+            self.assertEqual("['A1', 'A2']", "%s" % sorted(self.Person.objects.scalar('name').in_bulk(list(pks)).values()))
+        else:
+            self.assertEqual("[u'A1', u'A2']", "%s" % sorted(self.Person.objects.scalar('name').in_bulk(list(pks)).values()))
 
 
 class QTest(unittest.TestCase):
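The PY3 branches above exist because these assertions compare the repr of query results: under Python 2 a unicode string reprs with a u prefix (u'A1'), while under Python 3 all strings are unicode and the prefix is gone. A two-line standalone illustration:

    # Python 2:  repr(u'A1') == "u'A1'"  ->  "[u'A1', u'A2']"
    # Python 3:  repr('A1')  == "'A1'"   ->  "['A1', 'A2']"
    names = [u'A1', u'A2']
    print("%s" % names)   # differs only by the u prefix between Python 2 and 3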
tests/test_replicaset_connection.py (new file, 33 lines)
@@ -0,0 +1,33 @@
+import unittest
+
+import pymongo
+from pymongo import ReadPreference, ReplicaSetConnection
+
+import mongoengine
+from mongoengine import *
+from mongoengine.connection import get_db, get_connection, ConnectionError
+
+
+class ConnectionTest(unittest.TestCase):
+
+    def tearDown(self):
+        mongoengine.connection._connection_settings = {}
+        mongoengine.connection._connections = {}
+        mongoengine.connection._dbs = {}
+
+    def test_replicaset_uri_passes_read_preference(self):
+        """Requires a replica set called "rs" on port 27017
+        """
+
+        try:
+            conn = connect(db='mongoenginetest', host="mongodb://localhost/mongoenginetest?replicaSet=rs", read_preference=ReadPreference.SECONDARY_ONLY)
+        except ConnectionError, e:
+            return
+
+        if not isinstance(conn, ReplicaSetConnection):
+            return
+
+        self.assertEqual(conn.read_preference, ReadPreference.SECONDARY_ONLY)
+
+if __name__ == '__main__':
+    unittest.main()
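One thing a reviewer might flag in the new file above: except ConnectionError, e: is Python 2-only syntax and is a SyntaxError on Python 3, so as committed this file cannot even be imported there; the version-portable spelling uses as (and since e is unused, the binding could be dropped entirely). A generic standalone sketch of the portable form:

    # Python 2 only (SyntaxError on Python 3):
    #     except ValueError, e:
    # Portable across Python 2.6+ and 3.x:
    try:
        risky = int("not a number")
    except ValueError as e:
        print("caught: %s" % e)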
tests/test_signals.py
@@ -212,9 +212,9 @@ class SignalTests(unittest.TestCase):
 
         # The output of this signal is not entirely deterministic. The reloaded
         # object will have an object ID. Hence, we only check part of the output
-        self.assertEquals(signal_output[3],
+        self.assertEqual(signal_output[3],
             "pre_bulk_insert signal, [<Author: Bill Shakespeare>]")
-        self.assertEquals(signal_output[-2:],
+        self.assertEqual(signal_output[-2:],
             ["post_bulk_insert signal, [<Author: Bill Shakespeare>]",
              "Is loaded",])
 