Compare commits

216 commits:

82446d641e
9451c9f331
7564bbdee8
69251e5000
6ecdc7b59d
b7d0d8f0cc
df52ed1162
aa6370dd5d
c272b7901f
c61de6540a
3c7bf50089
32fc4152a7
bdf7187d5c
1639576203
ae20c785ea
a2eb876f8c
5a1eaa0a98
398fd4a548
44b9fb66e1
2afa2171f9
1d7ea71c0d
2a391f0f16
e9b8093dac
6a229cfbc5
3300f409ba
4466005363
296ef5bddf
1f2a432e82
855933ab2a
ece8d25187
589a720162
a59b518cf2
a15352a4f8
df65f3fc3f
734986c1b5
4a9ed5f2f2
088f229865
cb2cb851e2
d3962c4f7d
0301135f96
f59aa922ea
f60a49d6f6
9a190eb00d
6bad4bd415
50d9b0b796
12f884e3ac
02b1aa7355
90bfa608dd
13f38b1c1d
1afe7240f4
7a41155178
39a20ea471
d8855a4a0f
de8da78042
318b42dff2
0018674b62
82913e8d69
0d867a108d
5ee4b4a5ac
62219d9648
6d9bfff19c
7614b92197
7c1afd0031
ca7b2371fb
ed5fba6b0f
2b3b3bf652
11daf706df
4a269eb2c4
9b3899476c
febb3d7e3d
83e3c5c7d8
3c271845c9
56c4292164
2531ade3bb
3e2f035400
e7bcb5e366
112e921ce2
216f15602b
fbe1901e65
8d2bc444bb
cf4a45da11
be78209f94
45b5bf73fe
84f9e44b6c
700bc1b4bb
beef2ede25
9bfc838029
e9d7353294
a6948771d8
403977cd49
153538cef9
9f1196e982
6419a8d09a
769cee3d64
fc460b775e
ba59e498de
939bd2bb1f
e231f71b4a
d06c5f036b
071562d755
391f659af1
8a44232bfc
9188f9bf62
0187a0e113
beacfae400
fdc385ea33
8b97808931
179c4a10c8
6cef571bfb
fbe8b28b2e
a8d91a56bf
8d7291506e
d9005ac2fc
c775c0a80c
700e2cd93d
083f00be84
d00859ecfd
4e73566c11
208a467b24
e1bb453f32
4607b08be5
aa5c776f3d
0075c0a1e8
83fff80b0f
5e553ffaf7
6d185b7f7a
e80144e9f2
fa4b820931
63c5a4dd65
34646a414c
5aeee9deb2
4c1509a62a
bfdaae944d
4e44198bbd
a4e8177b76
81bf5cb78b
a9fc476fb8
26f0c06624
59bd72a888
7d808b483e
3ee60affa9
558b8123b5
ecdf2ae5c7
aa9ed614ad
1acdb880fc
7cd22aaf83
5eb63cfa30
5dc998ed52
8074094568
56d1139d71
165cdc8840
c42aef74de
634e1f661f
a1db437c42
b8e2bdc99f
52d4ea7d78
7db5335420
62480fe940
3d7b30da77
8e87648d53
f842c90007
7f2b686ab5
b09c52fc7e
202d6e414f
3d817f145c
181e191fee
79ecf027dd
76d771d20f
4d5f602ee7
452bbcc19b
24b8650026
269e6e29d6
c4b0002ddb
53598781b8
0624cdd6e4
5fb9d61d28
7b1860d17b
8797565606
3d97c41fe9
5edfeb2e29
268908b3b2
fb70b47acb
591149b1f0
9a0a0b1bd4
219d316b49
3aa2233b5d
d59862ae6e
0a03f9a31a
dca135190a
aedcf3dc81
6961a9494f
6d70ef1a08
e1fc15875d
94ae1388b1
17728d4e74
417aa743ca
2f26f7a827
09f9c59b3d
bec6805296
d99c7c20cc
60b6ad3fcf
9b4d0f6450
1a2c74391c
08288e591c
823cf421fa
3799f27734
a7edd8602c
c081aca794
2ca6648227
1af54f93f5
a9cacd2e06
f7fbb3d2f6
adb7bbeea0
89c44cd14e
8105bfd8b3
b91db87ae0
.gitignore (vendored): 5 changes
@@ -1,7 +1,8 @@
.*
!.gitignore
*.pyc
.*.swp
*~
*.py[co]
.*.sw[po]
*.egg
docs/.build
docs/_build
AUTHORS: 32 changes
@@ -1,11 +1,11 @@
The PRIMARY AUTHORS are (and/or have been):

Ross Lawley <ross.lawley@gmail.com>
Harry Marr <harry@hmarr.com>
Matt Dennewitz <mattdennewitz@gmail.com>
Deepak Thukral <iapain@yahoo.com>
Florian Schlachter <flori@n-schlachter.de>
Steve Challis <steve@stevechallis.com>
Ross Lawley <ross.lawley@gmail.com>
Wilson Júnior <wilsonpjunior@gmail.com>
Dan Crosta https://github.com/dcrosta

@@ -67,3 +67,33 @@ that much better:
 * Gareth Lloyd
 * Albert Choi
 * John Arnfield
 * grubberr
 * Paul Aliagas
 * Paul Cunnane
 * Julien Rebetez
 * Marc Tamlyn
 * Karim Allah
 * Adam Parrish
 * jpfarias
 * jonrscott
 * Alice Zoë Bevan-McGregor
 * Stephen Young
 * tkloc
 * aid
 * yamaneko1212
 * dave mankoff
 * Alexander G. Morano
 * jwilder
 * Joe Shaw
 * Adam Flynn
 * Ankhbayar
 * Jan Schrewe
 * David Koblas
 * Crittercism
 * Alvin Liang
 * andrewmlevy
 * Chris Faulkner
 * Ashwin Purohit
 * Shalabh Aggarwal
 * Chris Williams
 * Robert Kajic
README.rst: 19 changes
@@ -3,20 +3,21 @@ MongoEngine
===========
:Info: MongoEngine is an ORM-like layer on top of PyMongo.
:Author: Harry Marr (http://github.com/hmarr)
:Maintainer: Ross Lawley (http://github.com/rozza)

About
=====
MongoEngine is a Python Object-Document Mapper for working with MongoDB.
Documentation available at http://hmarr.com/mongoengine/ - there is currently
a `tutorial <http://hmarr.com/mongoengine/tutorial.html>`_, a `user guide
<http://hmarr.com/mongoengine/userguide.html>`_ and an `API reference
<http://hmarr.com/mongoengine/apireference.html>`_.
MongoEngine is a Python Object-Document Mapper for working with MongoDB.
Documentation available at http://mongoengine-odm.rtfd.org - there is currently
a `tutorial <http://readthedocs.org/docs/mongoengine-odm/en/latest/tutorial.html>`_, a `user guide
<http://readthedocs.org/docs/mongoengine-odm/en/latest/userguide.html>`_ and an `API reference
<http://readthedocs.org/docs/mongoengine-odm/en/latest/apireference.html>`_.

Installation
============
If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
you can use ``easy_install -U mongoengine``. Otherwise, you can download the
source from `GitHub <http://github.com/hmarr/mongoengine>`_ and run ``python
source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python
setup.py install``.

Dependencies
============
@@ -84,14 +85,14 @@ the standard port, and run ``python setup.py test``.

Community
=========
- `MongoEngine Users mailing list
- `MongoEngine Users mailing list
  <http://groups.google.com/group/mongoengine-users>`_
- `MongoEngine Developers mailing list
- `MongoEngine Developers mailing list
  <http://groups.google.com/group/mongoengine-dev>`_
- `#mongoengine IRC channel <irc://irc.freenode.net/mongoengine>`_

Contributing
============
The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_ - to
The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ - to
contribute to the project, fork it on GitHub and send a pull request, all
contributions and suggestions are welcome!
benchmark.py (new file): 182 lines
@@ -0,0 +1,182 @@
#!/usr/bin/env python

import timeit


def cprofile_main():
    from pymongo import Connection
    connection = Connection()
    connection.drop_database('timeit_test')
    connection.disconnect()

    from mongoengine import Document, DictField, connect
    connect("timeit_test")

    class Noddy(Document):
        fields = DictField()

    for i in xrange(1):
        noddy = Noddy()
        for j in range(20):
            noddy.fields["key" + str(j)] = "value " + str(j)
        noddy.save()


def main():
    """
    0.4 Performance Figures ...

    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - Pymongo
    1.1141769886
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine
    2.37724113464
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
    1.92479610443

    0.5.X
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - Pymongo
    1.10552310944
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine
    16.5169169903
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
    14.9446101189
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
    14.912801981
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine, force=True
    14.9617750645

    Performance
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - Pymongo
    1.10072994232
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine
    5.27341103554
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
    4.49365401268
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
    4.43459296227
    ----------------------------------------------------------------------------------------------------
    Creating 10000 dictionaries - MongoEngine, force=True
    4.40114378929
    """

    setup = """
from pymongo import Connection
connection = Connection()
connection.drop_database('timeit_test')
"""

    stmt = """
from pymongo import Connection
connection = Connection()

db = connection.timeit_test
noddy = db.noddy

for i in xrange(10000):
    example = {'fields': {}}
    for j in range(20):
        example['fields']["key"+str(j)] = "value "+str(j)

    noddy.insert(example)

myNoddys = noddy.find()
[n for n in myNoddys]  # iterate
"""

    print "-" * 100
    print """Creating 10000 dictionaries - Pymongo"""
    t = timeit.Timer(stmt=stmt, setup=setup)
    print t.timeit(1)

    setup = """
from pymongo import Connection
connection = Connection()
connection.drop_database('timeit_test')
connection.disconnect()

from mongoengine import Document, DictField, connect
connect("timeit_test")

class Noddy(Document):
    fields = DictField()
"""

    stmt = """
for i in xrange(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save()

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print "-" * 100
    print """Creating 10000 dictionaries - MongoEngine"""
    t = timeit.Timer(stmt=stmt, setup=setup)
    print t.timeit(1)

    stmt = """
for i in xrange(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(safe=False, validate=False)

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print "-" * 100
    print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False"""
    t = timeit.Timer(stmt=stmt, setup=setup)
    print t.timeit(1)


    stmt = """
for i in xrange(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(safe=False, validate=False, cascade=False)

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print "-" * 100
    print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False"""
    t = timeit.Timer(stmt=stmt, setup=setup)
    print t.timeit(1)

    stmt = """
for i in xrange(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(force_insert=True, safe=False, validate=False, cascade=False)

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print "-" * 100
    print """Creating 10000 dictionaries - MongoEngine, force=True"""
    t = timeit.Timer(stmt=stmt, setup=setup)
    print t.timeit(1)


if __name__ == "__main__":
    main()
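The file above also exposes a ``cprofile_main()`` hook. A minimal, hedged sketch of driving it through the standard-library profiler (assumes the file is importable as ``benchmark`` and a local mongod is running on the default port)::

    import cProfile

    import benchmark  # assumes benchmark.py is on the Python path

    # Profile one save-heavy run of the ODM code path and sort the
    # report by cumulative time spent per function.
    cProfile.run('benchmark.cprofile_main()', sort='cumulative')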
docs/apireference.rst

@@ -6,6 +6,7 @@ Connecting
==========

.. autofunction:: mongoengine.connect
.. autofunction:: mongoengine.register_connection

Documents
=========
@@ -15,12 +16,18 @@ Documents

.. attribute:: objects

   A :class:`~mongoengine.queryset.QuerySet` object that is created lazily
   A :class:`~mongoengine.queryset.QuerySet` object that is created lazily
   on access.

.. autoclass:: mongoengine.EmbeddedDocument
   :members:

.. autoclass:: mongoengine.DynamicDocument
   :members:

.. autoclass:: mongoengine.DynamicEmbeddedDocument
   :members:

.. autoclass:: mongoengine.document.MapReduceDocument
   :members:

@@ -31,46 +38,31 @@ Querying
   :members:

   .. automethod:: mongoengine.queryset.QuerySet.__call__

.. autofunction:: mongoengine.queryset.queryset_manager

Fields
======

.. autoclass:: mongoengine.StringField
.. autoclass:: mongoengine.URLField
.. autoclass:: mongoengine.EmailField
.. autoclass:: mongoengine.IntField
.. autoclass:: mongoengine.FloatField
.. autoclass:: mongoengine.DecimalField
.. autoclass:: mongoengine.BooleanField
.. autoclass:: mongoengine.DateTimeField
.. autoclass:: mongoengine.ComplexDateTimeField
.. autoclass:: mongoengine.EmbeddedDocumentField
.. autoclass:: mongoengine.DictField
.. autoclass:: mongoengine.ListField
.. autoclass:: mongoengine.SortedListField
.. autoclass:: mongoengine.BinaryField
.. autoclass:: mongoengine.DictField
.. autoclass:: mongoengine.MapField
.. autoclass:: mongoengine.ObjectIdField
.. autoclass:: mongoengine.ReferenceField
.. autoclass:: mongoengine.GenericReferenceField
.. autoclass:: mongoengine.EmbeddedDocumentField
.. autoclass:: mongoengine.GenericEmbeddedDocumentField
.. autoclass:: mongoengine.BooleanField
.. autoclass:: mongoengine.FileField
.. autoclass:: mongoengine.BinaryField
.. autoclass:: mongoengine.GeoPointField
.. autoclass:: mongoengine.SequenceField
docs/changelog.rst

@@ -2,9 +2,81 @@
Changelog
=========

Changes in dev
Changes in 0.6
==============

- Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7
- Added support for covered indexes when inheritance is off
- No longer always upsert on save for items with a '_id'
- Error raised if update doesn't have an operation
- DeReferencing is now thread safe
- Errors raised if trying to perform a join in a query
- Updates can now take __raw__ queries
- Added custom 2D index declarations
- Added replicaSet connection support
- Updated deprecated imports from pymongo (safe for pymongo 2.2)
- Added uri support for connections
- Added scalar for efficiently returning partial data values (aliased to values_list)
- Fixed limit skip bug
- Improved Inheritance / Mixin
- Added sharding support
- Added pymongo 2.1 support
- Fixed Abstract documents can now declare indexes
- Added db_alias support to individual documents
- Fixed GridFS documents can now be pickled
- Added Now raises an InvalidDocumentError when declaring multiple fields with the same db_field
- Added InvalidQueryError when calling with_id with a filter
- Added support for DBRefs in distinct()
- Fixed issue saving False booleans
- Fixed issue with dynamic documents deltas
- Added Reverse Delete Rule support to ListFields - MapFields aren't supported
- Added customisable cascade kwarg options
- Fixed Handle None values for non-required fields
- Removed Document._get_subclasses() - no longer required
- Fixed bug requiring subclasses when not actually needed
- Fixed deletion of dynamic data
- Added support for the $elementMatch operator
- Added reverse option to SortedListFields
- Fixed dereferencing - multi directional list dereferencing
- Fixed issue creating indexes with recursive embedded documents
- Fixed recursive lookup in _unique_with_indexes
- Fixed passing ComplexField defaults to constructor for ReferenceFields
- Fixed validation of DictField Int keys
- Added optional cascade saving
- Fixed dereferencing - max_depth now taken into account
- Fixed document mutation saving issue
- Fixed positional operator when replacing embedded documents
- Added Non-Django Style choices back (you can have either)
- Fixed __repr__ of a sliced queryset
- Added recursive validation error of documents / complex fields
- Fixed breaking during queryset iteration
- Added pre and post bulk-insert signals
- Added ImageField - requires PIL
- Fixed Reference Fields can be None in get_or_create / queries
- Fixed accessing pk on an embedded document
- Fixed calling a queryset after drop_collection now recreates the collection
- Add field name to validation exception messages
- Added UUID field
- Improved efficiency of .get()
- Updated ComplexFields so if required they won't accept empty lists / dicts
- Added spec file for rpm-based distributions
- Fixed ListField so it doesnt accept strings
- Added DynamicDocument and EmbeddedDynamicDocument classes for expando schemas

Changes in v0.5.2
=================

- A Robust Circular reference bugfix


Changes in v0.5.1
=================

- Fixed simple circular reference bug

Changes in v0.5
===============

- Added InvalidDocumentError - so Document core methods can't be overwritten
- Added GenericEmbeddedDocument - so you can embed any type of embeddable document
- Added within_polygon support - for those with mongodb 1.9
docs/conf.py

@@ -38,7 +38,7 @@ master_doc = 'index'

# General information about the project.
project = u'MongoEngine'
copyright = u'2009-2011, Harry Marr'
copyright = u'2009-2012, MongoEngine Authors'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -121,7 +121,7 @@ html_theme_path = ['_themes']
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
#html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
docs/guide/connecting.rst

@@ -3,6 +3,7 @@
=====================
Connecting to MongoDB
=====================

To connect to a running instance of :program:`mongod`, use the
:func:`~mongoengine.connect` function. The first argument is the name of the
database to connect to. If the database does not exist, it will be created. If
@@ -18,3 +19,42 @@ provide :attr:`host` and :attr:`port` arguments to
:func:`~mongoengine.connect`::

    connect('project1', host='192.168.1.35', port=12345)

Uri style connections are also supported as long as you include the database
name - just supply the uri as the :attr:`host` to
:func:`~mongoengine.connect`::

    connect('project1', host='mongodb://localhost/database_name')


Multiple Databases
==================

Multiple database support was added in MongoEngine 0.6. To use multiple
databases you can use :func:`~mongoengine.connect` and provide an `alias` name
for the connection - if no `alias` is provided then "default" is used.

In the background this uses :func:`~mongoengine.register_connection` to
store the data and you can register all aliases up front if required.

Individual documents can also support multiple databases by providing a
`db_alias` in their meta data. This allows :class:`~pymongo.dbref.DBRef` objects
to point across databases and collections. Below is an example schema, using
3 different databases to store data::

    class User(Document):
        name = StringField()

        meta = {"db_alias": "user-db"}

    class Book(Document):
        name = StringField()

        meta = {"db_alias": "book-db"}

    class AuthorBooks(Document):
        author = ReferenceField(User)
        book = ReferenceField(Book)

        meta = {"db_alias": "users-books-db"}
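A minimal sketch tying this section together, using only the API shown in this changeset (database and alias names are illustrative)::

    from mongoengine import Document, StringField, connect

    # Register two connections; the first uses the "default" alias.
    connect('main-db')
    connect('users-db', alias='user-db')

    class User(Document):
        name = StringField()

        # Route this document's collection to the aliased database.
        meta = {'db_alias': 'user-db'}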
docs/guide/defining-documents.rst

@@ -24,6 +24,34 @@ objects** as class attributes to the document class::

        title = StringField(max_length=200, required=True)
        date_modified = DateTimeField(default=datetime.datetime.now)

Dynamic document schemas
========================
One of the benefits of MongoDb is dynamic schemas for a collection, whilst data
should be planned and organised (after all explicit is better than implicit!)
there are scenarios where having dynamic / expando style documents is desirable.

:class:`~mongoengine.DynamicDocument` documents work in the same way as
:class:`~mongoengine.Document` but any data / attributes set to them will also
be saved ::

    from mongoengine import *

    class Page(DynamicDocument):
        title = StringField(max_length=200, required=True)

    # Create a new page and add tags
    >>> page = Page(title='Using MongoEngine')
    >>> page.tags = ['mongodb', 'mongoengine']
    >>> page.save()

    >>> Page.objects(tags='mongoengine').count()
    >>> 1

..note::

    There is one caveat on Dynamic Documents: fields cannot start with `_`


Fields
======
By default, fields are not required. To make a field mandatory, set the
@@ -107,12 +135,33 @@ arguments can be set on all fields:

    When True, use this field as a primary key for the collection.

:attr:`choices` (Default: None)
    An iterable of choices to which the value of this field should be limited.
    An iterable (e.g. a list or tuple) of choices to which the value of this
    field should be limited.

    Can be either be a nested tuples of value (stored in mongo) and a
    human readable key ::

        SIZE = (('S', 'Small'),
                ('M', 'Medium'),
                ('L', 'Large'),
                ('XL', 'Extra Large'),
                ('XXL', 'Extra Extra Large'))

        class Shirt(Document):
            size = StringField(max_length=3, choices=SIZE)

    Or a flat iterable just containing values ::

        SIZE = ('S', 'M', 'L', 'XL', 'XXL')

        class Shirt(Document):
            size = StringField(max_length=3, choices=SIZE)

:attr:`help_text` (Default: None)
    Optional help text to output with the field - used by form libraries

:attr:`verbose` (Default: None)
:attr:`verbose_name` (Default: None)
    Optional human-readable name for the field - used by form libraries
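A hedged illustration of how the ``choices`` constraint behaves at validation time (the rejection behaviour is standard field validation; the document is the Shirt model from the hunk above, and the import path for ValidationError is an assumption about this version's layout)::

    from mongoengine import Document, StringField
    from mongoengine.base import ValidationError  # location assumed for 0.6

    SIZE = ('S', 'M', 'L', 'XL', 'XXL')

    class Shirt(Document):
        size = StringField(max_length=3, choices=SIZE)

    shirt = Shirt(size='XS')       # 'XS' is not one of the declared choices
    try:
        shirt.validate()           # runs field validation without saving
    except ValidationError:
        print 'invalid choice rejected'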
@@ -382,10 +431,31 @@ If a dictionary is passed then the following options are available:

:attr:`unique` (Default: False)
    Whether the index should be sparse.

.. note::
.. warning::

    Geospatial indexes will be automatically created for all
    :class:`~mongoengine.GeoPointField`\ s

    Inheritance adds extra indices.
    If don't need inheritance for a document turn inheritance off - see :ref:`document-inheritance`.


Geospatial indexes
---------------------------
Geospatial indexes will be automatically created for all
:class:`~mongoengine.GeoPointField`\ s

It is also possible to explicitly define geospatial indexes. This is
useful if you need to define a geospatial index on a subfield of a
:class:`~mongoengine.DictField` or a custom field that contains a
point. To create a geospatial index you must prefix the field with the
***** sign. ::

    class Place(Document):
        location = DictField()
        meta = {
            'indexes': [
                '*location.point',
            ],
        }
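A hedged sketch of querying against such an index, reusing the ``Place`` class above (coordinates are made up, and the nested-path ``near`` lookup is an assumption extrapolated from the geo operators documented for GeoPointFields)::

    # Find places ordered by proximity to [40, 5]; mongo can use the
    # geospatial index declared on 'location.point' above.
    nearby = Place.objects(location__point__near=[40, 5])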
Ordering
========
@@ -427,8 +497,31 @@ subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. ::

    first_post = BlogPost.objects.order_by("+published_date").first()
    assert first_post.title == "Blog Post #1"

Shard keys
==========

If your collection is sharded, then you need to specify the shard key as a tuple,
using the :attr:`shard_key` attribute of :attr:`-mongoengine.Document.meta`.
This ensures that the shard key is sent with the query when calling the
:meth:`~mongoengine.document.Document.save` or
:meth:`~mongoengine.document.Document.update` method on an existing
:class:`-mongoengine.Document` instance::

    class LogEntry(Document):
        machine = StringField()
        app = StringField()
        timestamp = DateTimeField()
        data = StringField()

        meta = {
            'shard_key': ('machine', 'timestamp',)
        }

.. _document-inheritance:

Document inheritance
====================

To create a specialised type of a :class:`~mongoengine.Document` you have
defined, you may subclass it and add any extra fields or methods you may need.
As this is new class is not a direct subclass of
@@ -440,10 +533,15 @@ convenient and efficient retrieval of related documents::

    class Page(Document):
        title = StringField(max_length=200, required=True)

        meta = {'allow_inheritance': True}

    # Also stored in the collection named 'page'
    class DatedPage(Page):
        date = DateTimeField()

.. note:: From 0.7 onwards you must declare `allow_inheritance` in the document meta.


Working with existing data
--------------------------
To enable correct retrieval of documents involved in this kind of heirarchy,
docs/guide/document-instances.rst

@@ -35,13 +35,23 @@ already exist, then any changes will be updated atomically. For example::

    * ``list_field.pop(0)`` - *sets* the resulting list
    * ``del(list_field)`` - *unsets* whole list

To delete a document, call the :meth:`~mongoengine.Document.delete` method.
Note that this will only work if the document exists in the database and has a
valide :attr:`id`.

.. seealso::
    :ref:`guide-atomic-updates`

Cascading Saves
---------------
If your document contains :class:`~mongoengine.ReferenceField` or
:class:`~mongoengine.GenericReferenceField` objects, then by default the
:meth:`~mongoengine.Document.save` method will automatically save any changes to
those objects as well. If this is not desired passing :attr:`cascade` as False
to the save method turns this feature off.
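A minimal sketch of switching cascading saves off, using the ``cascade`` keyword this changeset documents (model and database names are illustrative)::

    from mongoengine import Document, StringField, ReferenceField, connect

    connect('cascade-demo')  # illustrative database name

    class Author(Document):
        name = StringField()

    class BlogPost(Document):
        title = StringField()
        author = ReferenceField(Author)

    author = Author(name='Ross')
    author.save()
    post = BlogPost(title='Hello', author=author)
    post.save()                  # default: also saves changes to `author`

    author.name = 'Ross Lawley'
    post.save(cascade=False)     # `author` is left untouched this time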
Deleting documents
------------------
To delete a document, call the :meth:`~mongoengine.Document.delete` method.
Note that this will only work if the document exists in the database and has a
valid :attr:`id`.

Document IDs
============
Each document in the database has a unique id. This may be accessed through the
docs/guide/querying.rst

@@ -76,6 +76,7 @@ expressions:

* ``istartswith`` -- string field starts with value (case insensitive)
* ``endswith`` -- string field ends with value
* ``iendswith`` -- string field ends with value (case insensitive)
* ``match`` -- performs an $elemMatch so you can match an entire document within an array

There are a few special operators for performing geographical queries, that
may used with :class:`~mongoengine.GeoPointField`\ s:
@@ -194,22 +195,6 @@ to be created::

    >>> a.name == b.name and a.age == b.age
    True

Dereferencing results
---------------------
When iterating the results of :class:`~mongoengine.ListField` or
:class:`~mongoengine.DictField` we automatically dereference any
:class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the
number the queries to mongo.

There are times when that efficiency is not enough, documents that have
:class:`~mongoengine.ReferenceField` objects or
:class:`~mongoengine.GenericReferenceField` objects at the top level are
expensive as the number of queries to MongoDB can quickly rise.

To limit the number of queries use
:func:`~mongoengine.queryset.QuerySet.select_related` which converts the
QuerySet to a list and dereferences as efficiently as possible.

Default Document queries
========================
By default, the objects :attr:`~mongoengine.Document.objects` attribute on a
@@ -312,8 +297,16 @@ would be generating "tag-clouds"::

    from operator import itemgetter
    top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10]


Query efficiency and performance
================================

There are a couple of methods to improve efficiency when querying, reducing the
information returned by the query or efficient dereferencing .

Retrieving a subset of fields
=============================
-----------------------------

Sometimes a subset of fields on a :class:`~mongoengine.Document` is required,
and for efficiency only these should be retrieved from the database. This issue
is especially important for MongoDB, as fields may often be extremely large
@@ -346,6 +339,27 @@ will be given::

    If you later need the missing fields, just call
    :meth:`~mongoengine.Document.reload` on your document.

Getting related data
--------------------

When iterating the results of :class:`~mongoengine.ListField` or
:class:`~mongoengine.DictField` we automatically dereference any
:class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the
number the queries to mongo.

There are times when that efficiency is not enough, documents that have
:class:`~mongoengine.ReferenceField` objects or
:class:`~mongoengine.GenericReferenceField` objects at the top level are
expensive as the number of queries to MongoDB can quickly rise.

To limit the number of queries use
:func:`~mongoengine.queryset.QuerySet.select_related` which converts the
QuerySet to a list and dereferences as efficiently as possible. By default
:func:`~mongoengine.queryset.QuerySet.select_related` only dereferences any
references to the depth of 1 level. If you have more complicated documents and
want to dereference more of the object at once then increasing the :attr:`max_depth`
will dereference more levels of the document.
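Both efficiency tools in one short, hedged sketch, reusing the ``BlogPost`` model sketched earlier::

    # Fetch only the title field of each post.
    titles = BlogPost.objects.only('title')

    # Materialise the queryset as a list, dereferencing referenced
    # documents in as few queries as possible; max_depth=2 follows
    # references two levels deep instead of the default one.
    posts = BlogPost.objects.select_related(max_depth=2)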
Advanced queries
================
Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword
docs/guide/signals.rst

@@ -9,7 +9,11 @@ Signal support is provided by the excellent `blinker`_ library and
will gracefully fall back if it is not available.


The following document signals exist in MongoEngine and are pretty self explaintary:
<<<<<<< HEAD
The following document signals exist in MongoEngine and are pretty self explanatory:
=======
The following document signals exist in MongoEngine and are pretty self-explanatory:
>>>>>>> master

* `mongoengine.signals.pre_init`
* `mongoengine.signals.post_init`
@@ -17,6 +21,8 @@ The following document signals exist in MongoEngine and are pretty self explaint
* `mongoengine.signals.post_save`
* `mongoengine.signals.pre_delete`
* `mongoengine.signals.post_delete`
* `mongoengine.signals.pre_bulk_insert`
* `mongoengine.signals.post_bulk_insert`

Example usage::

@@ -42,8 +48,8 @@ Example usage::

    else:
        logging.debug("Updated")

    signals.pre_save.connect(Author.pre_save, sender=Author)
    signals.post_save.connect(Author.post_save, sender=Author)
    signals.pre_save.connect(Author.pre_save, sender=Author)
    signals.post_save.connect(Author.post_save, sender=Author)


.. _blinker: http://pypi.python.org/pypi/blinker
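A hedged sketch of the two new bulk-insert signals added in this changeset (the handler name is made up; the ``documents`` argument is the list handed to the bulk insert)::

    from mongoengine import Document, StringField, signals, connect

    connect('signals-demo')  # illustrative database name

    class Author(Document):
        name = StringField()

    def log_bulk(sender, documents, **kwargs):
        print 'inserting %d documents' % len(documents)

    signals.pre_bulk_insert.connect(log_bulk, sender=Author)
    Author.objects.insert([Author(name='Ross'), Author(name='Harry')])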
docs/index.rst

@@ -18,6 +18,9 @@ MongoDB. To install it, simply run

:doc:`apireference`
    The complete API documentation.

:doc:`upgrade`
    How to upgrade MongoEngine.

:doc:`django`
    Using MongoEngine and Django

@@ -42,7 +45,8 @@ Also, you can join the developers' `mailing list

Changes
-------
See the :doc:`changelog` for a full list of changes to MongoEngine.
See the :doc:`changelog` for a full list of changes to MongoEngine and
:doc:`upgrade` for upgrade information.

.. toctree::
   :hidden:
docs/tutorial.rst

@@ -167,6 +167,11 @@ To delete all the posts if a user is deleted set the rule::

See :class:`~mongoengine.ReferenceField` for more information.

..note::
    MapFields and DictFields currently don't support automatic handling of
    deleted references


Adding data to our Tumblelog
============================
Now that we've defined how our documents will be structured, let's start adding
docs/upgrade.rst

@@ -2,6 +2,22 @@
Upgrading
=========

0.5 to 0.6
==========

Embedded Documents - if you had a `pk` field you will have to rename it from `_id`
to `pk` as pk is no longer a property of Embedded Documents.

Reverse Delete Rules in Embedded Documents, MapFields and DictFields now throw
an InvalidDocument error as they aren't currently supported.

Document._get_subclasses - Is no longer used and the class method has been removed.

Document.objects.with_id - now raises an InvalidQueryError if used with a filter.

FutureWarning - A future warning has been added to all inherited classes that
don't define `allow_inheritance` in their meta.
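A short sketch of the stricter ``with_id`` behaviour noted above (model and database names are illustrative)::

    from mongoengine import Document, StringField, connect
    from mongoengine.queryset import InvalidQueryError

    connect('upgrade-demo')  # illustrative database name

    class User(Document):
        name = StringField()

    user = User(name='Ross')
    user.save()

    try:
        # 0.6: combining a filter with with_id raises rather than
        # silently merging the two queries.
        User.objects(name='Ross').with_id(user.id)
    except InvalidQueryError:
        pass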
0.4 to 0.5
===========

@@ -9,7 +25,7 @@ There have been the following backwards incompatibilities from 0.4 to 0.5. The
main areas of changed are: choices in fields, map_reduce and collection names.

Choice options:
--------------
---------------

Are now expected to be an iterable of tuples, with the first element in each
tuple being the actual value to be stored. The second element is the
mongoengine/__init__.py

@@ -12,9 +12,7 @@ from signals import *

__all__ = (document.__all__ + fields.__all__ + connection.__all__ +
           queryset.__all__ + signals.__all__)

__author__ = 'Harry Marr'

VERSION = (0, 5, 0)
VERSION = (0, 6, 0)


def get_version():
(One file diff suppressed because it is too large.)
mongoengine/connection.py

@@ -1,82 +1,156 @@
from pymongo import Connection
import multiprocessing
import threading

__all__ = ['ConnectionError', 'connect']
import pymongo
from pymongo import Connection, ReplicaSetConnection, uri_parser


_connection_defaults = {
    'host': 'localhost',
    'port': 27017,
}
_connection = {}
_connection_settings = _connection_defaults.copy()
__all__ = ['ConnectionError', 'connect', 'register_connection',
           'DEFAULT_CONNECTION_NAME']

_db_name = None
_db_username = None
_db_password = None
_db = {}

DEFAULT_CONNECTION_NAME = 'default'


class ConnectionError(Exception):
    pass


def _get_connection(reconnect=False):
    """Handles the connection to the database
_connection_settings = {}
_connections = {}
_dbs = {}


def register_connection(alias, name, host='localhost', port=27017,
                        is_slave=False, read_preference=False, slaves=None,
                        username=None, password=None, **kwargs):
    """Add a connection.

    :param alias: the name that will be used to refer to this connection
        throughout MongoEngine
    :param name: the name of the specific database to use
    :param host: the host name of the :program:`mongod` instance to connect to
    :param port: the port that the :program:`mongod` instance is running on
    :param is_slave: whether the connection can act as a slave ** Depreciated pymongo 2.0.1+
    :param read_preference: The read preference for the collection ** Added pymongo 2.1
    :param slaves: a list of aliases of slave connections; each of these must
        be a registered connection that has :attr:`is_slave` set to ``True``
    :param username: username to authenticate with
    :param password: password to authenticate with
    :param kwargs: allow ad-hoc parameters to be passed into the pymongo driver

    """
    global _connection
    identity = get_identity()
    global _connection_settings

    # Handle uri style connections
    if "://" in host:
        uri_dict = uri_parser.parse_uri(host)
        if uri_dict.get('database') is None:
            raise ConnectionError("If using URI style connection include "\
                                  "database name in string")
        _connection_settings[alias] = {
            'host': host,
            'name': uri_dict.get('database'),
            'username': uri_dict.get('username'),
            'password': uri_dict.get('password')
        }
        _connection_settings[alias].update(kwargs)
        return

    _connection_settings[alias] = {
        'name': name,
        'host': host,
        'port': port,
        'is_slave': is_slave,
        'slaves': slaves or [],
        'username': username,
        'password': password,
        'read_preference': read_preference
    }
    _connection_settings[alias].update(kwargs)


def disconnect(alias=DEFAULT_CONNECTION_NAME):
    global _connections
    global _dbs

    if alias in _connections:
        get_connection(alias=alias).disconnect()
        del _connections[alias]
    if alias in _dbs:
        del _dbs[alias]


def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
    global _connections
    # Connect to the database if not already connected
    if _connection.get(identity) is None or reconnect:
    if reconnect:
        disconnect(alias)

    if alias not in _connections:
        if alias not in _connection_settings:
            msg = 'Connection with alias "%s" has not been defined'
            if alias == DEFAULT_CONNECTION_NAME:
                msg = 'You have not defined a default connection'
            raise ConnectionError(msg)
        conn_settings = _connection_settings[alias].copy()

        if hasattr(pymongo, 'version_tuple'):  # Support for 2.1+
            conn_settings.pop('name', None)
            conn_settings.pop('slaves', None)
            conn_settings.pop('is_slave', None)
            conn_settings.pop('username', None)
            conn_settings.pop('password', None)
        else:
            # Get all the slave connections
            if 'slaves' in conn_settings:
                slaves = []
                for slave_alias in conn_settings['slaves']:
                    slaves.append(get_connection(slave_alias))
                conn_settings['slaves'] = slaves
                conn_settings.pop('read_preference')

        connection_class = Connection
        if 'replicaSet' in conn_settings:
            connection_class = ReplicaSetConnection
        try:
            _connection[identity] = Connection(**_connection_settings)
            _connections[alias] = connection_class(**conn_settings)
        except Exception, e:
            raise ConnectionError("Cannot connect to the database:\n%s" % e)
    return _connection[identity]
            raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e))
    return _connections[alias]

def _get_db(reconnect=False):
    """Handles database connections and authentication based on the current
    identity

def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
    global _dbs
    if reconnect:
        disconnect(alias)

    if alias not in _dbs:
        conn = get_connection(alias)
        conn_settings = _connection_settings[alias]
        _dbs[alias] = conn[conn_settings['name']]
        # Authenticate if necessary
        if conn_settings['username'] and conn_settings['password']:
            _dbs[alias].authenticate(conn_settings['username'],
                                     conn_settings['password'])
    return _dbs[alias]


def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs):
    """Connect to the database specified by the 'db' argument.

    Connection settings may be provided here as well if the database is not
    running on the default port on localhost. If authentication is needed,
    provide username and password arguments as well.

    Multiple databases are supported by using aliases. Provide a separate
    `alias` to connect to a different instance of :program:`mongod`.

    .. versionchanged:: 0.6 - added multiple database support.
    """
    global _db, _connection
    identity = get_identity()
    # Connect if not already connected
    if _connection.get(identity) is None or reconnect:
        _connection[identity] = _get_connection(reconnect=reconnect)
    global _connections
    if alias not in _connections:
        register_connection(alias, db, **kwargs)

    if _db.get(identity) is None or reconnect:
        # _db_name will be None if the user hasn't called connect()
        if _db_name is None:
            raise ConnectionError('Not connected to the database')

        # Get DB from current connection and authenticate if necessary
        _db[identity] = _connection[identity][_db_name]
        if _db_username and _db_password:
            _db[identity].authenticate(_db_username, _db_password)

    return _db[identity]

def get_identity():
    """Creates an identity key based on the current process and thread
    identity.
    """
    identity = multiprocessing.current_process()._identity
    identity = 0 if not identity else identity[0]

    identity = (identity, threading.current_thread().ident)
    return identity

def connect(db, username=None, password=None, **kwargs):
    """Connect to the database specified by the 'db' argument. Connection
    settings may be provided here as well if the database is not running on
    the default port on localhost. If authentication is needed, provide
    username and password arguments as well.
    """
    global _connection_settings, _db_name, _db_username, _db_password, _db
    _connection_settings = dict(_connection_defaults, **kwargs)
    _db_name = db
    _db_username = username
    _db_password = password
    return _get_db(reconnect=True)
    return get_connection(alias)

# Support old naming convention
_get_connection = get_connection
_get_db = get_db
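A usage sketch for the rewritten, alias-based connection module (alias and database names are illustrative)::

    from mongoengine.connection import (register_connection,
                                        get_connection, get_db)

    # Declare the connections up front; nothing connects yet.
    register_connection('default', 'main_db')
    register_connection('analytics', 'analytics_db',
                        host='localhost', port=27017)

    # Connections and database handles are created lazily per alias.
    conn = get_connection('analytics')
    db = get_db('analytics')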
mongoengine/dereference.py

@@ -1,17 +1,15 @@
import operator
from bson import DBRef, SON

import pymongo

from base import BaseDict, BaseList, get_document, TopLevelDocumentMetaclass
from fields import ReferenceField
from connection import _get_db
from base import (BaseDict, BaseList, TopLevelDocumentMetaclass, get_document)
from fields import (ReferenceField, ListField, DictField, MapField)
from connection import get_db
from queryset import QuerySet
from document import Document


class DeReference(object):

    def __call__(self, items, max_depth=1, instance=None, name=None, get=False):
    def __call__(self, items, max_depth=1, instance=None, name=None):
        """
        Cheaply dereferences the items to a set depth.
        Also handles the convertion of complex data types.
@@ -33,17 +31,19 @@ class DeReference(object):
            items = [i for i in items]

        self.max_depth = max_depth

        doc_type = None
        if instance and instance._fields:
            doc_type = instance._fields[name].field

        if isinstance(doc_type, ReferenceField):
            doc_type = doc_type.document_type
            if all([i.__class__ == doc_type for i in items]):
                return items

        self.reference_map = self._find_references(items)
        self.object_map = self._fetch_objects(doc_type=doc_type)
        return self._attach_objects(items, 0, instance, name, get)
        return self._attach_objects(items, 0, instance, name)

    def _find_references(self, items, depth=0):
        """
@@ -53,7 +53,7 @@ class DeReference(object):
        :param depth: The current depth of recursion
        """
        reference_map = {}
        if not items:
        if not items or depth >= self.max_depth:
            return reference_map

        # Determine the iterator to use
@@ -63,13 +63,14 @@ class DeReference(object):
            iterator = items.iteritems()

        # Recursively find dbreferences
        depth += 1
        for k, item in iterator:
            if hasattr(item, '_fields'):
                for field_name, field in item._fields.iteritems():
                    v = item._data.get(field_name, None)
                    if isinstance(v, (pymongo.dbref.DBRef)):
                    if isinstance(v, (DBRef)):
                        reference_map.setdefault(field.document_type, []).append(v.id)
                    elif isinstance(v, (dict, pymongo.son.SON)) and '_ref' in v:
                    elif isinstance(v, (dict, SON)) and '_ref' in v:
                        reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id)
                    elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
                        field_cls = getattr(getattr(field, 'field', None), 'document_type', None)
@@ -78,15 +79,15 @@ class DeReference(object):
                        if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
                            key = field_cls
                        reference_map.setdefault(key, []).extend(refs)
            elif isinstance(item, (pymongo.dbref.DBRef)):
            elif isinstance(item, (DBRef)):
                reference_map.setdefault(item.collection, []).append(item.id)
            elif isinstance(item, (dict, pymongo.son.SON)) and '_ref' in item:
            elif isinstance(item, (dict, SON)) and '_ref' in item:
                reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id)
            elif isinstance(item, (dict, list, tuple)) and depth <= self.max_depth:
                references = self._find_references(item, depth)
            elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
                references = self._find_references(item, depth - 1)
                for key, refs in references.iteritems():
                    reference_map.setdefault(key, []).extend(refs)
        depth += 1

        return reference_map

    def _fetch_objects(self, doc_type=None):
@@ -101,16 +102,22 @@ class DeReference(object):
                for key, doc in references.iteritems():
                    object_map[key] = doc
            else:  # Generic reference: use the refs data to convert to document
                references = _get_db()[col].find({'_id': {'$in': refs}})
                for ref in references:
                    if '_cls' in ref:
                        doc = get_document(ref['_cls'])._from_son(ref)
                    else:
                if doc_type and not isinstance(doc_type, (ListField, DictField, MapField,) ):
                    references = doc_type._get_db()[col].find({'_id': {'$in': refs}})
                    for ref in references:
                        doc = doc_type._from_son(ref)
                        object_map[doc.id] = doc
                        object_map[doc.id] = doc
                else:
                    references = get_db()[col].find({'_id': {'$in': refs}})
                    for ref in references:
                        if '_cls' in ref:
                            doc = get_document(ref["_cls"])._from_son(ref)
                        else:
                            doc = doc_type._from_son(ref)
                        object_map[doc.id] = doc
        return object_map

    def _attach_objects(self, items, depth=0, instance=None, name=None, get=False):
    def _attach_objects(self, items, depth=0, instance=None, name=None):
        """
        Recursively finds all db references to be dereferenced

@@ -120,7 +127,6 @@ class DeReference(object):
            :class:`~mongoengine.base.ComplexBaseField`
        :param name: The name of the field, used for tracking changes by
            :class:`~mongoengine.base.ComplexBaseField`
        :param get: A boolean determining if being called by __get__
        """
        if not items:
            if isinstance(items, (BaseDict, BaseList)):
@@ -128,17 +134,16 @@ class DeReference(object):

        if instance:
            if isinstance(items, dict):
                return BaseDict(items, instance=instance, name=name)
                return BaseDict(items, instance, name)
            else:
                return BaseList(items, instance=instance, name=name)
                return BaseList(items, instance, name)

        if isinstance(items, (dict, pymongo.son.SON)):
        if isinstance(items, (dict, SON)):
            if '_ref' in items:
                return self.object_map.get(items['_ref'].id, items)
            elif '_types' in items and '_cls' in items:
                doc = get_document(items['_cls'])._from_son(items)
                if not get:
                    doc._data = self._attach_objects(doc._data, depth, doc, name, get)
                doc._data = self._attach_objects(doc._data, depth, doc, name)
                return doc

        if not hasattr(items, 'items'):
@@ -150,6 +155,7 @@ class DeReference(object):
            iterator = items.iteritems()
            data = {}

        depth += 1
        for k, v in iterator:
            if is_list:
                data.append(v)
@@ -161,24 +167,22 @@ class DeReference(object):
            elif hasattr(v, '_fields'):
                for field_name, field in v._fields.iteritems():
                    v = data[k]._data.get(field_name, None)
                    if isinstance(v, (pymongo.dbref.DBRef)):
                    if isinstance(v, (DBRef)):
                        data[k]._data[field_name] = self.object_map.get(v.id, v)
                    elif isinstance(v, (dict, pymongo.son.SON)) and '_ref' in v:
                    elif isinstance(v, (dict, SON)) and '_ref' in v:
                        data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v)
                    elif isinstance(v, dict) and depth < self.max_depth:
                        data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name, get=get)
                    elif isinstance(v, (list, tuple)):
                        data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name, get=get)
            elif isinstance(v, (dict, list, tuple)) and depth < self.max_depth:
                data[k] = self._attach_objects(v, depth, instance=instance, name=name, get=get)
                    elif isinstance(v, dict) and depth <= self.max_depth:
                        data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
                    elif isinstance(v, (list, tuple)) and depth <= self.max_depth:
                        data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
            elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
                data[k] = self._attach_objects(v, depth - 1, instance=instance, name=name)
            elif hasattr(v, 'id'):
                data[k] = self.object_map.get(v.id, v)

        if instance and name:
            if is_list:
                return BaseList(data, instance=instance, name=name)
            return BaseDict(data, instance=instance, name=name)
                return BaseList(data, instance, name)
            return BaseDict(data, instance, name)
        depth += 1
        return data

dereference = DeReference()
mongoengine/django/sessions.py

@@ -5,16 +5,22 @@ from django.utils.encoding import force_unicode
from mongoengine.document import Document
from mongoengine import fields
from mongoengine.queryset import OperationError

from mongoengine.connection import DEFAULT_CONNECTION_NAME
from django.conf import settings
from datetime import datetime

MONGOENGINE_SESSION_DB_ALIAS = getattr(
    settings, 'MONGOENGINE_SESSION_DB_ALIAS',
    DEFAULT_CONNECTION_NAME)

class MongoSession(Document):
    session_key = fields.StringField(primary_key=True, max_length=40)
    session_data = fields.StringField()
    expire_date = fields.DateTimeField()

    meta = {'collection': 'django_session', 'allow_inheritance': False}
    meta = {'collection': 'django_session',
            'db_alias': MONGOENGINE_SESSION_DB_ALIAS,
            'allow_inheritance': False}


class SessionStore(SessionBase):
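A hedged configuration sketch for the new setting: pointing the Django session backend at its own connection alias (the SESSION_ENGINE path assumes this module is used as the session engine; alias and database names are illustrative)::

    # settings.py
    SESSION_ENGINE = 'mongoengine.django.sessions'
    MONGOENGINE_SESSION_DB_ALIAS = 'sessions'

    # at application startup, register the alias the session
    # documents will be stored under
    from mongoengine import connect
    connect('session-db', alias='sessions')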
@@ -1,13 +1,14 @@
import pymongo
from bson.dbref import DBRef

from mongoengine import signals
from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument,
ValidationError, BaseDict, BaseList)
BaseDict, BaseList)
from queryset import OperationError
from connection import _get_db
from connection import get_db, DEFAULT_CONNECTION_NAME

import pymongo

__all__ = ['Document', 'EmbeddedDocument', 'ValidationError',
'OperationError', 'InvalidCollectionError']
__all__ = ['Document', 'EmbeddedDocument', 'DynamicDocument',
'DynamicEmbeddedDocument', 'OperationError', 'InvalidCollectionError']


class InvalidCollectionError(Exception):
@@ -23,6 +24,10 @@ class EmbeddedDocument(BaseDocument):

__metaclass__ = DocumentMetaclass

def __init__(self, *args, **kwargs):
super(EmbeddedDocument, self).__init__(*args, **kwargs)
self._changed_fields = []

def __delattr__(self, *args, **kwargs):
"""Handle deletions of fields"""
field_name = args[0]
@@ -35,7 +40,6 @@ class EmbeddedDocument(BaseDocument):
super(EmbeddedDocument, self).__delattr__(*args, **kwargs)



class Document(BaseDocument):
"""The base class used for defining the structure and properties of
collections of documents stored in MongoDB. Inherit from this class, and
@@ -77,42 +81,57 @@ class Document(BaseDocument):
"""
__metaclass__ = TopLevelDocumentMetaclass

@classmethod
def _get_collection(self):
"""Returns the collection for the document."""
db = _get_db()
collection_name = self._get_collection_name()
@apply
def pk():
"""Primary key alias
"""
def fget(self):
return getattr(self, self._meta['id_field'])
def fset(self, value):
return setattr(self, self._meta['id_field'], value)
return property(fget, fset)

if not hasattr(self, '_collection') or self._collection is None:
@classmethod
def _get_db(cls):
"""Some Model using other db_alias"""
return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME))

@classmethod
def _get_collection(cls):
"""Returns the collection for the document."""
if not hasattr(cls, '_collection') or cls._collection is None:
db = cls._get_db()
collection_name = cls._get_collection_name()
# Create collection as a capped collection if specified
if self._meta['max_size'] or self._meta['max_documents']:
if cls._meta['max_size'] or cls._meta['max_documents']:
# Get max document limit and max byte size from meta
max_size = self._meta['max_size'] or 10000000 # 10MB default
max_documents = self._meta['max_documents']
max_size = cls._meta['max_size'] or 10000000 # 10MB default
max_documents = cls._meta['max_documents']

if collection_name in db.collection_names():
self._collection = db[collection_name]
cls._collection = db[collection_name]
# The collection already exists, check if its capped
# options match the specified capped options
options = self._collection.options()
options = cls._collection.options()
if options.get('max') != max_documents or \
options.get('size') != max_size:
msg = ('Cannot create collection "%s" as a capped '
'collection as it already exists') % self._collection
'collection as it already exists') % cls._collection
raise InvalidCollectionError(msg)
else:
# Create the collection as a capped collection
opts = {'capped': True, 'size': max_size}
if max_documents:
opts['max'] = max_documents
self._collection = db.create_collection(
cls._collection = db.create_collection(
collection_name, **opts
)
else:
self._collection = db[collection_name]
return self._collection
cls._collection = db[collection_name]
return cls._collection

def save(self, safe=True, force_insert=False, validate=True, write_options=None, _refs=None):
def save(self, safe=True, force_insert=False, validate=True, write_options=None,
cascade=None, cascade_kwargs=None, _refs=None):
"""Save the :class:`~mongoengine.Document` to the database. If the
document already exists, it will be updated, otherwise it will be
created.
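For context, a sketch of how the new per-document _get_db() hook is driven by meta['db_alias']; the alias, database, and class names are illustrative:

    from mongoengine import Document, StringField, connect

    connect('maindb')                    # default connection
    connect('logdb', alias='logs')       # hypothetical secondary alias

    class LogEntry(Document):
        message = StringField()
        meta = {'db_alias': 'logs'}      # _get_db() resolves this alias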
@@ -130,14 +149,22 @@ class Document(BaseDocument):
which will be used as options for the resultant ``getLastError`` command.
For example, ``save(..., w=2, fsync=True)`` will wait until at least two servers
have recorded the write and will force an fsync on each server being written to.
:param cascade: Sets the flag for cascading saves. You can set a default by setting
"cascade" in the document __meta__
:param cascade_kwargs: optional kwargs dictionary to be passed through to cascading saves
:param _refs: A list of processed references used in cascading saves

.. versionchanged:: 0.5
In existing documents it only saves changed fields using set / unset
Saves are cascaded and any :class:`~pymongo.dbref.DBRef` objects
Saves are cascaded and any :class:`~bson.dbref.DBRef` objects
that have changes are saved as well.
"""
from fields import ReferenceField, GenericReferenceField
.. versionchanged:: 0.6
Cascade saves are optional and default to True; for fine-grained
control you can turn them off using document meta['cascade'] = False.
You can also pass different kwargs to the cascade save using cascade_kwargs,
which overwrites the existing kwargs with custom values.

"""
signals.pre_save.send(self.__class__, document=self)

if validate:
@@ -148,11 +175,11 @@ class Document(BaseDocument):

doc = self.to_mongo()

created = '_id' in doc
creation_mode = force_insert or not created
created = force_insert or '_id' not in doc

try:
collection = self.__class__.objects._collection
if creation_mode:
if created:
if force_insert:
object_id = collection.insert(doc, safe=safe, **write_options)
else:
@@ -160,21 +187,33 @@ class Document(BaseDocument):
else:
object_id = doc['_id']
updates, removals = self._delta()
if updates:
collection.update({'_id': object_id}, {"$set": updates}, upsert=True, safe=safe, **write_options)
if removals:
collection.update({'_id': object_id}, {"$unset": removals}, upsert=True, safe=safe, **write_options)

# Save any references / generic references
_refs = _refs or []
for name, cls in self._fields.items():
if isinstance(cls, (ReferenceField, GenericReferenceField)):
ref = getattr(self, name)
if ref and str(ref) not in _refs:
_refs.append(str(ref))
ref.save(safe=safe, force_insert=force_insert,
validate=validate, write_options=write_options,
_refs=_refs)
# Need to add shard key to query, or you get an error
select_dict = {'_id': object_id}
shard_key = self.__class__._meta.get('shard_key', tuple())
for k in shard_key:
actual_key = self._db_field_map.get(k, k)
select_dict[actual_key] = doc[actual_key]

upsert = self._created
if updates:
collection.update(select_dict, {"$set": updates}, upsert=upsert, safe=safe, **write_options)
if removals:
collection.update(select_dict, {"$unset": removals}, upsert=upsert, safe=safe, **write_options)

cascade = self._meta.get('cascade', True) if cascade is None else cascade
if cascade:
kwargs = {
"safe": safe,
"force_insert": force_insert,
"validate": validate,
"write_options": write_options,
"cascade": cascade
}
if cascade_kwargs: # Allow granular control over cascades
kwargs.update(cascade_kwargs)
kwargs['_refs'] = _refs
self.cascade_save(**kwargs)

except pymongo.errors.OperationFailure, err:
message = 'Could not save document (%s)'
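A sketch of the new cascade controls on save(); Post and Author are placeholder models, not from this diff:

    class Author(Document):
        name = StringField()

    class Post(Document):
        author = ReferenceField(Author)
        meta = {'cascade': False}        # document-level default

    post.save()                          # references are not cascade-saved
    post.save(cascade=True)              # per-call override
    post.save(cascade=True,
              cascade_kwargs={'validate': False})  # tweak the cascaded saves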
@@ -184,21 +223,26 @@ class Document(BaseDocument):
id_field = self._meta['id_field']
self[id_field] = self._fields[id_field].to_python(object_id)

def reset_changed_fields(doc, inspected_docs=None):
"""Loop through and reset changed fields lists"""
self._changed_fields = []
self._created = False
signals.post_save.send(self.__class__, document=self, created=created)

inspected_docs = inspected_docs or []
inspected_docs.append(doc)
if hasattr(doc, '_changed_fields'):
doc._changed_fields = []

for field_name in doc._fields:
field = getattr(doc, field_name)
if field not in inspected_docs and hasattr(field, '_changed_fields'):
reset_changed_fields(field, inspected_docs)

reset_changed_fields(self)
signals.post_save.send(self.__class__, document=self, created=creation_mode)
def cascade_save(self, *args, **kwargs):
"""Recursively saves any references / generic references on an object"""
from fields import ReferenceField, GenericReferenceField
_refs = kwargs.get('_refs', []) or []
for name, cls in self._fields.items():
if not isinstance(cls, (ReferenceField, GenericReferenceField)):
continue
ref = getattr(self, name)
if not ref:
continue
ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data))
if ref and ref_id not in _refs:
_refs.append(ref_id)
kwargs["_refs"] = _refs
ref.save(**kwargs)
ref._changed_fields = []

def update(self, **kwargs):
"""Performs an update on the :class:`~mongoengine.Document`
@@ -210,7 +254,12 @@ class Document(BaseDocument):
if not self.pk:
raise OperationError('attempt to update a document not yet saved')

return self.__class__.objects(pk=self.pk).update_one(**kwargs)
# Need to add shard key to query, or you get an error
select_dict = {'pk': self.pk}
shard_key = self.__class__._meta.get('shard_key', tuple())
for k in shard_key:
select_dict[k] = getattr(self, k)
return self.__class__.objects(**select_dict).update_one(**kwargs)
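A sketch of the shard_key meta that the save()/update() changes above cater for; the field names are hypothetical:

    class LogEntry(Document):
        machine = StringField()
        log = StringField()
        meta = {'shard_key': ('machine',)}   # included in update selectors

    entry.update(set__log='new text')        # query: {'pk': ..., 'machine': ...}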
def delete(self, safe=False):
"""Delete the :class:`~mongoengine.Document` from the database. This
@@ -220,10 +269,8 @@ class Document(BaseDocument):
"""
signals.pre_delete.send(self.__class__, document=self)

id_field = self._meta['id_field']
object_id = self._fields[id_field].to_mongo(self[id_field])
try:
self.__class__.objects(**{id_field: object_id}).delete(safe=safe)
self.__class__.objects(pk=self.pk).delete(safe=safe)
except pymongo.errors.OperationFailure, err:
message = u'Could not delete document (%s)' % err.message
raise OperationError(message)
@@ -231,47 +278,54 @@ class Document(BaseDocument):
signals.post_delete.send(self.__class__, document=self)

def select_related(self, max_depth=1):
"""Handles dereferencing of :class:`~pymongo.dbref.DBRef` objects to
"""Handles dereferencing of :class:`~bson.dbref.DBRef` objects to
a maximum depth in order to cut down the number of queries to MongoDB.

.. versionadded:: 0.5
"""
from dereference import dereference
self._data = dereference(self._data, max_depth)
from dereference import DeReference
self._data = DeReference()(self._data, max_depth)
return self

def reload(self):
def reload(self, max_depth=1):
"""Reloads all attributes from the database.

.. versionadded:: 0.1.2
.. versionchanged:: 0.6 Now chainable
"""
id_field = self._meta['id_field']
obj = self.__class__.objects(**{id_field: self[id_field]}).first()
obj = self.__class__.objects(
**{id_field: self[id_field]}
).first().select_related(max_depth=max_depth)
for field in self._fields:
setattr(self, field, self._reload(field, obj[field]))
self._changed_fields = []
if self._dynamic:
for name in self._dynamic_fields.keys():
setattr(self, name, self._reload(name, obj._data[name]))
self._changed_fields = obj._changed_fields
return obj

def _reload(self, key, value):
"""Used by :meth:`~mongoengine.Document.reload` to ensure the
correct instance is linked to self.
"""
if isinstance(value, BaseDict):
value = [(k, self._reload(k,v)) for k,v in value.items()]
value = BaseDict(value, instance=self, name=key)
value = [(k, self._reload(k, v)) for k, v in value.items()]
value = BaseDict(value, self, key)
elif isinstance(value, BaseList):
value = [self._reload(key, v) for v in value]
value = BaseList(value, instance=self, name=key)
elif isinstance(value, EmbeddedDocument):
value = BaseList(value, self, key)
elif isinstance(value, (EmbeddedDocument, DynamicEmbeddedDocument)):
value._changed_fields = []
return value

def to_dbref(self):
"""Returns an instance of :class:`~pymongo.dbref.DBRef` useful in
"""Returns an instance of :class:`~bson.dbref.DBRef` useful in
`__raw__` queries."""
if not self.pk:
msg = "Only saved documents can have a valid dbref"
raise OperationError(msg)
return pymongo.dbref.DBRef(self.__class__._get_collection_name(), self.pk)
return DBRef(self.__class__._get_collection_name(), self.pk)

@classmethod
def register_delete_rule(cls, document_cls, field_name, rule):
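The dereferencing helpers above are typically used as in this sketch; Post is a placeholder model:

    post = Post.objects.first().select_related(max_depth=2)  # batch-dereference DBRefs
    post = post.reload(max_depth=1)   # 0.6: reload dereferences too and is chainable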
@@ -285,8 +339,52 @@ class Document(BaseDocument):
"""Drops the entire collection associated with this
:class:`~mongoengine.Document` type from the database.
"""
db = _get_db()
from mongoengine.queryset import QuerySet
db = cls._get_db()
db.drop_collection(cls._get_collection_name())
QuerySet._reset_already_indexed(cls)


class DynamicDocument(Document):
"""A Dynamic Document class allowing flexible, expandable and uncontrolled
schemas. As a :class:`~mongoengine.Document` subclass, acts in the same
way as an ordinary document but has expando style properties. Any data
passed or set against the :class:`~mongoengine.DynamicDocument` that is
not a field is automatically converted into a
:class:`~mongoengine.BaseDynamicField` and data can be attributed to that
field.

.. note::

There is one caveat on Dynamic Documents: fields cannot start with `_`
"""
__metaclass__ = TopLevelDocumentMetaclass
_dynamic = True

def __delattr__(self, *args, **kwargs):
"""Deletes the attribute by setting to None and allowing _delta to unset
it"""
field_name = args[0]
if field_name in self._dynamic_fields:
setattr(self, field_name, None)
else:
super(DynamicDocument, self).__delattr__(*args, **kwargs)


class DynamicEmbeddedDocument(EmbeddedDocument):
"""A Dynamic Embedded Document class allowing flexible, expandable and
uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more
information about dynamic documents.
"""

__metaclass__ = DocumentMetaclass
_dynamic = True

def __delattr__(self, *args, **kwargs):
"""Deletes the attribute by setting to None and allowing _delta to unset
it"""
field_name = args[0]
setattr(self, field_name, None)

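A brief sketch of the expando behaviour described above; the names are illustrative:

    class Page(DynamicDocument):
        title = StringField()

    page = Page(title='Hello')
    page.tags = ['mongodb', 'dynamic']   # undeclared; stored as a dynamic field
    page.save()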
class MapReduceDocument(object):
@@ -294,7 +392,7 @@ class MapReduceDocument(object):

:param collection: An instance of :class:`~pymongo.Collection`
:param key: Document/result key, often an instance of
:class:`~pymongo.objectid.ObjectId`. If supplied as
:class:`~bson.objectid.ObjectId`. If supplied as
an ``ObjectId`` found in the given ``collection``,
the object can be accessed via the ``object`` property.
:param value: The result(s) for this key.

@@ -1,18 +1,30 @@
from base import (BaseField, ComplexBaseField, ObjectIdField,
ValidationError, get_document)
from queryset import DO_NOTHING
from document import Document, EmbeddedDocument
from connection import _get_db
from operator import itemgetter

import re
import pymongo
import pymongo.dbref
import pymongo.son
import pymongo.binary
import datetime, time
import datetime
import time
import decimal
import gridfs
import re
import uuid

from bson import Binary, DBRef, SON, ObjectId

from base import (BaseField, ComplexBaseField, ObjectIdField,
ValidationError, get_document)
from queryset import DO_NOTHING, QuerySet
from document import Document, EmbeddedDocument
from connection import get_db, DEFAULT_CONNECTION_NAME
from operator import itemgetter


try:
from PIL import Image, ImageOps
except ImportError:
Image = None
ImageOps = None

try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO


__all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField',
@@ -20,8 +32,8 @@ __all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField',
'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField',
'DecimalField', 'ComplexDateTimeField', 'URLField',
'GenericReferenceField', 'FileField', 'BinaryField',
'SortedListField', 'EmailField', 'GeoPointField',
'SequenceField', 'GenericEmbeddedDocumentField']
'SortedListField', 'EmailField', 'GeoPointField', 'ImageField',
'SequenceField', 'UUIDField', 'GenericEmbeddedDocumentField']

RECURSIVE_REFERENCE_CONSTANT = 'self'

@@ -40,17 +52,17 @@ class StringField(BaseField):
return unicode(value)

def validate(self, value):
assert isinstance(value, (str, unicode))
if not isinstance(value, (str, unicode)):
self.error('StringField only accepts string values')

if self.max_length is not None and len(value) > self.max_length:
raise ValidationError('String value is too long')
self.error('String value is too long')

if self.min_length is not None and len(value) < self.min_length:
raise ValidationError('String value is too short')
self.error('String value is too short')

if self.regex is not None and self.regex.match(value) is None:
message = 'String value did not match validation regex'
raise ValidationError(message)
self.error('String value did not match validation regex')

def lookup_member(self, member_name):
return None
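A small sketch of the constraints these validators enforce; the regex shown is an example, not from the diff:

    class User(Document):
        username = StringField(regex=r'^[a-z0-9_]+$', min_length=3, max_length=30)

    User(username='ok_name').save()   # validates cleanly
    User(username='x').save()         # ValidationError via self.error(...)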
@@ -100,16 +112,15 @@ class URLField(StringField):

def validate(self, value):
if not URLField.URL_REGEX.match(value):
raise ValidationError('Invalid URL: %s' % value)
self.error('Invalid URL: %s' % value)

if self.verify_exists:
import urllib2
try:
request = urllib2.Request(value)
response = urllib2.urlopen(request)
urllib2.urlopen(request)
except Exception, e:
message = 'This URL appears to be a broken link: %s' % e
raise ValidationError(message)
self.error('This URL appears to be a broken link: %s' % e)


class EmailField(StringField):
@@ -126,7 +137,7 @@ class EmailField(StringField):

def validate(self, value):
if not EmailField.EMAIL_REGEX.match(value):
raise ValidationError('Invalid Mail-address: %s' % value)
self.error('Invalid Mail-address: %s' % value)


class IntField(BaseField):
@@ -144,13 +155,13 @@ class IntField(BaseField):
try:
value = int(value)
except:
raise ValidationError('%s could not be converted to int' % value)
self.error('%s could not be converted to int' % value)

if self.min_value is not None and value < self.min_value:
raise ValidationError('Integer value is too small')
self.error('Integer value is too small')

if self.max_value is not None and value > self.max_value:
raise ValidationError('Integer value is too large')
self.error('Integer value is too large')

def prepare_query_value(self, op, value):
return int(value)
@@ -170,13 +181,14 @@ class FloatField(BaseField):
def validate(self, value):
if isinstance(value, int):
value = float(value)
assert isinstance(value, float)
if not isinstance(value, float):
self.error('FloatField only accepts float values')

if self.min_value is not None and value < self.min_value:
raise ValidationError('Float value is too small')
self.error('Float value is too small')

if self.max_value is not None and value > self.max_value:
raise ValidationError('Float value is too large')
self.error('Float value is too large')

def prepare_query_value(self, op, value):
return float(value)
@@ -207,13 +219,13 @@ class DecimalField(BaseField):
try:
value = decimal.Decimal(value)
except Exception, exc:
raise ValidationError('Could not convert to decimal: %s' % exc)
self.error('Could not convert value to decimal: %s' % exc)

if self.min_value is not None and value < self.min_value:
raise ValidationError('Decimal value is too small')
self.error('Decimal value is too small')

if self.max_value is not None and value > self.max_value:
raise ValidationError('Decimal value is too large')
self.error('Decimal value is too large')


class BooleanField(BaseField):
@@ -226,7 +238,8 @@ class BooleanField(BaseField):
return bool(value)

def validate(self, value):
assert isinstance(value, bool)
if not isinstance(value, bool):
self.error('BooleanField only accepts boolean values')


class DateTimeField(BaseField):
@@ -239,7 +252,8 @@ class DateTimeField(BaseField):
"""

def validate(self, value):
assert isinstance(value, (datetime.datetime, datetime.date))
if not isinstance(value, (datetime.datetime, datetime.date)):
self.error(u'cannot parse date "%s"' % value)

def to_mongo(self, value):
return self.prepare_query_value(None, value)
@@ -360,8 +374,8 @@ class ComplexDateTimeField(StringField):

def validate(self, value):
if not isinstance(value, datetime.datetime):
raise ValidationError('Only datetime objects may be used in a \
ComplexDateTimeField')
self.error('Only datetime objects may be used in a '
'ComplexDateTimeField')

def to_python(self, value):
return self._convert_from_string(value)
@@ -381,8 +395,8 @@ class EmbeddedDocumentField(BaseField):
def __init__(self, document_type, **kwargs):
if not isinstance(document_type, basestring):
if not issubclass(document_type, EmbeddedDocument):
raise ValidationError('Invalid embedded document class '
'provided to an EmbeddedDocumentField')
self.error('Invalid embedded document class provided to an '
'EmbeddedDocumentField')
self.document_type_obj = document_type
super(EmbeddedDocumentField, self).__init__(**kwargs)

@@ -411,8 +425,8 @@ class EmbeddedDocumentField(BaseField):
"""
# Using isinstance also works for subclasses of self.document
if not isinstance(value, self.document_type):
raise ValidationError('Invalid embedded document instance '
'provided to an EmbeddedDocumentField')
self.error('Invalid embedded document instance provided to an '
'EmbeddedDocumentField')
self.document_type.validate(value)

def lookup_member(self, member_name):
@@ -441,8 +455,8 @@ class GenericEmbeddedDocumentField(BaseField):

def validate(self, value):
if not isinstance(value, EmbeddedDocument):
raise ValidationError('Invalid embedded document instance '
'provided to an GenericEmbeddedDocumentField')
self.error('Invalid embedded document instance provided to an '
'GenericEmbeddedDocumentField')

value.validate()

@@ -459,6 +473,9 @@ class GenericEmbeddedDocumentField(BaseField):
class ListField(ComplexBaseField):
"""A list field that wraps a standard field, allowing multiple instances
of the field to be used as a list in the database.

.. note::
Required means it cannot be empty - as the default for ListFields is []
"""

# ListFields cannot be indexed with _types - MongoDB doesn't support this
@@ -472,9 +489,9 @@ class ListField(ComplexBaseField):
def validate(self, value):
"""Make sure that a list of valid fields is being used.
"""
if not isinstance(value, (list, tuple)):
raise ValidationError('Only lists and tuples may be used in a '
'list field')
if (not isinstance(value, (list, tuple, QuerySet)) or
isinstance(value, basestring)):
self.error('Only lists and tuples may be used in a list field')
super(ListField, self).validate(value)

def prepare_query_value(self, op, value):
@@ -491,27 +508,40 @@ class SortedListField(ListField):
the database in order to ensure that a sorted list is always
retrieved.

.. warning::
There is a potential race condition when handling lists. If you set /
save the whole list then other processes trying to save the whole list
as well could overwrite changes. The safest way to append to a list is
to perform a push operation.

.. versionadded:: 0.4
.. versionchanged:: 0.6 - added reverse keyword
"""

_ordering = None
_order_reverse = False

def __init__(self, field, **kwargs):
if 'ordering' in kwargs.keys():
self._ordering = kwargs.pop('ordering')
if 'reverse' in kwargs.keys():
self._order_reverse = kwargs.pop('reverse')
super(SortedListField, self).__init__(field, **kwargs)

def to_mongo(self, value):
value = super(SortedListField, self).to_mongo(value)
if self._ordering is not None:
return sorted(value, key=itemgetter(self._ordering))
return sorted(value)
return sorted(value, key=itemgetter(self._ordering), reverse=self._order_reverse)
return sorted(value, reverse=self._order_reverse)


class DictField(ComplexBaseField):
"""A dictionary field that wraps a standard Python dictionary. This is
similar to an embedded document, but the structure is not defined.

.. note::
Required means it cannot be empty - as the default for DictFields is {}

.. versionadded:: 0.3
.. versionchanged:: 0.5 - Can now handle complex / varying types of data
"""
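A sketch of the new ordering/reverse keywords on SortedListField; the model names are placeholders:

    class Comment(EmbeddedDocument):
        order = IntField()
        text = StringField()

    class Post(Document):
        # kept sorted by 'order', descending, whenever the list is saved
        comments = SortedListField(EmbeddedDocumentField(Comment),
                                   ordering='order', reverse=True)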
@@ -519,7 +549,8 @@ class DictField(ComplexBaseField):
def __init__(self, basecls=None, field=None, *args, **kwargs):
self.field = field
self.basecls = basecls or BaseField
assert issubclass(self.basecls, BaseField)
if not issubclass(self.basecls, BaseField):
self.error('DictField only accepts dict values')
kwargs.setdefault('default', lambda: {})
super(DictField, self).__init__(*args, **kwargs)

@@ -527,12 +558,13 @@ class DictField(ComplexBaseField):
"""Make sure that a list of valid fields is being used.
"""
if not isinstance(value, dict):
raise ValidationError('Only dictionaries may be used in a '
'DictField')
self.error('Only dictionaries may be used in a DictField')

if any(('.' in k or '$' in k) for k in value):
raise ValidationError('Invalid dictionary key name - keys may not '
'contain "." or "$" characters')
if any(k for k in value.keys() if not isinstance(k, basestring)):
self.error('Invalid dictionary key - documents must have only string keys')
if any(('.' in k or '$' in k) for k in value.keys()):
self.error('Invalid dictionary key name - keys may not contain "."'
' or "$" characters')
super(DictField, self).validate(value)

def lookup_member(self, member_name):
@@ -559,18 +591,19 @@ class MapField(DictField):

def __init__(self, field=None, *args, **kwargs):
if not isinstance(field, BaseField):
raise ValidationError('Argument to MapField constructor must be '
'a valid field')
self.error('Argument to MapField constructor must be a valid '
'field')
super(MapField, self).__init__(field=field, *args, **kwargs)



class ReferenceField(BaseField):
"""A reference to a document that will be automatically dereferenced on
access (lazily).

Use the `reverse_delete_rule` to handle what should happen if the document
the field is referencing is deleted.
the field is referencing is deleted. EmbeddedDocuments, DictFields and
MapFields do not support reverse_delete_rules and an `InvalidDocumentError`
will be raised if trying to set on one of these Document / Field types.

The options are:

@@ -590,8 +623,8 @@ class ReferenceField(BaseField):
"""
if not isinstance(document_type, basestring):
if not issubclass(document_type, (Document, basestring)):
raise ValidationError('Argument to ReferenceField constructor '
'must be a document class or a string')
self.error('Argument to ReferenceField constructor must be a '
'document class or a string')
self.document_type_obj = document_type
self.reverse_delete_rule = reverse_delete_rule
super(ReferenceField, self).__init__(**kwargs)
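A sketch of the reverse_delete_rule option documented above; Org and Employee are placeholders:

    from mongoengine.queryset import CASCADE   # DO_NOTHING, NULLIFY, DENY also exported

    class Employee(Document):
        # deleting the referenced Org document also deletes its employees
        org = ReferenceField('Org', reverse_delete_rule=CASCADE)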
@@ -615,8 +648,8 @@ class ReferenceField(BaseField):
# Get value from document instance if available
value = instance._data.get(self.name)
# Dereference DBRefs
if isinstance(value, (pymongo.dbref.DBRef)):
value = _get_db().dereference(value)
if isinstance(value, (DBRef)):
value = self.document_type._get_db().dereference(value)
if value is not None:
instance._data[self.name] = self.document_type._from_son(value)

@@ -630,25 +663,28 @@ class ReferenceField(BaseField):
# We need the id from the saved object to create the DBRef
id_ = document.id
if id_ is None:
raise ValidationError('You can only reference documents once '
'they have been saved to the database')
self.error('You can only reference documents once they have'
' been saved to the database')
else:
id_ = document

id_ = id_field.to_mongo(id_)
collection = self.document_type._get_collection_name()
return pymongo.dbref.DBRef(collection, id_)
return DBRef(collection, id_)

def prepare_query_value(self, op, value):
if value is None:
return None

return self.to_mongo(value)

def validate(self, value):
assert isinstance(value, (self.document_type, pymongo.dbref.DBRef))
if not isinstance(value, (self.document_type, DBRef)):
self.error('A ReferenceField only accepts DBRef')

if isinstance(value, Document) and value.id is None:
raise ValidationError('You can only reference documents once '
'they have been saved to the database')

self.error('You can only reference documents once they have been '
'saved to the database')

def lookup_member(self, member_name):
return self.document_type._fields.get(member_name)
@@ -669,24 +705,24 @@ class GenericReferenceField(BaseField):
return self

value = instance._data.get(self.name)
if isinstance(value, (dict, pymongo.son.SON)):
if isinstance(value, (dict, SON)):
instance._data[self.name] = self.dereference(value)

return super(GenericReferenceField, self).__get__(instance, owner)

def validate(self, value):
if not isinstance(value, (Document, pymongo.dbref.DBRef)):
raise ValidationError('GenericReferences can only contain documents')
if not isinstance(value, (Document, DBRef)):
self.error('GenericReferences can only contain documents')

# We need the id from the saved object to create the DBRef
if isinstance(value, Document) and value.id is None:
raise ValidationError('You can only reference documents once '
'they have been saved to the database')
self.error('You can only reference documents once they have been'
' saved to the database')

def dereference(self, value):
doc_cls = get_document(value['_cls'])
reference = value['_ref']
doc = _get_db().dereference(reference)
doc = doc_cls._get_db().dereference(reference)
if doc is not None:
doc = doc_cls._from_son(doc)
return doc
@@ -702,17 +738,20 @@ class GenericReferenceField(BaseField):
# We need the id from the saved object to create the DBRef
id_ = document.id
if id_ is None:
raise ValidationError('You can only reference documents once '
'they have been saved to the database')
self.error('You can only reference documents once they have'
' been saved to the database')
else:
id_ = document

id_ = id_field.to_mongo(id_)
collection = document._get_collection_name()
ref = pymongo.dbref.DBRef(collection, id_)
ref = DBRef(collection, id_)
return {'_cls': document._class_name, '_ref': ref}

def prepare_query_value(self, op, value):
if value is None:
return None

return self.to_mongo(value)


@@ -725,17 +764,18 @@ class BinaryField(BaseField):
super(BinaryField, self).__init__(**kwargs)

def to_mongo(self, value):
return pymongo.binary.Binary(value)
return Binary(value)

def to_python(self, value):
# Returns str not unicode as this is binary data
return str(value)

def validate(self, value):
assert isinstance(value, str)
if not isinstance(value, str):
self.error('BinaryField only accepts string values')

if self.max_bytes is not None and len(value) > self.max_bytes:
raise ValidationError('Binary value is too long')
self.error('Binary value is too long')


class GridFSError(Exception):
@@ -747,17 +787,28 @@ class GridFSProxy(object):

.. versionadded:: 0.4
.. versionchanged:: 0.5 - added optional size param to read
.. versionchanged:: 0.6 - added collection name param
"""

def __init__(self, grid_id=None, key=None, instance=None):
self.fs = gridfs.GridFS(_get_db()) # Filesystem instance
self.newfile = None # Used for partial writes
self.grid_id = grid_id # Store GridFS id for file
self.gridout = None
_fs = None

def __init__(self, grid_id=None, key=None,
instance=None,
db_alias=DEFAULT_CONNECTION_NAME,
collection_name='fs'):
self.grid_id = grid_id # Store GridFS id for file
self.key = key
self.instance = instance
self.db_alias = db_alias
self.collection_name = collection_name
self.newfile = None # Used for partial writes
self.gridout = None

def __getattr__(self, name):
attrs = ('_fs', 'grid_id', 'key', 'instance', 'db_alias',
'collection_name', 'newfile', 'gridout')
if name in attrs:
return self.__getattribute__(name)
obj = self.get()
if name in dir(obj):
return getattr(obj, name)
@@ -769,6 +820,17 @@ class GridFSProxy(object):
def __nonzero__(self):
return bool(self.grid_id)

def __getstate__(self):
self_dict = self.__dict__
self_dict['_fs'] = None
return self_dict

@property
def fs(self):
if not self._fs:
self._fs = gridfs.GridFS(get_db(self.db_alias), self.collection_name)
return self._fs

def get(self, id=None):
if id:
self.grid_id = id
@@ -840,10 +902,16 @@ class FileField(BaseField):

.. versionadded:: 0.4
.. versionchanged:: 0.5 added optional size param for read
.. versionchanged:: 0.6 added db_alias for multidb support
"""
proxy_class = GridFSProxy

def __init__(self, **kwargs):
def __init__(self,
db_alias=DEFAULT_CONNECTION_NAME,
collection_name="fs", **kwargs):
super(FileField, self).__init__(**kwargs)
self.collection_name = collection_name
self.db_alias = db_alias

def __get__(self, instance, owner):
if instance is None:
@@ -852,12 +920,14 @@ class FileField(BaseField):
# Check if a file already exists for this model
grid_file = instance._data.get(self.name)
self.grid_file = grid_file
if isinstance(self.grid_file, GridFSProxy):
if isinstance(self.grid_file, self.proxy_class):
if not self.grid_file.key:
self.grid_file.key = self.name
self.grid_file.instance = instance
return self.grid_file
return GridFSProxy(key=self.name, instance=instance)
return self.proxy_class(key=self.name, instance=instance,
db_alias=self.db_alias,
collection_name=self.collection_name)

def __set__(self, instance, value):
key = self.name
@@ -874,7 +944,8 @@ class FileField(BaseField):
grid_file.put(value)
else:
# Create a new proxy object as we don't already have one
instance._data[key] = GridFSProxy(key=key, instance=instance)
instance._data[key] = self.proxy_class(key=key, instance=instance,
collection_name=self.collection_name)
instance._data[key].put(value)
else:
instance._data[key] = value
@@ -883,18 +954,181 @@ class FileField(BaseField):

def to_mongo(self, value):
# Store the GridFS file id in MongoDB
if isinstance(value, GridFSProxy) and value.grid_id is not None:
if isinstance(value, self.proxy_class) and value.grid_id is not None:
return value.grid_id
return None

def to_python(self, value):
if value is not None:
return GridFSProxy(value)
return self.proxy_class(value,
collection_name=self.collection_name,
db_alias=self.db_alias)

def validate(self, value):
if value.grid_id is not None:
assert isinstance(value, GridFSProxy)
assert isinstance(value.grid_id, pymongo.objectid.ObjectId)
if not isinstance(value, self.proxy_class):
self.error('FileField only accepts GridFSProxy values')
if not isinstance(value.grid_id, ObjectId):
self.error('Invalid GridFSProxy value')


class ImageGridFsProxy(GridFSProxy):
"""
Proxy for ImageField

.. versionadded:: 0.6
"""
def put(self, file_obj, **kwargs):
"""
Insert an image into the database, applying the field's
properties (size, thumbnail_size)
"""
field = self.instance._fields[self.key]

try:
img = Image.open(file_obj)
except:
raise ValidationError('Invalid image')

if (field.size and (img.size[0] > field.size['width'] or
img.size[1] > field.size['height'])):
size = field.size

if size['force']:
img = ImageOps.fit(img,
(size['width'],
size['height']),
Image.ANTIALIAS)
else:
img.thumbnail((size['width'],
size['height']),
Image.ANTIALIAS)

thumbnail = None
if field.thumbnail_size:
size = field.thumbnail_size

if size['force']:
thumbnail = ImageOps.fit(img,
(size['width'],
size['height']),
Image.ANTIALIAS)
else:
thumbnail = img.copy()
thumbnail.thumbnail((size['width'],
size['height']),
Image.ANTIALIAS)

if thumbnail:
thumb_id = self._put_thumbnail(thumbnail,
img.format)
else:
thumb_id = None

w, h = img.size

io = StringIO()
img.save(io, img.format)
io.seek(0)

return super(ImageGridFsProxy, self).put(io,
width=w,
height=h,
format=img.format,
thumbnail_id=thumb_id,
**kwargs)

def delete(self, *args, **kwargs):
# deletes thumbnail
out = self.get()
if out and out.thumbnail_id:
self.fs.delete(out.thumbnail_id)

return super(ImageGridFsProxy, self).delete(*args, **kwargs)

def _put_thumbnail(self, thumbnail, format, **kwargs):
w, h = thumbnail.size

io = StringIO()
thumbnail.save(io, format)
io.seek(0)

return self.fs.put(io, width=w,
height=h,
format=format,
**kwargs)

@property
def size(self):
"""
return the (width, height) of the image
"""
out = self.get()
if out:
return out.width, out.height

@property
def format(self):
"""
return the format of the image
ex: PNG, JPEG, GIF, etc
"""
out = self.get()
if out:
return out.format

@property
def thumbnail(self):
"""
return a gridfs.grid_file.GridOut
representing a thumbnail of the Image
"""
out = self.get()
if out and out.thumbnail_id:
return self.fs.get(out.thumbnail_id)

def write(self, *args, **kwargs):
raise RuntimeError("Please use \"put\" method instead")

def writelines(self, *args, **kwargs):
raise RuntimeError("Please use \"put\" method instead")


class ImproperlyConfigured(Exception):
pass


class ImageField(FileField):
"""
An Image File storage field.

@size (width, height, force):
max size to store images, if larger will be automatically resized
ex: size=(800, 600, True)

@thumbnail (width, height, force):
size to generate a thumbnail

.. versionadded:: 0.6
"""
proxy_class = ImageGridFsProxy

def __init__(self, size=None, thumbnail_size=None,
collection_name='images', **kwargs):
if not Image:
raise ImproperlyConfigured("PIL library was not found")

params_size = ('width', 'height', 'force')
extra_args = dict(size=size, thumbnail_size=thumbnail_size)
for att_name, att in extra_args.items():
if att and (isinstance(att, tuple) or isinstance(att, list)):
setattr(self, att_name, dict(
map(None, params_size, att)))
else:
setattr(self, att_name, None)

super(ImageField, self).__init__(
collection_name=collection_name,
**kwargs)

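A hedged sketch of the new ImageField in use; it requires PIL, and the names and sizes are illustrative:

    class Profile(Document):
        # oversized images are resized; force=True crops to fit exactly
        photo = ImageField(size=(800, 600, True), thumbnail_size=(64, 64, True))

    profile = Profile()
    profile.photo.put(open('avatar.png', 'rb'))
    profile.save()
    print profile.photo.size, profile.photo.format   # e.g. (800, 600) PNG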
class GeoPointField(BaseField):
@@ -909,20 +1143,22 @@ class GeoPointField(BaseField):
"""Make sure that a geo-value is of type (x, y)
"""
if not isinstance(value, (list, tuple)):
raise ValidationError('GeoPointField can only accept tuples or '
'lists of (x, y)')
self.error('GeoPointField can only accept tuples or lists '
'of (x, y)')

if not len(value) == 2:
raise ValidationError('Value must be a two-dimensional point.')
self.error('Value must be a two-dimensional point')
if (not isinstance(value[0], (float, int)) and
not isinstance(value[1], (float, int))):
raise ValidationError('Both values in point must be float or int.')
self.error('Both values in point must be float or int')


class SequenceField(IntField):
"""Provides a sequential counter.
"""Provides a sequential counter (see http://www.mongodb.org/display/DOCS/Object+IDs#ObjectIDs-SequenceNumbers)

..note:: Although traditional databases often use increasing sequence
.. note::

Although traditional databases often use increasing sequence
numbers for primary keys, in MongoDB the preferred approach is to
use Object IDs instead. The concept is that in a very large
cluster of machines, it is easier to create an object ID than have
@@ -930,8 +1166,9 @@ class SequenceField(IntField):

.. versionadded:: 0.5
"""
def __init__(self, collection_name=None, *args, **kwargs):
def __init__(self, collection_name=None, db_alias=None, *args, **kwargs):
self.collection_name = collection_name or 'mongoengine.counters'
self.db_alias = db_alias or DEFAULT_CONNECTION_NAME
return super(SequenceField, self).__init__(*args, **kwargs)

def generate_new_value(self):
@@ -940,7 +1177,7 @@ class SequenceField(IntField):
"""
sequence_id = "{0}.{1}".format(self.owner_document._get_collection_name(),
self.name)
collection = _get_db()[self.collection_name]
collection = get_db(alias=self.db_alias)[self.collection_name]
counter = collection.find_and_modify(query={"_id": sequence_id},
update={"$inc": {"next": 1}},
new=True,
@@ -975,3 +1212,30 @@ class SequenceField(IntField):
if value is None:
value = self.generate_new_value()
return value


class UUIDField(BaseField):
"""A UUID field.

.. versionadded:: 0.6
"""

def __init__(self, **kwargs):
super(UUIDField, self).__init__(**kwargs)

def to_python(self, value):
if not isinstance(value, basestring):
value = unicode(value)
return uuid.UUID(value)

def to_mongo(self, value):
return unicode(value)

def validate(self, value):
if not isinstance(value, uuid.UUID):
if not isinstance(value, basestring):
value = str(value)
try:
value = uuid.UUID(value)
except Exception, exc:
self.error('Could not convert to UUID: %s' % exc)
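A short sketch of the two fields above; Ticket is a placeholder, and the counter id follows the code's "collection.field" scheme:

    import uuid

    class Ticket(Document):
        number = SequenceField()     # counter 'ticket.number' in mongoengine.counters
        token = UUIDField()

    t = Ticket(token=uuid.uuid4())
    t.save()
    print t.number                   # 1, then 2, 3, ... for later documents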
@@ -1,15 +1,14 @@
from connection import _get_db

import pprint
import pymongo
import pymongo.code
import pymongo.dbref
import pymongo.objectid
import re
import copy
import itertools
import operator

import pymongo
from bson.code import Code

from mongoengine import signals

__all__ = ['queryset_manager', 'Q', 'InvalidQueryError',
'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY']

@@ -274,16 +273,20 @@ class Q(QNode):

class QueryFieldList(object):
"""Object that handles combinations of .only() and .exclude() calls"""
ONLY = True
EXCLUDE = False
ONLY = 1
EXCLUDE = 0

def __init__(self, fields=[], value=ONLY, always_include=[]):
self.value = value
self.fields = set(fields)
self.always_include = set(always_include)
self._id = None

def as_dict(self):
return dict((field, self.value) for field in self.fields)
field_list = dict((field, self.value) for field in self.fields)
if self._id is not None:
field_list['_id'] = self._id
return field_list

def __add__(self, f):
if not self.fields:
@@ -299,6 +302,9 @@ class QueryFieldList(object):
self.value = self.ONLY
self.fields = f.fields - self.fields

if '_id' in f.fields:
self._id = f.value

if self.always_include:
if self.value is self.ONLY and self.fields:
self.fields = self.fields.union(self.always_include)
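Presumably this enables projections like the following sketch; User is a placeholder and the exact field spelling may vary:

    users = User.objects.only('name').exclude('pk')
    # intended projection sent to MongoDB: {'name': 1, '_id': 0}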
@@ -334,6 +340,7 @@ class QuerySet(object):
self._timeout = True
self._class_check = True
self._slave_okay = False
self._scalar = []

# If inheritance is allowed, only return instances and instances of
# subclasses of the class being used
@@ -399,12 +406,14 @@ class QuerySet(object):
index_list = []
use_types = doc_cls._meta.get('allow_inheritance', True)
for key in spec['fields']:
# Get direction from + or -
# Get ASCENDING direction from +, DESCENDING from -, and GEO2D from *
direction = pymongo.ASCENDING
if key.startswith("-"):
direction = pymongo.DESCENDING
if key.startswith(("+", "-")):
key = key[1:]
elif key.startswith("*"):
direction = pymongo.GEO2D
if key.startswith(("+", "-", "*")):
key = key[1:]

# Use real field name, do it manually because we need field
# objects for the next part (list field checking)
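A sketch of the new '*' prefix for geospatial indexes; Place and location are placeholders:

    class Place(Document):
        location = GeoPointField()
        # '*' builds a GEO2D index; '+'/'-' still mean ascending/descending
        meta = {'indexes': ['*location']}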
@@ -421,7 +430,7 @@ class QuerySet(object):
# If _types is being used, prepend it to every specified index
index_types = doc_cls._meta.get('index_types', True)
allow_inheritance = doc_cls._meta.get('allow_inheritance')
if spec.get('types', index_types) and allow_inheritance and use_types:
if spec.get('types', index_types) and allow_inheritance and use_types and direction is not pymongo.GEO2D:
index_list.insert(0, ('_types', 1))

spec['fields'] = index_list
@@ -434,9 +443,11 @@ class QuerySet(object):
return spec

@classmethod
def _reset_already_indexed(cls):
def _reset_already_indexed(cls, document=None):
"""Helper to reset already indexed, can be useful for testing purposes"""
cls.__already_indexed = set()
if document:
cls.__already_indexed.discard(document)
cls.__already_indexed.clear()

def __call__(self, q_obj=None, class_check=True, slave_okay=False, **query):
"""Filter the selected documents by calling the
@@ -476,6 +487,13 @@ class QuerySet(object):
perform operations only if the collection is accessed.
"""
if self._document not in QuerySet.__already_indexed:

# Ensure collection exists
db = self._document._get_db()
if self._collection_obj.name not in db.collection_names():
self._document._collection = None
self._collection_obj = self._document._get_collection()

QuerySet.__already_indexed.add(self._document)

background = self._document._meta.get('index_background', False)
@@ -555,7 +573,7 @@ class QuerySet(object):
self.order_by(*self._document._meta['ordering'])

if self._limit is not None:
self._cursor_obj.limit(self._limit)
self._cursor_obj.limit(self._limit - (self._skip or 0))

if self._skip is not None:
self._cursor_obj.skip(self._skip)
@@ -590,8 +608,18 @@ class QuerySet(object):
if field_name == 'pk':
# Deal with "primary key" alias
field_name = document._meta['id_field']
field = document._fields[field_name]
if field_name in document._fields:
field = document._fields[field_name]
elif document._dynamic:
from base import BaseDynamicField
field = BaseDynamicField(db_field=field_name)
else:
raise InvalidQueryError('Cannot resolve field "%s"'
% field_name)
else:
from mongoengine.fields import ReferenceField, GenericReferenceField
if isinstance(field, (ReferenceField, GenericReferenceField)):
raise InvalidQueryError('Cannot perform join in mongoDB: %s' % '__'.join(parts))
# Look up subfield on the previous field
new_field = field.lookup_member(field_name)
from base import ComplexBaseField
@@ -603,7 +631,6 @@ class QuerySet(object):
% field_name)
field = new_field # update field to the new field type
fields.append(field)

return fields

@classmethod
@@ -624,6 +651,7 @@ class QuerySet(object):
match_operators = ['contains', 'icontains', 'startswith',
'istartswith', 'endswith', 'iendswith',
'exact', 'iexact']
custom_operators = ['match']

mongo_query = {}
for key, value in query.items():
@@ -636,7 +664,7 @@ class QuerySet(object):
parts = [part for part in parts if not part.isdigit()]
# Check for an operator and transform to mongo-style if there is
op = None
if parts[-1] in operators + match_operators + geo_operators:
if parts[-1] in operators + match_operators + geo_operators + custom_operators:
op = parts.pop()

negate = False
@@ -650,8 +678,8 @@ class QuerySet(object):
parts = []

cleaned_fields = []
append_field = True
for field in fields:
append_field = True
if isinstance(field, str):
parts.append(field)
append_field = False
@@ -669,7 +697,7 @@ class QuerySet(object):
if isinstance(field, basestring):
if op in match_operators and isinstance(value, basestring):
from mongoengine import StringField
value = StringField().prepare_query_value(op, value)
value = StringField.prepare_query_value(op, value)
else:
value = field
else:
@@ -696,6 +724,12 @@ class QuerySet(object):
else:
raise NotImplementedError("Geo method '%s' has not "
"been implemented" % op)
elif op in custom_operators:
if op == 'match':
value = {"$elemMatch": value}
else:
raise NotImplementedError("Custom method '%s' has not "
"been implemented" % op)
elif op not in match_operators:
value = {'$' + op: value}

@@ -721,18 +755,23 @@ class QuerySet(object):

.. versionadded:: 0.3
"""
self.limit(2)
self.__call__(*q_objs, **query)
count = self.count()
if count == 1:
return self[0]
elif count > 1:
message = u'%d items returned, instead of 1' % count
raise self._document.MultipleObjectsReturned(message)
else:
try:
result1 = self.next()
except StopIteration:
raise self._document.DoesNotExist("%s matching query does not exist."
% self._document._class_name)
try:
result2 = self.next()
except StopIteration:
return result1

def get_or_create(self, write_options=None, *q_objs, **query):
self.rewind()
message = u'%d items returned, instead of 1' % self.count()
raise self._document.MultipleObjectsReturned(message)

def get_or_create(self, write_options=None, auto_save=True, *q_objs, **query):
"""Retrieve unique object or create, if it doesn't exist. Returns a tuple of
``(object, created)``, where ``object`` is the retrieved or created object
and ``created`` is a boolean specifying whether a new object was created. Raises
@@ -747,23 +786,25 @@ class QuerySet(object):
Passes any write_options onto :meth:`~mongoengine.Document.save`

.. versionadded:: 0.3

:param auto_save: if the object is to be saved automatically if not found.

.. versionadded:: 0.6
"""
defaults = query.get('defaults', {})
if 'defaults' in query:
del query['defaults']

self.__call__(*q_objs, **query)
count = self.count()
if count == 0:
try:
doc = self.get(*q_objs, **query)
return doc, False
except self._document.DoesNotExist:
query.update(defaults)
doc = self._document(**query)
doc.save(write_options=write_options)

if auto_save:
doc.save(write_options=write_options)
return doc, True
elif count == 1:
return self.first(), False
else:
message = u'%d items returned, instead of 1' % count
raise self._document.MultipleObjectsReturned(message)

def create(self, **kwargs):
"""Create new object. Returns the saved object instance.
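A sketch of the new auto_save flag; User is a placeholder:

    user, created = User.objects.get_or_create(
        name='Ross', defaults={'age': 30}, auto_save=False)
    if created:
        user.age += 1   # tweak before persisting, since auto_save=False skipped save()
        user.save()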
@@ -812,23 +853,33 @@ class QuerySet(object):
|
||||
raise OperationError(msg)
|
||||
raw.append(doc.to_mongo())
|
||||
|
||||
signals.pre_bulk_insert.send(self._document, documents=docs)
|
||||
ids = self._collection.insert(raw)
|
||||
|
||||
if not load_bulk:
|
||||
signals.post_bulk_insert.send(
|
||||
self._document, documents=docs, loaded=False)
|
||||
return return_one and ids[0] or ids
|
||||
|
||||
documents = self.in_bulk(ids)
|
||||
results = []
|
||||
for obj_id in ids:
|
||||
results.append(documents.get(obj_id))
|
||||
signals.post_bulk_insert.send(
|
||||
self._document, documents=results, loaded=True)
|
||||
return return_one and results[0] or results
|
||||
|
||||
def with_id(self, object_id):
|
||||
"""Retrieve the object matching the id provided.
|
||||
"""Retrieve the object matching the id provided. Uses `object_id` only
|
||||
and raises InvalidQueryError if a filter has been applied.
|
||||
|
||||
:param object_id: the value for the id of the document to look up
|
||||
|
||||
.. versionchanged:: 0.6 Raises InvalidQueryError if filter has been set
|
||||
"""
|
||||
return self._document.objects(pk=object_id).first()
|
||||
if not self._query_obj.empty:
|
||||
raise InvalidQueryError("Cannot use a filter whilst using `with_id`")
|
||||
return self.filter(pk=object_id).first()
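The stricter behaviour can be sketched as follows (hypothetical Person document):

    person = Person.objects.with_id(person_id)       # fine: no filter applied
    Person.objects(name='Bob').with_id(person_id)    # now raises InvalidQueryError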

    def in_bulk(self, object_ids):
        """Retrieve a set of documents by their ids.
@@ -843,8 +894,13 @@ class QuerySet(object):

        docs = self._collection.find({'_id': {'$in': object_ids}},
                                     **self._cursor_args)
        for doc in docs:
            doc_map[doc['_id']] = self._document._from_son(doc)
        if self._scalar:
            for doc in docs:
                doc_map[doc['_id']] = self._get_scalar(
                    self._document._from_son(doc))
        else:
            for doc in docs:
                doc_map[doc['_id']] = self._document._from_son(doc)

        return doc_map

@@ -854,6 +910,9 @@ class QuerySet(object):
        try:
            if self._limit == 0:
                raise StopIteration
            if self._scalar:
                return self._get_scalar(self._document._from_son(
                    self._cursor.next()))
            return self._document._from_son(self._cursor.next())
        except StopIteration, e:
            self.rewind()
@@ -887,9 +946,9 @@ class QuerySet(object):
        and :meth:`~mongoengine.tests.QuerySetTest.test_map_advanced`
        tests in ``tests.queryset.QuerySetTest`` for usage examples.

        :param map_f: map function, as :class:`~pymongo.code.Code` or string
        :param map_f: map function, as :class:`~bson.code.Code` or string
        :param reduce_f: reduce function, as
                         :class:`~pymongo.code.Code` or string
                         :class:`~bson.code.Code` or string
        :param output: output collection name, if set to 'inline' will try to
                       use :class:`~pymongo.collection.Collection.inline_map_reduce`
        :param finalize_f: finalize function, an optional function that
@@ -919,27 +978,27 @@ class QuerySet(object):
            raise NotImplementedError("Requires MongoDB >= 1.7.1")

        map_f_scope = {}
        if isinstance(map_f, pymongo.code.Code):
        if isinstance(map_f, Code):
            map_f_scope = map_f.scope
            map_f = unicode(map_f)
        map_f = pymongo.code.Code(self._sub_js_fields(map_f), map_f_scope)
        map_f = Code(self._sub_js_fields(map_f), map_f_scope)

        reduce_f_scope = {}
        if isinstance(reduce_f, pymongo.code.Code):
        if isinstance(reduce_f, Code):
            reduce_f_scope = reduce_f.scope
            reduce_f = unicode(reduce_f)
        reduce_f_code = self._sub_js_fields(reduce_f)
        reduce_f = pymongo.code.Code(reduce_f_code, reduce_f_scope)
        reduce_f = Code(reduce_f_code, reduce_f_scope)

        mr_args = {'query': self._query}

        if finalize_f:
            finalize_f_scope = {}
            if isinstance(finalize_f, pymongo.code.Code):
            if isinstance(finalize_f, Code):
                finalize_f_scope = finalize_f.scope
                finalize_f = unicode(finalize_f)
            finalize_f_code = self._sub_js_fields(finalize_f)
            finalize_f = pymongo.code.Code(finalize_f_code, finalize_f_scope)
            finalize_f = Code(finalize_f_code, finalize_f_scope)
            mr_args['finalize'] = finalize_f

        if scope:
@@ -1030,6 +1089,9 @@ class QuerySet(object):
            return self
        # Integer index provided
        elif isinstance(key, int):
            if self._scalar:
                return self._get_scalar(self._document._from_son(
                    self._cursor[key]))
            return self._document._from_son(self._cursor[key])
        raise AttributeError

@@ -1039,8 +1101,10 @@ class QuerySet(object):
        :param field: the field to select distinct values from

        .. versionadded:: 0.4
        .. versionchanged:: 0.5 - Fixed handling references
        """
        return self._cursor.distinct(field)
        from dereference import DeReference
        return DeReference()(self._cursor.distinct(field), 1)

    def only(self, *fields):
        """Load only a subset of this document's fields. ::
@@ -1226,6 +1290,9 @@ class QuerySet(object):

        mongo_update = {}
        for key, value in update.items():
            if key == "__raw__":
                mongo_update.update(value)
                continue
            parts = key.split('__')
            # Check for an operator and transform to mongo-style if there is
            op = None
@@ -1249,8 +1316,8 @@ class QuerySet(object):
            parts = []

            cleaned_fields = []
            append_field = True
            for field in fields:
                append_field = True
                if isinstance(field, str):
                    # Convert the S operator to $
                    if field == 'S':
@@ -1266,17 +1333,21 @@ class QuerySet(object):
                field = cleaned_fields[-1]

            if op in (None, 'set', 'push', 'pull', 'addToSet'):
                value = field.prepare_query_value(op, value)
                if field.required or value is not None:
                    value = field.prepare_query_value(op, value)
            elif op in ('pushAll', 'pullAll'):
                value = [field.prepare_query_value(op, v) for v in value]

            key = '.'.join(parts)

            if not op:
                raise InvalidQueryError("Updates must supply an operation eg: set__FIELD=value")

            if op:
                value = {key: value}
                key = '$' + op

            if op is None or key not in mongo_update:
            if key not in mongo_update:
                mongo_update[key] = value
            elif key in mongo_update and isinstance(mongo_update[key], dict):
                mongo_update[key].update(value)
@@ -1342,8 +1413,47 @@ class QuerySet(object):
            raise OperationError(u'Update failed [%s]' % unicode(e))

    def __iter__(self):
        self.rewind()
        return self

    def _get_scalar(self, doc):

        def lookup(obj, name):
            chunks = name.split('__')
            for chunk in chunks:
                if hasattr(obj, '_db_field_map'):
                    chunk = obj._db_field_map.get(chunk, chunk)
                obj = getattr(obj, chunk)
            return obj

        data = [lookup(doc, n) for n in self._scalar]
        if len(data) == 1:
            return data[0]

        return tuple(data)

    def scalar(self, *fields):
        """Instead of returning Document instances, return either a specific
        value or a tuple of values in order.

        This effects all results and can be unset by calling ``scalar``
        without arguments. Calls ``only`` automatically.

        :param fields: One or more fields to return instead of a Document.
        """
        self._scalar = list(fields)

        if fields:
            self.only(*fields)
        else:
            self.all_fields()

        return self
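A short sketch of scalar in use, assuming a hypothetical Person document with name and age fields:

    Person.objects.scalar('name')         # iterates plain values: 'Bob', 'Ann', ...
    Person.objects.scalar('name', 'age')  # iterates tuples: ('Bob', 30), ...
    Person.objects.scalar()               # no arguments unsets scalar mode again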

    def values_list(self, *fields):
        """An alias for scalar"""
        return self.scalar(*fields)

    def _sub_js_fields(self, code):
        """When fields are specified with [~fieldname] syntax, where
        *fieldname* is the Python name of a field, *fieldname* will be
@@ -1406,9 +1516,9 @@ class QuerySet(object):
            query['$where'] = self._where_clause

        scope['query'] = query
        code = pymongo.code.Code(code, scope=scope)
        code = Code(code, scope=scope)

        db = _get_db()
        db = self._document._get_db()
        return db.eval(code, *fields)

    def where(self, where_clause):
@@ -1435,13 +1545,13 @@ class QuerySet(object):
        .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work
            with sharding.
        """
        map_func = pymongo.code.Code("""
        map_func = Code("""
            function() {
                emit(1, this[field] || 0);
            }
        """, scope={'field': field})

        reduce_func = pymongo.code.Code("""
        reduce_func = Code("""
            function(key, values) {
                var sum = 0;
                for (var i in values) {
@@ -1465,14 +1575,14 @@ class QuerySet(object):
        .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work
            with sharding.
        """
        map_func = pymongo.code.Code("""
        map_func = Code("""
            function() {
                if (this.hasOwnProperty(field))
                    emit(1, {t: this[field] || 0, c: 1});
            }
        """, scope={'field': field})

        reduce_func = pymongo.code.Code("""
        reduce_func = Code("""
            function(key, values) {
                var out = {t: 0, c: 0};
                for (var i in values) {
@@ -1484,7 +1594,7 @@ class QuerySet(object):
            }
        """)

        finalize_func = pymongo.code.Code("""
        finalize_func = Code("""
            function(key, value) {
                return value.t / value.c;
            }
@@ -1526,13 +1636,20 @@ class QuerySet(object):
            function() {
                path = '{{~%(field)s}}'.split('.');
                field = this;
                for (p in path) { field = field[path[p]]; }
                for (p in path) {
                    if (field)
                        field = field[path[p]];
                    else
                        break;
                }
                if (field && field.constructor == Array) {
                    field.forEach(function(item) {
                        emit(item, 1);
                    });
                } else {
                } else if (field) {
                    emit(field, 1);
                } else {
                    emit(null, 1);
                }
            }
        """ % dict(field=field)
@@ -1572,7 +1689,12 @@ class QuerySet(object):
            var total = 0.0;
            db[collection].find(query).forEach(function(doc) {
                field = doc;
                for (p in path) { field = field[path[p]]; }
                for (p in path) {
                    if (field)
                        field = field[path[p]];
                    else
                        break;
                }
                if (field && field.constructor == Array) {
                    total += field.length;
                } else {
@@ -1588,7 +1710,12 @@ class QuerySet(object):
            }
            db[collection].find(query).forEach(function(doc) {
                field = doc;
                for (p in path) { field = field[path[p]]; }
                for (p in path) {
                    if (field)
                        field = field[path[p]];
                    else
                        break;
                }
                if (field && field.constructor == Array) {
                    field.forEach(function(item) {
                        frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]);
@@ -1609,10 +1736,16 @@ class QuerySet(object):

    def __repr__(self):
        limit = REPR_OUTPUT_SIZE + 1
        if self._limit is not None and self._limit < limit:
            limit = self._limit
        start = (0 if self._skip is None else self._skip)
        if self._limit is None:
            stop = start + limit
        if self._limit is not None:
            if self._limit - start > limit:
                stop = start + limit
            else:
                stop = self._limit
        try:
            data = list(self[self._skip:limit])
            data = list(self[start:stop])
        except pymongo.errors.InvalidOperation:
            return ".. queryset mid-iteration .."
        if len(data) > REPR_OUTPUT_SIZE:
@@ -1620,13 +1753,15 @@ class QuerySet(object):
        return repr(data)

    def select_related(self, max_depth=1):
        """Handles dereferencing of :class:`~pymongo.dbref.DBRef` objects to
        """Handles dereferencing of :class:`~bson.dbref.DBRef` objects to
        a maximum depth in order to cut down the number queries to mongodb.

        .. versionadded:: 0.5
        """
        from dereference import dereference
        return dereference(self, max_depth=max_depth)
        from dereference import DeReference
        # Make select related work the same for querysets
        max_depth += 1
        return DeReference()(self, max_depth=max_depth)
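Call-site usage is unchanged; a sketch with a hypothetical BlogPost document holding reference fields:

    posts = BlogPost.objects.select_related(max_depth=2)
    # DBRefs up to two levels deep are dereferenced in bulk up front,
    # rather than one query per reference on attribute access.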


class QuerySetManager(object):

@@ -42,3 +42,5 @@ pre_save = _signals.signal('pre_save')
post_save = _signals.signal('post_save')
pre_delete = _signals.signal('pre_delete')
post_delete = _signals.signal('post_delete')
pre_bulk_insert = _signals.signal('pre_bulk_insert')
post_bulk_insert = _signals.signal('post_bulk_insert')
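A hedged sketch of connecting the new bulk-insert signals; the handler and the BlogPost document are illustrative only:

    from mongoengine import signals

    def log_bulk_insert(sender, documents, **kwargs):
        # pre_bulk_insert passes the documents about to be inserted
        print 'bulk inserting %d documents' % len(documents)

    signals.pre_bulk_insert.connect(log_bulk_insert, sender=BlogPost)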
@@ -1,4 +1,4 @@
from mongoengine.connection import _get_db
from mongoengine.connection import get_db


class query_counter(object):
@@ -7,7 +7,7 @@ class query_counter(object):
    def __init__(self):
        """ Construct the query_counter. """
        self.counter = 0
        self.db = _get_db()
        self.db = get_db()

    def __enter__(self):
        """ On every with block we need to drop the profile collection. """

62  python-mongoengine.spec  Normal file
@@ -0,0 +1,62 @@
# sitelib for noarch packages, sitearch for others (remove the unneeded one)
%{!?python_sitelib: %global python_sitelib %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())")}
%{!?python_sitearch: %global python_sitearch %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib(1))")}

%define srcname mongoengine

Name:           python-%{srcname}
Version:        0.6
Release:        1%{?dist}
Summary:        A Python Document-Object Mapper for working with MongoDB

Group:          Development/Libraries
License:        MIT
URL:            https://github.com/MongoEngine/mongoengine
Source0:        %{srcname}-%{version}.tar.bz2

BuildRequires:  python-devel
BuildRequires:  python-setuptools

Requires:       mongodb
Requires:       pymongo
Requires:       python-blinker
Requires:       python-imaging


%description
MongoEngine is an ORM-like layer on top of PyMongo.

%prep
%setup -q -n %{srcname}-%{version}


%build
# Remove CFLAGS=... for noarch packages (unneeded)
CFLAGS="$RPM_OPT_FLAGS" %{__python} setup.py build


%install
rm -rf $RPM_BUILD_ROOT
%{__python} setup.py install -O1 --skip-build --root $RPM_BUILD_ROOT

%clean
rm -rf $RPM_BUILD_ROOT

%files
%defattr(-,root,root,-)
%doc docs AUTHORS LICENSE README.rst
# For noarch packages: sitelib
%{python_sitelib}/*
# For arch-specific packages: sitearch
# %{python_sitearch}/*

%changelog
* Mon Mar 05 2012 Ross Lawley <ross.lawley@gmail.com> 0.6
- 0.6 released
* Thu Oct 27 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.3-1
- Update to latest dev version
- Add PIL dependency for ImageField
* Wed Oct 12 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.2-1
- Update version
* Fri Sep 23 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.0-1
- Initial version
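Assuming the Source0 tarball has been produced (for instance with python setup.py sdist --formats=bztar), the spec can be exercised locally with the stock RPM workflow:

    cp dist/mongoengine-0.6.tar.bz2 ~/rpmbuild/SOURCES/
    rpmbuild -ba python-mongoengine.spec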
6  setup.py
@@ -38,7 +38,9 @@ setup(name='mongoengine',
      packages=find_packages(),
      author='Harry Marr',
      author_email='harry.marr@{nospam}gmail.com',
      url='http://hmarr.com/mongoengine/',
      maintainer="Ross Lawley",
      maintainer_email="ross.lawley@{nospam}gmail.com",
      url='http://mongoengine.org/',
      license='MIT',
      include_package_data=True,
      description=DESCRIPTION,
@@ -47,5 +49,5 @@ setup(name='mongoengine',
      classifiers=CLASSIFIERS,
      install_requires=['pymongo'],
      test_suite='tests',
      tests_require=['blinker', 'django==1.3']
      tests_require=['blinker', 'django>=1.3', 'PIL']
)
70  tests/connection.py  Normal file
@@ -0,0 +1,70 @@
import unittest
import pymongo

import mongoengine.connection

from mongoengine import *
from mongoengine.connection import get_db, get_connection, ConnectionError


class ConnectionTest(unittest.TestCase):

    def tearDown(self):
        mongoengine.connection._connection_settings = {}
        mongoengine.connection._connections = {}
        mongoengine.connection._dbs = {}

    def test_connect(self):
        """Ensure that the connect() method works properly.
        """
        connect('mongoenginetest')

        conn = get_connection()
        self.assertTrue(isinstance(conn, pymongo.connection.Connection))

        db = get_db()
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'mongoenginetest')

        connect('mongoenginetest2', alias='testdb')
        conn = get_connection('testdb')
        self.assertTrue(isinstance(conn, pymongo.connection.Connection))

    def test_connect_uri(self):
        """Ensure that the connect() method works properly with uri's
        """
        c = connect(db='mongoenginetest', alias='admin')
        c.admin.system.users.remove({})
        c.mongoenginetest.system.users.remove({})

        c.admin.add_user("admin", "password")
        c.admin.authenticate("admin", "password")
        c.mongoenginetest.add_user("username", "password")

        self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost')

        connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest')

        conn = get_connection()
        self.assertTrue(isinstance(conn, pymongo.connection.Connection))

        db = get_db()
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'mongoenginetest')

    def test_register_connection(self):
        """Ensure that connections with different aliases may be registered.
        """
        register_connection('testdb', 'mongoenginetest2')

        self.assertRaises(ConnectionError, get_connection)
        conn = get_connection('testdb')
        self.assertTrue(isinstance(conn, pymongo.connection.Connection))

        db = get_db('testdb')
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'mongoenginetest2')


if __name__ == '__main__':
    unittest.main()
@@ -1,7 +1,7 @@
import unittest

from mongoengine import *
from mongoengine.connection import _get_db
from mongoengine.connection import get_db
from mongoengine.tests import query_counter


@@ -9,7 +9,7 @@ class FieldTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = _get_db()
        self.db = get_db()

    def test_list_item_dereference(self):
        """Ensure that DBRef items in ListFields are dereferenced.
@@ -129,6 +129,110 @@ class FieldTest(unittest.TestCase):
        self.assertEquals(employee.friends, friends)
        self.assertEqual(q, 2)

    def test_circular_reference(self):
        """Ensure you can handle circular references
        """
        class Person(Document):
            name = StringField()
            relations = ListField(EmbeddedDocumentField('Relation'))

            def __repr__(self):
                return "<Person: %s>" % self.name

        class Relation(EmbeddedDocument):
            name = StringField()
            person = ReferenceField('Person')

        Person.drop_collection()
        mother = Person(name="Mother")
        daughter = Person(name="Daughter")

        mother.save()
        daughter.save()

        daughter_rel = Relation(name="Daughter", person=daughter)
        mother.relations.append(daughter_rel)
        mother.save()

        mother_rel = Relation(name="Daughter", person=mother)
        self_rel = Relation(name="Self", person=daughter)
        daughter.relations.append(mother_rel)
        daughter.relations.append(self_rel)
        daughter.save()

        self.assertEquals("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects())

    def test_circular_reference_on_self(self):
        """Ensure you can handle circular references
        """
        class Person(Document):
            name = StringField()
            relations = ListField(ReferenceField('self'))

            def __repr__(self):
                return "<Person: %s>" % self.name

        Person.drop_collection()
        mother = Person(name="Mother")
        daughter = Person(name="Daughter")

        mother.save()
        daughter.save()

        mother.relations.append(daughter)
        mother.save()

        daughter.relations.append(mother)
        daughter.relations.append(daughter)
        daughter.save()

        self.assertEquals("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects())

    def test_circular_tree_reference(self):
        """Ensure you can handle circular references with more than one level
        """
        class Other(EmbeddedDocument):
            name = StringField()
            friends = ListField(ReferenceField('Person'))

        class Person(Document):
            name = StringField()
            other = EmbeddedDocumentField(Other, default=lambda: Other())

            def __repr__(self):
                return "<Person: %s>" % self.name

        Person.drop_collection()
        paul = Person(name="Paul")
        paul.save()
        maria = Person(name="Maria")
        maria.save()
        julia = Person(name='Julia')
        julia.save()
        anna = Person(name='Anna')
        anna.save()

        paul.other.friends = [maria, julia, anna]
        paul.other.name = "Paul's friends"
        paul.save()

        maria.other.friends = [paul, julia, anna]
        maria.other.name = "Maria's friends"
        maria.save()

        julia.other.friends = [paul, maria, anna]
        julia.other.name = "Julia's friends"
        julia.save()

        anna.other.friends = [paul, maria, julia]
        anna.other.name = "Anna's friends"
        anna.save()

        self.assertEquals(
            "[<Person: Paul>, <Person: Maria>, <Person: Julia>, <Person: Anna>]",
            "%s" % Person.objects()
        )

    def test_generic_reference(self):

        class UserA(Document):
@@ -656,3 +760,53 @@ class FieldTest(unittest.TestCase):
        UserB.drop_collection()
        UserC.drop_collection()
        Group.drop_collection()

    def test_multidirectional_lists(self):

        class Asset(Document):
            name = StringField(max_length=250, required=True)
            parent = GenericReferenceField(default=None)
            parents = ListField(GenericReferenceField())
            children = ListField(GenericReferenceField())

        Asset.drop_collection()

        root = Asset(name='', path="/", title="Site Root")
        root.save()

        company = Asset(name='company', title='Company', parent=root, parents=[root])
        company.save()

        root.children = [company]
        root.save()

        root = root.reload()
        self.assertEquals(root.children, [company])
        self.assertEquals(company.parents, [root])

    def test_dict_in_dbref_instance(self):

        class Person(Document):
            name = StringField(max_length=250, required=True)

        class Room(Document):
            number = StringField(max_length=250, required=True)
            staffs_with_position = ListField(DictField())

        Person.drop_collection()
        Room.drop_collection()

        bob = Person.objects.create(name='Bob')
        bob.save()
        sarah = Person.objects.create(name='Sarah')
        sarah.save()

        room_101 = Room.objects.create(number="101")
        room_101.staffs_with_position = [
            {'position_key': 'window', 'staff': sarah},
            {'position_key': 'door', 'staff': bob.to_dbref()}]
        room_101.save()

        room = Room.objects.first().select_related()
        self.assertEquals(room.staffs_with_position[0]['staff'], sarah)
        self.assertEquals(room.staffs_with_position[1]['staff'], bob)

@@ -8,6 +8,8 @@ from mongoengine.django.shortcuts import get_document_or_404
from django.http import Http404
from django.template import Context, Template
from django.conf import settings
from django.core.paginator import Paginator

settings.configure()

class QuerySetTest(unittest.TestCase):
@@ -67,3 +69,22 @@ class QuerySetTest(unittest.TestCase):
        self.assertRaises(Http404, get_document_or_404, self.Person, pk='1234')
        self.assertEqual(p, get_document_or_404(self.Person, pk=p.pk))

    def test_pagination(self):
        """Ensure that Pagination works as expected
        """
        class Page(Document):
            name = StringField()

        Page.drop_collection()

        for i in xrange(1, 11):
            Page(name=str(i)).save()

        paginator = Paginator(Page.objects.all(), 2)

        t = Template("{% for i in page.object_list %}{{ i.name }}:{% endfor %}")
        for p in paginator.page_range:
            d = {"page": paginator.page(p)}
            end = p * 2
            start = end - 1
            self.assertEqual(t.render(Context(d)), u'%d:%d:' % (start, end))
@@ -1,35 +1,54 @@
import pickle
import pymongo
import bson
import unittest
import warnings

from datetime import datetime

import pymongo
import pickle
import weakref

from fixtures import Base, Mixin, PickleEmbedded, PickleTest

from mongoengine import *
from mongoengine.base import _document_registry, NotRegistered, InvalidDocumentError
from mongoengine.connection import _get_db
from mongoengine.base import NotRegistered, InvalidDocumentError
from mongoengine.queryset import InvalidQueryError
from mongoengine.connection import get_db


class DocumentTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = _get_db()
        self.db = get_db()

        class Person(Document):
            name = StringField()
            age = IntField()

            meta = {'allow_inheritance': True}

        self.Person = Person

    def tearDown(self):
        self.Person.drop_collection()

    def test_future_warning(self):
        """Add FutureWarning for future allow_inhertiance default change.
        """

        with warnings.catch_warnings(True) as errors:

            class SimpleBase(Document):
                a = IntField()

            class InheritedClass(SimpleBase):
                b = IntField()

            InheritedClass()
            self.assertEquals(len(errors), 1)
            warning = errors[0]
            self.assertEquals(FutureWarning, warning.category)
            self.assertTrue("InheritedClass" in warning.message.message)

    def test_drop_collection(self):
        """Ensure that the collection may be dropped from the database.
        """
@@ -41,6 +60,21 @@ class DocumentTest(unittest.TestCase):
        self.Person.drop_collection()
        self.assertFalse(collection in self.db.collection_names())

    def test_queryset_resurrects_dropped_collection(self):

        self.Person.objects().item_frequencies('name')
        self.Person.drop_collection()

        self.assertEqual({}, self.Person.objects().item_frequencies('name'))

        class Actor(self.Person):
            pass

        # Ensure works correctly with inhertited classes
        Actor.objects().item_frequencies('name')
        self.Person.drop_collection()
        self.assertEqual({}, Actor.objects().item_frequencies('name'))

    def test_definition(self):
        """Ensure that document may be defined using fields.
        """
@@ -132,7 +166,8 @@ class DocumentTest(unittest.TestCase):
    def test_get_superclasses(self):
        """Ensure that the correct list of superclasses is assembled.
        """
        class Animal(Document): pass
        class Animal(Document):
            meta = {'allow_inheritance': True}
        class Fish(Animal): pass
        class Mammal(Animal): pass
        class Human(Mammal): pass
@@ -147,31 +182,8 @@ class DocumentTest(unittest.TestCase):
        }
        self.assertEqual(Dog._superclasses, dog_superclasses)

    def test_get_subclasses(self):
        """Ensure that the correct list of subclasses is retrieved by the
        _get_subclasses method.
        """
        class Animal(Document): pass
        class Fish(Animal): pass
        class Mammal(Animal): pass
        class Human(Mammal): pass
        class Dog(Mammal): pass

        mammal_subclasses = {
            'Animal.Mammal.Dog': Dog,
            'Animal.Mammal.Human': Human
        }
        self.assertEqual(Mammal._get_subclasses(), mammal_subclasses)

        animal_subclasses = {
            'Animal.Fish': Fish,
            'Animal.Mammal': Mammal,
            'Animal.Mammal.Dog': Dog,
            'Animal.Mammal.Human': Human
        }
        self.assertEqual(Animal._get_subclasses(), animal_subclasses)

    def test_external_super_and_sub_classes(self):
    def test_external_superclasses(self):
        """Ensure that the correct list of sub and super classes is assembled.
        when importing part of the model
        """
@@ -191,20 +203,6 @@ class DocumentTest(unittest.TestCase):
        }
        self.assertEqual(Dog._superclasses, dog_superclasses)

        animal_subclasses = {
            'Base.Animal.Fish': Fish,
            'Base.Animal.Mammal': Mammal,
            'Base.Animal.Mammal.Dog': Dog,
            'Base.Animal.Mammal.Human': Human
        }
        self.assertEqual(Animal._get_subclasses(), animal_subclasses)

        mammal_subclasses = {
            'Base.Animal.Mammal.Dog': Dog,
            'Base.Animal.Mammal.Human': Human
        }
        self.assertEqual(Mammal._get_subclasses(), mammal_subclasses)

        Base.drop_collection()

        h = Human()
@@ -218,7 +216,8 @@ class DocumentTest(unittest.TestCase):

    def test_polymorphic_queries(self):
        """Ensure that the correct subclasses are returned from a query"""
        class Animal(Document): pass
        class Animal(Document):
            meta = {'allow_inheritance': True}
        class Fish(Animal): pass
        class Mammal(Animal): pass
        class Human(Mammal): pass
@@ -247,7 +246,8 @@ class DocumentTest(unittest.TestCase):
        """Ensure that the correct subclasses are returned from a query when
        using references / generic references
        """
        class Animal(Document): pass
        class Animal(Document):
            meta = {'allow_inheritance': True}
        class Fish(Animal): pass
        class Mammal(Animal): pass
        class Human(Mammal): pass
@@ -326,7 +326,8 @@ class DocumentTest(unittest.TestCase):
                         self.Person._get_collection_name())

        # Ensure that MRO error is not raised
        class A(Document): pass
        class A(Document):
            meta = {'allow_inheritance': True}
        class B(A): pass
        class C(B): pass

@@ -621,6 +622,7 @@ class DocumentTest(unittest.TestCase):
                'tags',
                ('category', '-date')
            ],
            'allow_inheritance': True
        }

        BlogPost.drop_collection()
@@ -658,6 +660,26 @@ class DocumentTest(unittest.TestCase):

        BlogPost.drop_collection()

    def test_explicit_geo2d_index(self):
        """Ensure that geo2d indexes work when created via meta[indexes]
        """
        class Place(Document):
            location = DictField()
            meta = {
                'indexes': [
                    '*location.point',
                ],
            }
        Place.drop_collection()

        info = Place.objects._collection.index_information()
        # Indexes are lazy so use list() to perform query
        list(Place.objects)
        info = Place.objects._collection.index_information()
        info = [value['key'] for key, value in info.iteritems()]

        self.assertTrue([('location.point', '2d')] in info)

    def test_dictionary_indexes(self):
        """Ensure that indexes are used when meta[indexes] contains dictionaries
        instead of lists.
@@ -690,6 +712,34 @@ class DocumentTest(unittest.TestCase):

        BlogPost.drop_collection()

    def test_abstract_index_inheritance(self):

        class UserBase(Document):
            meta = {
                'abstract': True,
                'indexes': ['user_guid']
            }

            user_guid = StringField(required=True)


        class Person(UserBase):
            meta = {
                'indexes': ['name'],
            }

            name = StringField()

        Person.drop_collection()

        p = Person(name="test", user_guid='123')
        p.save()

        self.assertEquals(1, Person.objects.count())
        info = Person.objects._collection.index_information()
        self.assertEqual(info.keys(), ['_types_1_user_guid_1', '_id_', '_types_1_name_1'])
        Person.drop_collection()

    def test_embedded_document_index(self):
        """Tests settings an index on an embedded document
        """
@@ -740,6 +790,17 @@ class DocumentTest(unittest.TestCase):
        post1.save()
        BlogPost.drop_collection()

    def test_recursive_embedded_objects_dont_break_indexes(self):

        class RecursiveObject(EmbeddedDocument):
            obj = EmbeddedDocumentField('self')

        class RecursiveDocument(Document):
            recursive_obj = EmbeddedDocumentField(RecursiveObject)

        info = RecursiveDocument.objects._collection.index_information()
        self.assertEqual(info.keys(), ['_id_', '_types_1'])

    def test_geo_indexes_recursion(self):

        class User(Document):
@@ -752,6 +813,34 @@ class DocumentTest(unittest.TestCase):

        self.assertEquals(len(User._geo_indices()), 2)

    def test_covered_index(self):
        """Ensure that covered indexes can be used
        """

        class Test(Document):
            a = IntField()

            meta = {
                'indexes': ['a'],
                'allow_inheritance': False
            }

        Test.drop_collection()

        obj = Test(a=1)
        obj.save()

        # Need to be explicit about covered indexes as mongoDB doesn't know if
        # the documents returned might have more keys in that here.
        query_plan = Test.objects(id=obj.id).exclude('a').explain()
        self.assertFalse(query_plan['indexOnly'])

        query_plan = Test.objects(id=obj.id).only('id').explain()
        self.assertTrue(query_plan['indexOnly'])

        query_plan = Test.objects(a=1).only('a').exclude('id').explain()
        self.assertTrue(query_plan['indexOnly'])

    def test_hint(self):

        class BlogPost(Document):
@@ -933,6 +1022,8 @@ class DocumentTest(unittest.TestCase):
            username = StringField(primary_key=True)
            name = StringField()

            meta = {'allow_inheritance': True}

        User.drop_collection()

        self.assertEqual(User._fields['username'].db_field, '_id')
@@ -982,6 +1073,8 @@ class DocumentTest(unittest.TestCase):
        class Place(Document):
            name = StringField()

            meta = {'allow_inheritance': True}

        class NicePlace(Place):
            pass

@@ -992,10 +1085,8 @@ class DocumentTest(unittest.TestCase):

        # Mimic Place and NicePlace definitions being in a different file
        # and the NicePlace model not being imported in at query time.
        @classmethod
        def _get_subclasses(cls):
            return {}
        Place._get_subclasses = _get_subclasses
        from mongoengine.base import _document_registry
        del(_document_registry['Place.NicePlace'])

        def query_without_importing_nice_place():
            print Place.objects.all()
@@ -1058,7 +1149,7 @@ class DocumentTest(unittest.TestCase):
        doc.embedded_field = embedded_1
        doc.save()

        doc.reload()
        doc = doc.reload(10)
        doc.list_field.append(1)
        doc.dict_field['woot'] = "woot"
        doc.embedded_field.list_field.append(1)
@@ -1069,7 +1160,7 @@ class DocumentTest(unittest.TestCase):
            'embedded_field.dict_field'])
        doc.save()

        doc.reload()
        doc = doc.reload(10)
        self.assertEquals(doc._get_changed_fields(), [])
        self.assertEquals(len(doc.list_field), 4)
        self.assertEquals(len(doc.dict_field), 2)
@@ -1217,6 +1308,58 @@ class DocumentTest(unittest.TestCase):
        p1.reload()
        self.assertEquals(p1.name, p.parent.name)

    def test_save_cascade_kwargs(self):

        class Person(Document):
            name = StringField()
            parent = ReferenceField('self')

        Person.drop_collection()

        p1 = Person(name="Wilson Snr")
        p1.parent = None
        p1.save()

        p2 = Person(name="Wilson Jr")
        p2.parent = p1
        p2.save(force_insert=True, cascade_kwargs={"force_insert": False})

        p = Person.objects(name="Wilson Jr").get()
        p.parent.name = "Daddy Wilson"
        p.save()

        p1.reload()
        self.assertEquals(p1.name, p.parent.name)

    def test_save_cascade_meta(self):

        class Person(Document):
            name = StringField()
            parent = ReferenceField('self')

            meta = {'cascade': False}

        Person.drop_collection()

        p1 = Person(name="Wilson Snr")
        p1.parent = None
        p1.save()

        p2 = Person(name="Wilson Jr")
        p2.parent = p1
        p2.save()

        p = Person.objects(name="Wilson Jr").get()
        p.parent.name = "Daddy Wilson"
        p.save()

        p1.reload()
        self.assertNotEquals(p1.name, p.parent.name)

        p.save(cascade=True)
        p1.reload()
        self.assertEquals(p1.name, p.parent.name)

    def test_save_cascades_generically(self):

        class Person(Document):
@@ -1346,6 +1489,12 @@ class DocumentTest(unittest.TestCase):

        self.assertRaises(OperationError, update_no_value_raises)

        def update_no_op_raises():
            person = self.Person.objects.first()
            person.update(name="Dan")

        self.assertRaises(InvalidQueryError, update_no_op_raises)

    def test_embedded_update(self):
        """
        Test update on `EmbeddedDocumentField` fields
@@ -1480,25 +1629,27 @@ class DocumentTest(unittest.TestCase):
        self.assertEquals(doc._get_changed_fields(), ['embedded_field'])

        embedded_delta = {
            '_types': ['Embedded'],
            '_cls': 'Embedded',
            'string_field': 'hello',
            'int_field': 1,
            'dict_field': {'hello': 'world'},
            'list_field': ['1', 2, {'hello': 'world'}]
        }
        self.assertEquals(doc.embedded_field._delta(), (embedded_delta, {}))
        embedded_delta.update({
            '_types': ['Embedded'],
            '_cls': 'Embedded',
        })
        self.assertEquals(doc._delta(), ({'embedded_field': embedded_delta}, {}))

        doc.save()
        doc.reload()
        doc = doc.reload(10)

        doc.embedded_field.dict_field = {}
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.dict_field'])
        self.assertEquals(doc.embedded_field._delta(), ({}, {'dict_field': 1}))
        self.assertEquals(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
        doc.save()
        doc.reload()
        doc = doc.reload(10)
        self.assertEquals(doc.embedded_field.dict_field, {})

        doc.embedded_field.list_field = []
@@ -1506,7 +1657,7 @@ class DocumentTest(unittest.TestCase):
        self.assertEquals(doc.embedded_field._delta(), ({}, {'list_field': 1}))
        self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field': 1}))
        doc.save()
        doc.reload()
        doc = doc.reload(10)
        self.assertEquals(doc.embedded_field.list_field, [])

        embedded_2 = Embedded()
@@ -1539,7 +1690,7 @@ class DocumentTest(unittest.TestCase):
            }]
        }, {}))
        doc.save()
        doc.reload()
        doc = doc.reload(10)

        self.assertEquals(doc.embedded_field.list_field[0], '1')
        self.assertEquals(doc.embedded_field.list_field[1], 2)
@@ -1551,7 +1702,7 @@ class DocumentTest(unittest.TestCase):
        self.assertEquals(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {}))
        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {}))
        doc.save()
        doc.reload()
        doc = doc.reload(10)
        self.assertEquals(doc.embedded_field.list_field[2].string_field, 'world')

        # Test multiple assignments
@@ -1576,40 +1727,40 @@ class DocumentTest(unittest.TestCase):
            'dict_field': {'hello': 'world'}}
        ]}, {}))
        doc.save()
        doc.reload()
        doc = doc.reload(10)
        self.assertEquals(doc.embedded_field.list_field[2].string_field, 'hello world')

        # Test list native methods
        doc.embedded_field.list_field[2].list_field.pop(0)
        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {}))
        doc.save()
        doc.reload()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {}))
        doc.save()
        doc.reload()
        doc = doc.reload(10)
        self.assertEquals(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1])

        doc.embedded_field.list_field[2].list_field.sort()
        doc.save()
        doc.reload()
        doc = doc.reload(10)
        self.assertEquals(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}])

        del(doc.embedded_field.list_field[2].list_field[2]['hello'])
        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {}))
        doc.save()
        doc.reload()
        doc = doc.reload(10)

        del(doc.embedded_field.list_field[2].list_field)
        self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1}))

        doc.save()
        doc.reload()
        doc = doc.reload(10)

        doc.dict_field['Embedded'] = embedded_1
        doc.save()
        doc.reload()
        doc = doc.reload(10)

        doc.dict_field['Embedded'].string_field = 'Hello World'
        self.assertEquals(doc._get_changed_fields(), ['dict_field.Embedded.string_field'])
@@ -1673,7 +1824,7 @@ class DocumentTest(unittest.TestCase):
        doc.dict_field = {'hello': 'world'}
        doc.list_field = ['1', 2, {'hello': 'world'}]
        doc.save()
        doc.reload()
        doc = doc.reload(10)

        self.assertEquals(doc.string_field, 'hello')
        self.assertEquals(doc.int_field, 1)
@@ -1713,25 +1864,27 @@ class DocumentTest(unittest.TestCase):
        self.assertEquals(doc._get_changed_fields(), ['db_embedded_field'])

        embedded_delta = {
            '_types': ['Embedded'],
            '_cls': 'Embedded',
            'db_string_field': 'hello',
            'db_int_field': 1,
            'db_dict_field': {'hello': 'world'},
            'db_list_field': ['1', 2, {'hello': 'world'}]
        }
        self.assertEquals(doc.embedded_field._delta(), (embedded_delta, {}))
        embedded_delta.update({
            '_types': ['Embedded'],
            '_cls': 'Embedded',
        })
        self.assertEquals(doc._delta(), ({'db_embedded_field': embedded_delta}, {}))

        doc.save()
        doc.reload()
        doc = doc.reload(10)

        doc.embedded_field.dict_field = {}
        self.assertEquals(doc._get_changed_fields(), ['db_embedded_field.db_dict_field'])
        self.assertEquals(doc.embedded_field._delta(), ({}, {'db_dict_field': 1}))
        self.assertEquals(doc._delta(), ({}, {'db_embedded_field.db_dict_field': 1}))
        doc.save()
        doc.reload()
        doc = doc.reload(10)
        self.assertEquals(doc.embedded_field.dict_field, {})

        doc.embedded_field.list_field = []
@@ -1739,7 +1892,7 @@ class DocumentTest(unittest.TestCase):
        self.assertEquals(doc.embedded_field._delta(), ({}, {'db_list_field': 1}))
        self.assertEquals(doc._delta(), ({}, {'db_embedded_field.db_list_field': 1}))
        doc.save()
        doc.reload()
        doc = doc.reload(10)
        self.assertEquals(doc.embedded_field.list_field, [])

        embedded_2 = Embedded()
@@ -1772,7 +1925,7 @@ class DocumentTest(unittest.TestCase):
            }]
        }, {}))
        doc.save()
        doc.reload()
        doc = doc.reload(10)

        self.assertEquals(doc.embedded_field.list_field[0], '1')
        self.assertEquals(doc.embedded_field.list_field[1], 2)
@@ -1784,7 +1937,7 @@ class DocumentTest(unittest.TestCase):
        self.assertEquals(doc.embedded_field._delta(), ({'db_list_field.2.db_string_field': 'world'}, {}))
        self.assertEquals(doc._delta(), ({'db_embedded_field.db_list_field.2.db_string_field': 'world'}, {}))
        doc.save()
        doc.reload()
        doc = doc.reload(10)
        self.assertEquals(doc.embedded_field.list_field[2].string_field, 'world')

        # Test multiple assignments
@@ -1809,30 +1962,30 @@ class DocumentTest(unittest.TestCase):
            'db_dict_field': {'hello': 'world'}}
        ]}, {}))
        doc.save()
        doc.reload()
        doc = doc.reload(10)
        self.assertEquals(doc.embedded_field.list_field[2].string_field, 'hello world')

        # Test list native methods
        doc.embedded_field.list_field[2].list_field.pop(0)
        self.assertEquals(doc._delta(), ({'db_embedded_field.db_list_field.2.db_list_field': [2, {'hello': 'world'}]}, {}))
        doc.save()
        doc.reload()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        self.assertEquals(doc._delta(), ({'db_embedded_field.db_list_field.2.db_list_field': [2, {'hello': 'world'}, 1]}, {}))
        doc.save()
        doc.reload()
        doc = doc.reload(10)
        self.assertEquals(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1])

        doc.embedded_field.list_field[2].list_field.sort()
        doc.save()
        doc.reload()
        doc = doc.reload(10)
        self.assertEquals(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}])

        del(doc.embedded_field.list_field[2].list_field[2]['hello'])
        self.assertEquals(doc._delta(), ({'db_embedded_field.db_list_field.2.db_list_field': [1, 2, {}]}, {}))
        doc.save()
        doc.reload()
        doc = doc.reload(10)

        del(doc.embedded_field.list_field[2].list_field)
        self.assertEquals(doc._delta(), ({}, {'db_embedded_field.db_list_field.2.db_list_field': 1}))
@@ -2045,6 +2198,30 @@ class DocumentTest(unittest.TestCase):
|
||||
# Ensure that the 'details' embedded object saved correctly
|
||||
self.assertEqual(employee_obj['details']['position'], 'Developer')
|
||||
|
||||
def test_embedded_update_after_save(self):
|
||||
"""
|
||||
Test update of `EmbeddedDocumentField` attached to a newly saved
|
||||
document.
|
||||
"""
|
||||
class Page(EmbeddedDocument):
|
||||
log_message = StringField(verbose_name="Log message",
|
||||
required=True)
|
||||
|
||||
class Site(Document):
|
||||
page = EmbeddedDocumentField(Page)
|
||||
|
||||
|
||||
Site.drop_collection()
|
||||
site = Site(page=Page(log_message="Warning: Dummy message"))
|
||||
site.save()
|
||||
|
||||
# Update
|
||||
site.page.log_message = "Error: Dummy message"
|
||||
site.save()
|
||||
|
||||
site = Site.objects.first()
|
||||
self.assertEqual(site.page.log_message, "Error: Dummy message")
|
||||
|
||||
def test_updating_an_embedded_document(self):
|
||||
"""Ensure that a document with an embedded document field may be
|
||||
saved in the database.
|
||||
@@ -2111,6 +2288,30 @@ class DocumentTest(unittest.TestCase):
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
def test_mixin_inheritance(self):
|
||||
class BaseMixIn(object):
|
||||
count = IntField()
|
||||
data = StringField()
|
||||
|
||||
class DoubleMixIn(BaseMixIn):
|
||||
comment = StringField()
|
||||
|
||||
class TestDoc(Document, DoubleMixIn):
|
||||
age = IntField()
|
||||
|
||||
TestDoc.drop_collection()
|
||||
t = TestDoc(count=12, data="test",
|
||||
comment="great!", age=19)
|
||||
|
||||
t.save()
|
||||
|
||||
t = TestDoc.objects.first()
|
||||
|
||||
self.assertEquals(t.age, 19)
|
||||
self.assertEquals(t.comment, "great!")
|
||||
self.assertEquals(t.data, "test")
|
||||
self.assertEquals(t.count, 12)
|
||||
|
||||
def test_save_reference(self):
|
||||
"""Ensure that a document reference field may be saved in the database.
|
||||
"""
|
||||
@@ -2134,7 +2335,7 @@ class DocumentTest(unittest.TestCase):
|
||||
|
||||
# Test laziness
|
||||
self.assertTrue(isinstance(post_obj._data['author'],
|
||||
pymongo.dbref.DBRef))
|
||||
bson.DBRef))
|
||||
self.assertTrue(isinstance(post_obj.author, self.Person))
|
||||
self.assertEqual(post_obj.author.name, 'Test User')
|
||||
|
||||
@@ -2147,6 +2348,32 @@ class DocumentTest(unittest.TestCase):
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
def test_cannot_perform_joins_references(self):
|
||||
|
||||
class BlogPost(Document):
|
||||
author = ReferenceField(self.Person)
|
||||
author2 = GenericReferenceField()
|
||||
|
||||
def test_reference():
|
||||
list(BlogPost.objects(author__name="test"))
|
||||
|
||||
self.assertRaises(InvalidQueryError, test_reference)
|
||||
|
||||
def test_generic_reference():
|
||||
list(BlogPost.objects(author2__name="test"))
|
||||
|
||||
self.assertRaises(InvalidQueryError, test_generic_reference)
|
||||
|
||||
def test_duplicate_db_fields_raise_invalid_document_error(self):
|
||||
"""Ensure a InvalidDocumentError is thrown if duplicate fields
|
||||
declare the same db_field"""
|
||||
|
||||
def throw_invalid_document_error():
|
||||
class Foo(Document):
|
||||
name = StringField()
|
||||
name2 = StringField(db_field='name')
|
||||
|
||||
self.assertRaises(InvalidDocumentError, throw_invalid_document_error)
|
||||
|
||||
def test_reverse_delete_rule_cascade_and_nullify(self):
|
||||
"""Ensure that a referenced document is also deleted upon deletion.
|
||||
@@ -2179,6 +2406,54 @@ class DocumentTest(unittest.TestCase):
|
||||
author.delete()
|
||||
self.assertEqual(len(BlogPost.objects), 0)
|
||||
|
||||
def test_reverse_delete_rule_cascade_and_nullify_complex_field(self):
|
||||
"""Ensure that a referenced document is also deleted upon deletion.
|
||||
"""
|
||||
|
||||
class BlogPost(Document):
|
||||
content = StringField()
|
||||
authors = ListField(ReferenceField(self.Person, reverse_delete_rule=CASCADE))
|
||||
reviewers = ListField(ReferenceField(self.Person, reverse_delete_rule=NULLIFY))
|
||||
|
||||
self.Person.drop_collection()
|
||||
BlogPost.drop_collection()
|
||||
|
||||
author = self.Person(name='Test User')
|
||||
author.save()
|
||||
|
||||
reviewer = self.Person(name='Re Viewer')
|
||||
reviewer.save()
|
||||
|
||||
post = BlogPost(content= 'Watched some TV')
|
||||
post.authors = [author]
|
||||
post.reviewers = [reviewer]
|
||||
post.save()
|
||||
|
||||
reviewer.delete()
|
||||
self.assertEqual(len(BlogPost.objects), 1) # No effect on the BlogPost
|
||||
self.assertEqual(BlogPost.objects.get().reviewers, [])
|
||||
|
||||
# Delete the Person, which should lead to deletion of the BlogPost, too
|
||||
author.delete()
|
||||
self.assertEqual(len(BlogPost.objects), 0)
|
||||
|
||||
def test_invalid_reverse_delete_rules_raise_errors(self):
|
||||
|
||||
def throw_invalid_document_error():
|
||||
class Blog(Document):
|
||||
content = StringField()
|
||||
authors = MapField(ReferenceField(self.Person, reverse_delete_rule=CASCADE))
|
||||
reviewers = DictField(field=ReferenceField(self.Person, reverse_delete_rule=NULLIFY))
|
||||
|
||||
self.assertRaises(InvalidDocumentError, throw_invalid_document_error)
|
||||
|
||||
def throw_invalid_document_error_embedded():
|
||||
class Parents(EmbeddedDocument):
|
||||
father = ReferenceField('Person', reverse_delete_rule=DENY)
|
||||
mother = ReferenceField('Person', reverse_delete_rule=DENY)
|
||||
|
||||
self.assertRaises(InvalidDocumentError, throw_invalid_document_error_embedded)
|
||||
|
||||
def test_reverse_delete_rule_cascade_recurs(self):
|
||||
"""Ensure that a chain of documents is also deleted upon cascaded
|
||||
deletion.
|
||||
@@ -2333,10 +2608,10 @@ class DocumentTest(unittest.TestCase):
|
||||
resurrected.string = "Two"
|
||||
resurrected.save()
|
||||
|
||||
pickle_doc.reload()
|
||||
pickle_doc = pickle_doc.reload()
|
||||
self.assertEquals(resurrected, pickle_doc)
|
||||
|
||||
def throw_invalid_document_error(self):
|
||||
def test_throw_invalid_document_error(self):
|
||||
|
||||
# test handles people trying to upsert
|
||||
def throw_invalid_document_error():
|
||||
@@ -2345,6 +2620,223 @@ class DocumentTest(unittest.TestCase):
|
||||
|
||||
self.assertRaises(InvalidDocumentError, throw_invalid_document_error)
|
||||
|
||||
    def test_mutating_documents(self):

        class B(EmbeddedDocument):
            field1 = StringField(default='field1')

        class A(Document):
            b = EmbeddedDocumentField(B, default=lambda: B())

        A.drop_collection()
        a = A()
        a.save()
        a.reload()
        self.assertEquals(a.b.field1, 'field1')

        class C(EmbeddedDocument):
            c_field = StringField(default='cfield')

        class B(EmbeddedDocument):
            field1 = StringField(default='field1')
            field2 = EmbeddedDocumentField(C, default=lambda: C())

        class A(Document):
            b = EmbeddedDocumentField(B, default=lambda: B())

        a = A.objects()[0]
        a.b.field2.c_field = 'new value'
        a.save()

        a.reload()
        self.assertEquals(a.b.field2.c_field, 'new value')

    def test_can_save_false_values(self):
        """Ensures you can save False values on save"""
        class Doc(Document):
            foo = StringField()
            archived = BooleanField(default=False, required=True)

        Doc.drop_collection()
        d = Doc()
        d.save()
        d.archived = False
        d.save()

        self.assertEquals(Doc.objects(archived=False).count(), 1)

    def test_can_save_false_values_dynamic(self):
        """Ensures you can save False values on dynamic docs"""
        class Doc(DynamicDocument):
            foo = StringField()

        Doc.drop_collection()
        d = Doc()
        d.save()
        d.archived = False
        d.save()

        self.assertEquals(Doc.objects(archived=False).count(), 1)

    def test_do_not_save_unchanged_references(self):
        """Ensures cascading saves don't auto update"""
        class Job(Document):
            name = StringField()

        class Person(Document):
            name = StringField()
            age = IntField()
            job = ReferenceField(Job)

        Job.drop_collection()
        Person.drop_collection()

        job = Job(name="Job 1")
        # job should not have any changed fields after the save
        job.save()

        person = Person(name="name", age=10, job=job)

        from pymongo.collection import Collection
        orig_update = Collection.update
        try:
            def fake_update(*args, **kwargs):
                self.fail("Unexpected update for %s" % args[0].name)
                return orig_update(*args, **kwargs)

            Collection.update = fake_update
            person.save()
        finally:
            Collection.update = orig_update

    def test_db_alias_tests(self):
        """ DB Alias tests """
        # mongoenginetest is the default connection alias from setUp()
        # Register Aliases
        register_connection('testdb-1', 'mongoenginetest2')
        register_connection('testdb-2', 'mongoenginetest3')
        register_connection('testdb-3', 'mongoenginetest4')

        class User(Document):
            name = StringField()
            meta = {"db_alias": "testdb-1"}

        class Book(Document):
            name = StringField()
            meta = {"db_alias": "testdb-2"}

        # Drops
        User.drop_collection()
        Book.drop_collection()

        # Create
        bob = User.objects.create(name="Bob")
        hp = Book.objects.create(name="Harry Potter")

        # Selects
        self.assertEqual(User.objects.first(), bob)
        self.assertEqual(Book.objects.first(), hp)

        # Dereference
        class AuthorBooks(Document):
            author = ReferenceField(User)
            book = ReferenceField(Book)
            meta = {"db_alias": "testdb-3"}

        # Drops
        AuthorBooks.drop_collection()

        ab = AuthorBooks.objects.create(author=bob, book=hp)

        # Select
        self.assertEqual(AuthorBooks.objects.first(), ab)
        self.assertEqual(AuthorBooks.objects.first().book, hp)
        self.assertEqual(AuthorBooks.objects.first().author, bob)
        self.assertEqual(AuthorBooks.objects.filter(author=bob).first(), ab)
        self.assertEqual(AuthorBooks.objects.filter(book=hp).first(), ab)

        # DB Alias
        self.assertEqual(User._get_db(), get_db("testdb-1"))
        self.assertEqual(Book._get_db(), get_db("testdb-2"))
        self.assertEqual(AuthorBooks._get_db(), get_db("testdb-3"))

        # Collections
        self.assertEqual(User._get_collection(), get_db("testdb-1")[User._get_collection_name()])
        self.assertEqual(Book._get_collection(), get_db("testdb-2")[Book._get_collection_name()])
        self.assertEqual(AuthorBooks._get_collection(), get_db("testdb-3")[AuthorBooks._get_collection_name()])
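Aside: the db_alias assertions above demonstrate per-model database routing. A minimal sketch of the same pattern outside the test suite (database and alias names here are illustrative, not part of this changeset):

# Sketch only: routing one model to a secondary database via db_alias.
from mongoengine import Document, StringField, connect, register_connection

connect(db='app-main')                      # default connection alias
register_connection('logs', 'app-logs')     # extra alias -> separate database

class AuditEntry(Document):
    message = StringField()
    meta = {'db_alias': 'logs'}             # stored in 'app-logs', not 'app-main'

AuditEntry(message='user logged in').save()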
    def test_db_ref_usage(self):
        """ DBRef usage in __raw__ queries """

        class User(Document):
            name = StringField()

        class Book(Document):
            name = StringField()
            author = ReferenceField(User)
            extra = DictField()
            meta = {
                'ordering': ['+name']
            }

            def __unicode__(self):
                return self.name

            def __str__(self):
                return self.name

        # Drops
        User.drop_collection()
        Book.drop_collection()

        # Authors
        bob = User.objects.create(name="Bob")
        jon = User.objects.create(name="Jon")

        # Redactors
        karl = User.objects.create(name="Karl")
        susan = User.objects.create(name="Susan")
        peter = User.objects.create(name="Peter")

        # Bob
        Book.objects.create(name="1", author=bob, extra={"a": bob.to_dbref(), "b": [karl.to_dbref(), susan.to_dbref()]})
        Book.objects.create(name="2", author=bob, extra={"a": bob.to_dbref(), "b": karl.to_dbref()})
        Book.objects.create(name="3", author=bob, extra={"a": bob.to_dbref(), "c": [jon.to_dbref(), peter.to_dbref()]})
        Book.objects.create(name="4", author=bob)

        # Jon
        Book.objects.create(name="5", author=jon)
        Book.objects.create(name="6", author=peter)
        Book.objects.create(name="7", author=jon)
        Book.objects.create(name="8", author=jon)
        Book.objects.create(name="9", author=jon, extra={"a": peter.to_dbref()})

        # Checks
        self.assertEqual(u",".join([str(b) for b in Book.objects.all()]), "1,2,3,4,5,6,7,8,9")
        # Bob related books
        self.assertEqual(u",".join([str(b) for b in Book.objects.filter(
                                    Q(extra__a=bob) |
                                    Q(author=bob) |
                                    Q(extra__b=bob))]),
                         "1,2,3,4")

        # Susan & Karl related books
        self.assertEqual(u",".join([str(b) for b in Book.objects.filter(
                                    Q(extra__a__all=[karl, susan]) |
                                    Q(author__all=[karl, susan]) |
                                    Q(extra__b__all=[karl.to_dbref(), susan.to_dbref()])
                                    )]), "1")

        # $where
        self.assertEqual(u",".join([str(b) for b in Book.objects.filter(
                                    __raw__={
                                        "$where": """
                                            function(){
                                                return this.name == '1' ||
                                                       this.name == '2';}"""
                                    })]), "1,2")


if __name__ == '__main__':
    unittest.main()
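Aside: test_db_ref_usage above stores DBRefs inside a plain DictField via to_dbref() and then matches them back with field lookups. A stripped-down sketch of the pattern (the Reader/Article model names are invented):

# Sketch: persisting and querying DBRefs held inside a DictField.
from mongoengine import Document, DictField, StringField

class Reader(Document):
    name = StringField()

class Article(Document):
    extra = DictField()

bob = Reader(name='Bob')
bob.save()
Article(extra={'editor': bob.to_dbref()}).save()

# The dynamic lookup converts the document to its DBRef for comparison,
# mirroring the Q(extra__a=bob) filters in the test above.
Article.objects(extra__editor=bob).first()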
502	tests/dynamic_document.py	Normal file
@@ -0,0 +1,502 @@
import unittest

from mongoengine import *
from mongoengine.connection import get_db


class DynamicDocTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

        class Person(DynamicDocument):
            name = StringField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()

        self.Person = Person

    def test_simple_dynamic_document(self):
        """Ensures simple dynamic documents are saved correctly"""

        p = self.Person()
        p.name = "James"
        p.age = 34

        self.assertEquals(p.to_mongo(),
                          {"_types": ["Person"], "_cls": "Person",
                           "name": "James", "age": 34})

        p.save()

        self.assertEquals(self.Person.objects.first().age, 34)

        # Confirm no changes to self.Person
        self.assertFalse(hasattr(self.Person, 'age'))

    def test_dynamic_document_delta(self):
        """Ensures simple dynamic documents can delta correctly"""
        p = self.Person(name="James", age=34)
        self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {}))

        p.doc = 123
        del(p.doc)
        self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {'doc': 1}))

    def test_change_scope_of_variable(self):
        """Test changing the scope of a dynamic field has no adverse effects"""
        p = self.Person()
        p.name = "Dean"
        p.misc = 22
        p.save()

        p = self.Person.objects.get()
        p.misc = {'hello': 'world'}
        p.save()

        p = self.Person.objects.get()
        self.assertEquals(p.misc, {'hello': 'world'})

    def test_delete_dynamic_field(self):
        """Test deleting a dynamic field works"""
        self.Person.drop_collection()
        p = self.Person()
        p.name = "Dean"
        p.misc = 22
        p.save()

        p = self.Person.objects.get()
        p.misc = {'hello': 'world'}
        p.save()

        p = self.Person.objects.get()
        self.assertEquals(p.misc, {'hello': 'world'})
        collection = self.db[self.Person._get_collection_name()]
        obj = collection.find_one()
        self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'misc', 'name'])

        del(p.misc)
        p.save()

        p = self.Person.objects.get()
        self.assertFalse(hasattr(p, 'misc'))

        obj = collection.find_one()
        self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'name'])

    def test_dynamic_document_queries(self):
        """Ensure we can query dynamic fields"""
        p = self.Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        self.assertEquals(1, self.Person.objects(age=22).count())
        p = self.Person.objects(age=22)
        p = p.get()
        self.assertEquals(22, p.age)

    def test_complex_dynamic_document_queries(self):
        class Person(DynamicDocument):
            name = StringField()

        Person.drop_collection()

        p = Person(name="test")
        p.age = "ten"
        p.save()

        p1 = Person(name="test1")
        p1.age = "less than ten and a half"
        p1.save()

        p2 = Person(name="test2")
        p2.age = 10
        p2.save()

        self.assertEquals(Person.objects(age__icontains='ten').count(), 2)
        self.assertEquals(Person.objects(age__gte=10).count(), 1)

    def test_complex_data_lookups(self):
        """Ensure you can query dynamic document dynamic fields"""
        p = self.Person()
        p.misc = {'hello': 'world'}
        p.save()

        self.assertEquals(1, self.Person.objects(misc__hello='world').count())

    def test_inheritance(self):
        """Ensure that dynamic documents play nicely with inheritance"""
        class Employee(self.Person):
            salary = IntField()

        Employee.drop_collection()

        self.assertTrue('name' in Employee._fields)
        self.assertTrue('salary' in Employee._fields)
        self.assertEqual(Employee._get_collection_name(),
                         self.Person._get_collection_name())

        joe_bloggs = Employee()
        joe_bloggs.name = "Joe Bloggs"
        joe_bloggs.salary = 10
        joe_bloggs.age = 20
        joe_bloggs.save()

        self.assertEquals(1, self.Person.objects(age=20).count())
        self.assertEquals(1, Employee.objects(age=20).count())

        joe_bloggs = self.Person.objects.first()
        self.assertTrue(isinstance(joe_bloggs, Employee))

    def test_embedded_dynamic_document(self):
        """Test dynamic embedded documents"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
                                           "embedded_field": {
                                               "_types": ['Embedded'], "_cls": "Embedded",
                                               "string_field": "hello",
                                               "int_field": 1,
                                               "dict_field": {"hello": "world"},
                                               "list_field": ['1', 2, {'hello': 'world'}]
                                           }})
        doc.save()

        doc = Doc.objects.first()
        self.assertEquals(doc.embedded_field.__class__, Embedded)
        self.assertEquals(doc.embedded_field.string_field, "hello")
        self.assertEquals(doc.embedded_field.int_field, 1)
        self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'})
        self.assertEquals(doc.embedded_field.list_field, ['1', 2, {'hello': 'world'}])

    def test_complex_embedded_documents(self):
        """Test complex dynamic embedded document setups"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        embedded_1.list_field = ['1', 2, embedded_2]
        doc.embedded_field = embedded_1

        self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
                                           "embedded_field": {
                                               "_types": ['Embedded'], "_cls": "Embedded",
                                               "string_field": "hello",
                                               "int_field": 1,
                                               "dict_field": {"hello": "world"},
                                               "list_field": ['1', 2,
                                                              {"_types": ['Embedded'], "_cls": "Embedded",
                                                               "string_field": "hello",
                                                               "int_field": 1,
                                                               "dict_field": {"hello": "world"},
                                                               "list_field": ['1', 2, {'hello': 'world'}]}
                                                              ]
                                           }})
        doc.save()
        doc = Doc.objects.first()
        self.assertEquals(doc.embedded_field.__class__, Embedded)
        self.assertEquals(doc.embedded_field.string_field, "hello")
        self.assertEquals(doc.embedded_field.int_field, 1)
        self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'})
        self.assertEquals(doc.embedded_field.list_field[0], '1')
        self.assertEquals(doc.embedded_field.list_field[1], 2)

        embedded_field = doc.embedded_field.list_field[2]

        self.assertEquals(embedded_field.__class__, Embedded)
        self.assertEquals(embedded_field.string_field, "hello")
        self.assertEquals(embedded_field.int_field, 1)
        self.assertEquals(embedded_field.dict_field, {'hello': 'world'})
        self.assertEquals(embedded_field.list_field, ['1', 2, {'hello': 'world'}])

    def test_delta_for_dynamic_documents(self):
        p = self.Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        p.age = 24
        self.assertEquals(p.age, 24)
        self.assertEquals(p._get_changed_fields(), ['age'])
        self.assertEquals(p._delta(), ({'age': 24}, {}))

        p = self.Person.objects(age=22).get()
        p.age = 24
        self.assertEquals(p.age, 24)
        self.assertEquals(p._get_changed_fields(), ['age'])
        self.assertEquals(p._delta(), ({'age': 24}, {}))

        p.save()
        self.assertEquals(1, self.Person.objects(age=24).count())

    def test_delta(self):

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEquals(doc._get_changed_fields(), [])
        self.assertEquals(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEquals(doc._get_changed_fields(), ['string_field'])
        self.assertEquals(doc._delta(), ({'string_field': 'hello'}, {}))

        doc._changed_fields = []
        doc.int_field = 1
        self.assertEquals(doc._get_changed_fields(), ['int_field'])
        self.assertEquals(doc._delta(), ({'int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEquals(doc._get_changed_fields(), ['dict_field'])
        self.assertEquals(doc._delta(), ({'dict_field': dict_value}, {}))

        doc._changed_fields = []
        list_value = ['1', 2, {'hello': 'world'}]
        doc.list_field = list_value
        self.assertEquals(doc._get_changed_fields(), ['list_field'])
        self.assertEquals(doc._delta(), ({'list_field': list_value}, {}))

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        self.assertEquals(doc._get_changed_fields(), ['dict_field'])
        self.assertEquals(doc._delta(), ({}, {'dict_field': 1}))

        doc._changed_fields = []
        doc.list_field = []
        self.assertEquals(doc._get_changed_fields(), ['list_field'])
        self.assertEquals(doc._delta(), ({}, {'list_field': 1}))

    def test_delta_recursive(self):
        """Test that delta calculation works with dynamic documents"""
        class Embedded(DynamicEmbeddedDocument):
            pass

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEquals(doc._get_changed_fields(), [])
        self.assertEquals(doc._delta(), ({}, {}))

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEquals(doc._get_changed_fields(), ['embedded_field'])

        embedded_delta = {
            'string_field': 'hello',
            'int_field': 1,
            'dict_field': {'hello': 'world'},
            'list_field': ['1', 2, {'hello': 'world'}]
        }
        self.assertEquals(doc.embedded_field._delta(), (embedded_delta, {}))
        embedded_delta.update({
            '_types': ['Embedded'],
            '_cls': 'Embedded',
        })
        self.assertEquals(doc._delta(), ({'embedded_field': embedded_delta}, {}))

        doc.save()
        doc.reload()

        doc.embedded_field.dict_field = {}
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.dict_field'])
        self.assertEquals(doc.embedded_field._delta(), ({}, {'dict_field': 1}))

        self.assertEquals(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
        doc.save()
        doc.reload()

        doc.embedded_field.list_field = []
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
        self.assertEquals(doc.embedded_field._delta(), ({}, {'list_field': 1}))
        self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field': 1}))
        doc.save()
        doc.reload()

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        doc.embedded_field.list_field = ['1', 2, embedded_2]
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
        self.assertEquals(doc.embedded_field._delta(), ({
            'list_field': ['1', 2, {
                '_cls': 'Embedded',
                '_types': ['Embedded'],
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))

        self.assertEquals(doc._delta(), ({
            'embedded_field.list_field': ['1', 2, {
                '_cls': 'Embedded',
                '_types': ['Embedded'],
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))
        doc.save()
        doc.reload()

        self.assertEquals(doc.embedded_field.list_field[2]._changed_fields, [])
        self.assertEquals(doc.embedded_field.list_field[0], '1')
        self.assertEquals(doc.embedded_field.list_field[1], 2)
        for k in doc.embedded_field.list_field[2]._fields:
            self.assertEquals(doc.embedded_field.list_field[2][k], embedded_2[k])

        doc.embedded_field.list_field[2].string_field = 'world'
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field.2.string_field'])
        self.assertEquals(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {}))
        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {}))
        doc.save()
        doc.reload()
        self.assertEquals(doc.embedded_field.list_field[2].string_field, 'world')

        # Test multiple assignments
        doc.embedded_field.list_field[2].string_field = 'hello world'
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
        self.assertEquals(doc.embedded_field._delta(), ({
            'list_field': ['1', 2, {
                '_types': ['Embedded'],
                '_cls': 'Embedded',
                'string_field': 'hello world',
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
                'dict_field': {'hello': 'world'}}]}, {}))
        self.assertEquals(doc._delta(), ({
            'embedded_field.list_field': ['1', 2, {
                '_types': ['Embedded'],
                '_cls': 'Embedded',
                'string_field': 'hello world',
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
                'dict_field': {'hello': 'world'}}
            ]}, {}))
        doc.save()
        doc.reload()
        self.assertEquals(doc.embedded_field.list_field[2].string_field, 'hello world')

        # Test list native methods
        doc.embedded_field.list_field[2].list_field.pop(0)
        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {}))
        doc.save()
        doc.reload()

        doc.embedded_field.list_field[2].list_field.append(1)
        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {}))
        doc.save()
        doc.reload()
        self.assertEquals(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1])

        doc.embedded_field.list_field[2].list_field.sort()
        doc.save()
        doc.reload()
        self.assertEquals(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}])

        del(doc.embedded_field.list_field[2].list_field[2]['hello'])
        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {}))
        doc.save()
        doc.reload()

        del(doc.embedded_field.list_field[2].list_field)
        self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1}))

        doc.save()
        doc.reload()

        doc.dict_field = {'embedded': embedded_1}
        doc.save()
        doc.reload()

        doc.dict_field['embedded'].string_field = 'Hello World'
        self.assertEquals(doc._get_changed_fields(), ['dict_field.embedded.string_field'])
        self.assertEquals(doc._delta(), ({'dict_field.embedded.string_field': 'Hello World'}, {}))

    def test_indexes(self):
        """Ensure that indexes are used when meta['indexes'] is specified.
        """
        class BlogPost(DynamicDocument):
            meta = {
                'indexes': [
                    '-date',
                    ('category', '-date')
                ],
            }

        BlogPost.drop_collection()

        info = BlogPost.objects._collection.index_information()
        # _id, '-date', ('category', '-date')
        # NB: there is no index on _types by itself, since
        # the indexes on -date and ('category', '-date') will both contain
        # _types as the first element in the key
        self.assertEqual(len(info), 3)

        # Indexes are lazy so use list() to perform query
        list(BlogPost.objects)
        info = BlogPost.objects._collection.index_information()
        info = [value['key'] for key, value in info.iteritems()]
        self.assertTrue([('_types', 1), ('category', 1), ('date', -1)]
                        in info)
        self.assertTrue([('_types', 1), ('date', -1)] in info)
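Aside: across the delta tests above, the invariant is that _delta() returns a (sets, unsets) pair. A small sketch of how such a pair maps onto a MongoDB update document (the build_update helper name is invented, not part of mongoengine):

# Sketch: translating a (sets, unsets) delta pair into an update document.
def build_update(delta):
    """delta is the (sets, unsets) tuple returned by Document._delta()."""
    sets, unsets = delta
    update = {}
    if sets:
        update['$set'] = sets
    if unsets:
        update['$unset'] = unsets
    return update

# e.g. ({'age': 24}, {})  -> {'$set': {'age': 24}}
#      ({}, {'misc': 1})  -> {'$unset': {'misc': 1}}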
452	tests/fields.py
@@ -1,20 +1,22 @@
import unittest
import datetime
import os
import unittest
import uuid

from decimal import Decimal

import pymongo
import gridfs

from mongoengine import *
from mongoengine.connection import _get_db
from mongoengine.connection import get_db
from mongoengine.base import _document_registry, NotRegistered

TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png')


class FieldTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = _get_db()
        self.db = get_db()

    def test_default_values(self):
        """Ensure that default field values are used when creating a document.
@@ -44,6 +46,81 @@ class FieldTest(unittest.TestCase):
        person = Person(age=30)
        self.assertRaises(ValidationError, person.validate)

    def test_not_required_handles_none_in_update(self):
        """Ensure that every field accepts None if required is False.
        """

        class HandleNoneFields(Document):
            str_fld = StringField()
            int_fld = IntField()
            flt_fld = FloatField()
            comp_dt_fld = ComplexDateTimeField()

        HandleNoneFields.drop_collection()

        doc = HandleNoneFields()
        doc.str_fld = u'spam ham egg'
        doc.int_fld = 42
        doc.flt_fld = 4.2
        doc.comp_dt_fld = datetime.datetime.utcnow()
        doc.save()

        res = HandleNoneFields.objects(id=doc.id).update(
            set__str_fld=None,
            set__int_fld=None,
            set__flt_fld=None,
            set__comp_dt_fld=None,
        )
        self.assertEqual(res, 1)

        # Retrieve data from the db and verify it.
        ret = HandleNoneFields.objects.all()[0]

        self.assertEqual(ret.str_fld, None)
        self.assertEqual(ret.int_fld, None)
        self.assertEqual(ret.flt_fld, None)

        # Returns the current time if the retrieved value is None.
        self.assertTrue(isinstance(ret.comp_dt_fld, datetime.datetime))

    def test_not_required_handles_none_from_database(self):
        """Ensure that every field can handle null values from the database.
        """

        class HandleNoneFields(Document):
            str_fld = StringField(required=True)
            int_fld = IntField(required=True)
            flt_fld = FloatField(required=True)
            comp_dt_fld = ComplexDateTimeField(required=True)

        HandleNoneFields.drop_collection()

        doc = HandleNoneFields()
        doc.str_fld = u'spam ham egg'
        doc.int_fld = 42
        doc.flt_fld = 4.2
        doc.comp_dt_fld = datetime.datetime.utcnow()
        doc.save()

        collection = self.db[HandleNoneFields._get_collection_name()]
        obj = collection.update({"_id": doc.id}, {"$unset": {
            "str_fld": 1,
            "int_fld": 1,
            "flt_fld": 1,
            "comp_dt_fld": 1}
        })

        # Retrieve data from the db and verify it.
        ret = HandleNoneFields.objects.all()[0]

        self.assertEqual(ret.str_fld, None)
        self.assertEqual(ret.int_fld, None)
        self.assertEqual(ret.flt_fld, None)
        # Returns the current time if the retrieved value is None.
        self.assert_(isinstance(ret.comp_dt_fld, datetime.datetime))

        self.assertRaises(ValidationError, ret.validate)

    def test_object_id_validation(self):
        """Ensure that invalid values cannot be assigned to string fields.
        """
@@ -175,6 +252,26 @@ class FieldTest(unittest.TestCase):
        person.admin = 'Yes'
        self.assertRaises(ValidationError, person.validate)

    def test_uuid_validation(self):
        """Ensure that invalid values cannot be assigned to UUID fields.
        """
        class Person(Document):
            api_key = UUIDField()

        person = Person()
        # any uuid type is valid
        person.api_key = uuid.uuid4()
        person.validate()
        person.api_key = uuid.uuid1()
        person.validate()

        # the trailing 'g' cannot belong to a hex number
        person.api_key = '9d159858-549b-4975-9f98-dd2f987c113g'
        self.assertRaises(ValidationError, person.validate)
        # short strings don't validate
        person.api_key = '9d159858-549b-4975-9f98-dd2f987c113'
        self.assertRaises(ValidationError, person.validate)

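Aside: the UUID test above accepts any uuid.UUID instance and rejects malformed strings. A minimal usage sketch outside the suite (the ApiClient model name is invented):

# Sketch: UUIDField accepts uuid.UUID values; malformed strings fail validate().
import uuid
from mongoengine import Document, UUIDField

class ApiClient(Document):
    token = UUIDField()

client = ApiClient(token=uuid.uuid4())
client.validate()            # passes: any uuid.UUID instance is valid
client.token = 'not-a-uuid'
# client.validate()          # would raise ValidationError, as in the test above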
    def test_datetime_validation(self):
        """Ensure that invalid values cannot be assigned to datetime fields.
        """
@@ -337,27 +434,27 @@ class FieldTest(unittest.TestCase):
        logs = LogEntry.objects.order_by("date")
        count = logs.count()
        i = 0
        while i == count-1:
            self.assertTrue(logs[i].date <= logs[i+1].date)
            i +=1
        while i == count - 1:
            self.assertTrue(logs[i].date <= logs[i + 1].date)
            i += 1

        logs = LogEntry.objects.order_by("-date")
        count = logs.count()
        i = 0
        while i == count-1:
            self.assertTrue(logs[i].date >= logs[i+1].date)
            i +=1
        while i == count - 1:
            self.assertTrue(logs[i].date >= logs[i + 1].date)
            i += 1

        # Test searching
        logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980,1,1))
        logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
        self.assertEqual(logs.count(), 30)

        logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980,1,1))
        logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1))
        self.assertEqual(logs.count(), 30)

        logs = LogEntry.objects.filter(
            date__lte=datetime.datetime(2011,1,1),
            date__gte=datetime.datetime(2000,1,1),
            date__lte=datetime.datetime(2011, 1, 1),
            date__gte=datetime.datetime(2000, 1, 1),
        )
        self.assertEqual(logs.count(), 10)

@@ -459,6 +556,31 @@ class FieldTest(unittest.TestCase):

        BlogPost.drop_collection()

    def test_reverse_list_sorting(self):
        '''Ensure that a reverse sorted list field properly sorts values'''

        class Category(EmbeddedDocument):
            count = IntField()
            name = StringField()

        class CategoryList(Document):
            categories = SortedListField(EmbeddedDocumentField(Category), ordering='count', reverse=True)
            name = StringField()

        catlist = CategoryList(name="Top categories")
        cat1 = Category(name='posts', count=10)
        cat2 = Category(name='food', count=100)
        cat3 = Category(name='drink', count=40)
        catlist.categories = [cat1, cat2, cat3]
        catlist.save()
        catlist.reload()

        self.assertEqual(catlist.categories[0].name, cat2.name)
        self.assertEqual(catlist.categories[1].name, cat3.name)
        self.assertEqual(catlist.categories[2].name, cat1.name)

        CategoryList.drop_collection()

    def test_list_field(self):
        """Ensure that list types work as expected.
        """
@@ -485,7 +607,6 @@ class FieldTest(unittest.TestCase):
        post.info = [{'test': 3}]
        post.save()

        self.assertEquals(BlogPost.objects.count(), 3)
        self.assertEquals(BlogPost.objects.filter(info__exact='test').count(), 1)
        self.assertEquals(BlogPost.objects.filter(info__0__test='test').count(), 1)
@@ -495,6 +616,21 @@ class FieldTest(unittest.TestCase):
        self.assertEquals(BlogPost.objects.filter(info__100__test__exact='test').count(), 0)
        BlogPost.drop_collection()

    def test_list_field_passed_in_value(self):
        class Foo(Document):
            bars = ListField(ReferenceField("Bar"))

        class Bar(Document):
            text = StringField()

        bar = Bar(text="hi")
        bar.save()

        foo = Foo(bars=[])
        foo.bars.append(bar)
        self.assertEquals(repr(foo.bars), '[<Bar: Bar object>]')

    def test_list_field_strict(self):
        """Ensure that list field handles validation if provided a strict field type."""

@@ -515,6 +651,39 @@ class FieldTest(unittest.TestCase):

        Simple.drop_collection()

    def test_list_field_rejects_strings(self):
        """Strings aren't valid list field data types"""

        class Simple(Document):
            mapping = ListField()

        Simple.drop_collection()
        e = Simple()
        e.mapping = 'hello world'

        self.assertRaises(ValidationError, e.save)

    def test_complex_field_required(self):
        """Ensure required fields can't be None / empty"""

        class Simple(Document):
            mapping = ListField(required=True)

        Simple.drop_collection()
        e = Simple()
        e.mapping = []

        self.assertRaises(ValidationError, e.save)

        class Simple(Document):
            mapping = DictField(required=True)

        Simple.drop_collection()
        e = Simple()
        e.mapping = {}

        self.assertRaises(ValidationError, e.save)

    def test_list_field_complex(self):
        """Ensure that the list fields can handle the complex types."""

@@ -582,6 +751,9 @@ class FieldTest(unittest.TestCase):
        post.info = {'the.title': 'test'}
        self.assertRaises(ValidationError, post.validate)

        post.info = {1: 'test'}
        self.assertRaises(ValidationError, post.validate)

        post.info = {'title': 'test'}
        post.save()

@@ -600,6 +772,13 @@ class FieldTest(unittest.TestCase):
        # Confirm handles non strings or non existing keys
        self.assertEquals(BlogPost.objects.filter(info__details__test__exact=5).count(), 0)
        self.assertEquals(BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0)

        post = BlogPost.objects.create(info={'title': 'original'})
        post.info.update({'title': 'updated'})
        post.save()
        post.reload()
        self.assertEquals('updated', post.info['title'])

        BlogPost.drop_collection()

    def test_dictfield_strict(self):
@@ -942,15 +1121,29 @@ class FieldTest(unittest.TestCase):
        class Company(Document):
            name = StringField()

        Product.drop_collection()
        Company.drop_collection()

        ten_gen = Company(name='10gen')
        ten_gen.save()
        mongodb = Product(name='MongoDB', company=ten_gen)
        mongodb.save()

        me = Product(name='MongoEngine')
        me.save()

        obj = Product.objects(company=ten_gen).first()
        self.assertEqual(obj, mongodb)
        self.assertEqual(obj.company, ten_gen)

        obj = Product.objects(company=None).first()
        self.assertEqual(obj, me)

        obj, created = Product.objects.get_or_create(company=None)

        self.assertEqual(created, False)
        self.assertEqual(obj, me)

    def test_reference_query_conversion(self):
        """Ensure that ReferenceFields can be queried using objects and values
        of the type of the primary key of the referenced object.
@@ -1062,7 +1255,6 @@ class FieldTest(unittest.TestCase):
        Post.drop_collection()
        User.drop_collection()

    def test_generic_reference_document_not_registered(self):
        """Ensure dereferencing out of the document registry throws a
        `NotRegistered` error.
@@ -1089,7 +1281,7 @@ class FieldTest(unittest.TestCase):
        user = User.objects.first()
        try:
            user.bookmarks
            raise AssertionError, "Link was removed from the registry"
            raise AssertionError("Link was removed from the registry")
        except NotRegistered:
            pass

@@ -1213,6 +1405,53 @@ class FieldTest(unittest.TestCase):

        Shirt.drop_collection()

    def test_simple_choices_validation(self):
        """Ensure that value is in a container of allowed values.
        """
        class Shirt(Document):
            size = StringField(max_length=3, choices=('S', 'M', 'L', 'XL', 'XXL'))

        Shirt.drop_collection()

        shirt = Shirt()
        shirt.validate()

        shirt.size = "S"
        shirt.validate()

        shirt.size = "XS"
        self.assertRaises(ValidationError, shirt.validate)

        Shirt.drop_collection()

    def test_simple_choices_get_field_display(self):
        """Test dynamic helper for returning the display value of a choices field.
        """
        class Shirt(Document):
            size = StringField(max_length=3, choices=('S', 'M', 'L', 'XL', 'XXL'))
            style = StringField(max_length=3, choices=('Small', 'Baggy', 'wide'), default='Small')

        Shirt.drop_collection()

        shirt = Shirt()

        self.assertEqual(shirt.get_size_display(), None)
        self.assertEqual(shirt.get_style_display(), 'Small')

        shirt.size = "XXL"
        shirt.style = "Baggy"
        self.assertEqual(shirt.get_size_display(), 'XXL')
        self.assertEqual(shirt.get_style_display(), 'Baggy')

        # Set as Z - an invalid choice
        shirt.size = "Z"
        shirt.style = "Z"
        self.assertEqual(shirt.get_size_display(), 'Z')
        self.assertEqual(shirt.get_style_display(), 'Z')
        self.assertRaises(ValidationError, shirt.validate)

        Shirt.drop_collection()

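Aside: the two choices tests above cover both membership validation and the generated get_<field>_display() helper. A minimal usage sketch (the Ticket model name is invented; behaviour mirrors the flat-choices case asserted above):

# Sketch: flat choices validate membership; get_<field>_display() returns
# the stored value itself when choices carry no separate display labels.
from mongoengine import Document, StringField

class Ticket(Document):
    status = StringField(choices=('open', 'closed'), default='open')

t = Ticket()
t.get_status_display()       # -> 'open' (the default value itself)
t.status = 'reopened'
# t.validate()               # would raise ValidationError: not in choices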
    def test_file_fields(self):
        """Ensure that file fields can be written to and their data retrieved
        """
@@ -1289,7 +1528,7 @@ class FieldTest(unittest.TestCase):
        # Make sure FileField is optional and not required
        class DemoFile(Document):
            file = FileField()
        d = DemoFile.objects.create()
        DemoFile.objects.create()

    def test_file_uniqueness(self):
        """Ensure that each instance of a FileField is unique
        """
@@ -1328,6 +1567,95 @@ class FieldTest(unittest.TestCase):

        TestFile.drop_collection()

    def test_image_field(self):

        class TestImage(Document):
            image = ImageField()

        TestImage.drop_collection()

        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'r'))
        t.save()

        t = TestImage.objects.first()

        self.assertEquals(t.image.format, 'PNG')

        w, h = t.image.size
        self.assertEquals(w, 371)
        self.assertEquals(h, 76)

        t.image.delete()

    def test_image_field_resize(self):

        class TestImage(Document):
            image = ImageField(size=(185, 37))

        TestImage.drop_collection()

        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'r'))
        t.save()

        t = TestImage.objects.first()

        self.assertEquals(t.image.format, 'PNG')
        w, h = t.image.size

        self.assertEquals(w, 185)
        self.assertEquals(h, 37)

        t.image.delete()

    def test_image_field_thumbnail(self):

        class TestImage(Document):
            image = ImageField(thumbnail_size=(92, 18))

        TestImage.drop_collection()

        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'r'))
        t.save()

        t = TestImage.objects.first()

        self.assertEquals(t.image.thumbnail.format, 'PNG')
        self.assertEquals(t.image.thumbnail.width, 92)
        self.assertEquals(t.image.thumbnail.height, 18)

        t.image.delete()

    def test_file_multidb(self):
        register_connection('testfiles', 'testfiles')

        class TestFile(Document):
            name = StringField()
            file = FileField(db_alias="testfiles",
                             collection_name="macumba")

        TestFile.drop_collection()

        # delete old filesystem
        get_db("testfiles").macumba.files.drop()
        get_db("testfiles").macumba.chunks.drop()

        # First instance
        testfile = TestFile()
        testfile.name = "Hello, World!"
        testfile.file.put('Hello, World!',
                          name="hello.txt")
        testfile.save()

        data = get_db("testfiles").macumba.files.find_one()
        self.assertEquals(data.get('name'), 'hello.txt')

        testfile = TestFile.objects.first()
        self.assertEquals(testfile.file.read(),
                          'Hello, World!')

    def test_geo_indexes(self):
        """Ensure that indexes are created automatically for GeoPointFields.
        """
@@ -1488,7 +1816,6 @@ class FieldTest(unittest.TestCase):
        c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'})
        self.assertEqual(c['next'], 10)

    def test_generic_embedded_document(self):
        class Car(EmbeddedDocument):
            name = StringField()
@@ -1514,5 +1841,88 @@ class FieldTest(unittest.TestCase):
        person = Person.objects.first()
        self.assertTrue(isinstance(person.like, Dish))

    def test_recursive_validation(self):
        """Ensure that a validation result to_dict is available.
        """
        class Author(EmbeddedDocument):
            name = StringField(required=True)

        class Comment(EmbeddedDocument):
            author = EmbeddedDocumentField(Author, required=True)
            content = StringField(required=True)

        class Post(Document):
            title = StringField(required=True)
            comments = ListField(EmbeddedDocumentField(Comment))

        bob = Author(name='Bob')
        post = Post(title='hello world')
        post.comments.append(Comment(content='hello', author=bob))
        post.comments.append(Comment(author=bob))

        try:
            post.validate()
        except ValidationError, error:
            pass

        # ValidationError.errors property
        self.assertTrue(hasattr(error, 'errors'))
        self.assertTrue(isinstance(error.errors, dict))
        self.assertTrue('comments' in error.errors)
        self.assertTrue(1 in error.errors['comments'])
        self.assertTrue(isinstance(error.errors['comments'][1]['content'],
                                   ValidationError))

        # ValidationError.schema property
        error_dict = error.to_dict()
        self.assertTrue(isinstance(error_dict, dict))
        self.assertTrue('comments' in error_dict)
        self.assertTrue(1 in error_dict['comments'])
        self.assertTrue('content' in error_dict['comments'][1])
        self.assertEquals(error_dict['comments'][1]['content'],
                          u'Field is required ("content")')

        post.comments[1].content = 'here we go'
        post.validate()


class ValidatorErrorTest(unittest.TestCase):

    def test_to_dict(self):
        """Ensure a ValidationError handles error to_dict correctly.
        """
        error = ValidationError('root')
        self.assertEquals(error.to_dict(), {})

        # 1st level error schema
        error.errors = {'1st': ValidationError('bad 1st'), }
        self.assertTrue('1st' in error.to_dict())
        self.assertEquals(error.to_dict()['1st'], 'bad 1st')

        # 2nd level error schema
        error.errors = {'1st': ValidationError('bad 1st', errors={
            '2nd': ValidationError('bad 2nd'),
        })}
        self.assertTrue('1st' in error.to_dict())
        self.assertTrue(isinstance(error.to_dict()['1st'], dict))
        self.assertTrue('2nd' in error.to_dict()['1st'])
        self.assertEquals(error.to_dict()['1st']['2nd'], 'bad 2nd')

        # more levels
        error.errors = {'1st': ValidationError('bad 1st', errors={
            '2nd': ValidationError('bad 2nd', errors={
                '3rd': ValidationError('bad 3rd', errors={
                    '4th': ValidationError('Inception'),
                }),
            }),
        })}
        self.assertTrue('1st' in error.to_dict())
        self.assertTrue('2nd' in error.to_dict()['1st'])
        self.assertTrue('3rd' in error.to_dict()['1st']['2nd'])
        self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd'])
        self.assertEquals(error.to_dict()['1st']['2nd']['3rd']['4th'],
                          'Inception')


if __name__ == '__main__':
    unittest.main()
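Aside: the ValidatorErrorTest above shows that to_dict() nests one dict level per embedded error. A small helper sketch (the flatten_errors name is invented, not part of mongoengine) to turn that structure into dotted paths:

# Sketch: walking a nested error.to_dict() result into dotted paths.
def flatten_errors(error_dict, prefix=''):
    flat = {}
    for key, value in error_dict.items():
        path = '%s.%s' % (prefix, key) if prefix else str(key)
        if isinstance(value, dict):
            flat.update(flatten_errors(value, path))
        else:
            flat[path] = value
    return flat

# {'1st': {'2nd': 'bad 2nd'}} -> {'1st.2nd': 'bad 2nd'}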
@@ -1,9 +1,6 @@
from datetime import datetime
import pymongo

from mongoengine import *
from mongoengine.base import BaseField
from mongoengine.connection import _get_db


class PickleEmbedded(EmbeddedDocument):
@@ -15,6 +12,7 @@ class PickleTest(Document):
    string = StringField(choices=(('One', '1'), ('Two', '2')))
    embedded = EmbeddedDocumentField(PickleEmbedded)
    lists = ListField(StringField())
    photo = FileField()


class Mixin(object):
@@ -22,4 +20,4 @@ class Mixin(object):


class Base(Document):
    pass
    meta = {'allow_inheritance': True}
BIN	tests/mongoengine.png	Normal file
Binary file not shown.
After Width: | Height: | Size: 8.1 KiB
@@ -1,13 +1,14 @@
# -*- coding: utf-8 -*-
import unittest
import pymongo
from bson import ObjectId
from datetime import datetime, timedelta

from mongoengine.queryset import (QuerySet, QuerySetManager,
                                  MultipleObjectsReturned, DoesNotExist,
                                  QueryFieldList)
from mongoengine import *
from mongoengine.connection import _get_connection
from mongoengine.connection import get_connection
from mongoengine.tests import query_counter


@@ -15,10 +16,11 @@ class QuerySetTest(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')

        class Person(Document):
            name = StringField()
            age = IntField()
            meta = {'allow_inheritance': True}
        self.Person = Person

    def test_initialisation(self):
@@ -59,8 +61,7 @@ class QuerySetTest(unittest.TestCase):
        self.assertEqual(len(people), 2)
        results = list(people)
        self.assertTrue(isinstance(results[0], self.Person))
        self.assertTrue(isinstance(results[0].id, (pymongo.objectid.ObjectId,
                                                   str, unicode)))
        self.assertTrue(isinstance(results[0].id, (ObjectId, str, unicode)))
        self.assertEqual(results[0].name, "User A")
        self.assertEqual(results[0].age, 20)
        self.assertEqual(results[1].name, "User B")
@@ -110,6 +111,16 @@ class QuerySetTest(unittest.TestCase):
        people = list(self.Person.objects[80000:80001])
        self.assertEqual(len(people), 0)

        # Test larger slice __repr__
        self.Person.objects.delete()
        for i in xrange(55):
            self.Person(name='A%s' % i, age=i).save()

        self.assertEqual(len(self.Person.objects), 55)
        self.assertEqual("Person object", "%s" % self.Person.objects[0])
        self.assertEqual("[<Person: Person object>, <Person: Person object>]", "%s" % self.Person.objects[1:3])
        self.assertEqual("[<Person: Person object>, <Person: Person object>]", "%s" % self.Person.objects[51:53])

    def test_find_one(self):
        """Ensure that a query using find_one returns a valid result.
        """
@@ -144,6 +155,8 @@ class QuerySetTest(unittest.TestCase):
        person = self.Person.objects.with_id(person1.id)
        self.assertEqual(person.name, "User A")

        self.assertRaises(InvalidQueryError, self.Person.objects(name="User A").with_id, person1.id)

    def test_find_only_one(self):
        """Ensure that a query using ``get`` returns at most one result.
        """
@@ -368,6 +381,34 @@ class QuerySetTest(unittest.TestCase):
        self.assertRaises(OperationError, update_nested)
        Simple.drop_collection()

    def test_update_using_positional_operator_embedded_document(self):
        """Ensure that embedded documents can be updated using the positional
        operator."""

        class Vote(EmbeddedDocument):
            score = IntField()

        class Comment(EmbeddedDocument):
            by = StringField()
            votes = EmbeddedDocumentField(Vote)

        class BlogPost(Document):
            title = StringField()
            comments = ListField(EmbeddedDocumentField(Comment))

        BlogPost.drop_collection()

        c1 = Comment(by="joe", votes=Vote(score=3))
        c2 = Comment(by="jane", votes=Vote(score=7))

        BlogPost(title="ABC", comments=[c1, c2]).save()

        BlogPost.objects(comments__by="joe").update(set__comments__S__votes=Vote(score=4))

        post = BlogPost.objects.first()
        self.assertEquals(post.comments[0].by, 'joe')
        self.assertEquals(post.comments[0].votes.score, 4)

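Aside: the positional-operator test above is driven entirely by the __S__ marker; roughly, the queryset call translates to a raw pymongo update like the following (a sketch, not part of this changeset; the collection name is assumed):

# Sketch: the set__comments__S__votes call above is roughly equivalent to
# this raw pymongo update against the blog_post collection:
#   db.blog_post.update({'comments.by': 'joe'},
#                       {'$set': {'comments.$.votes': {'score': 4}}})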
    def test_mapfield_update(self):
        """Ensure that the MapField can be updated."""
        class Member(EmbeddedDocument):
@@ -455,6 +496,9 @@ class QuerySetTest(unittest.TestCase):

        Blog.drop_collection()

        # Recreates the collection
        self.assertEqual(0, Blog.objects.count())

        with query_counter() as q:
            self.assertEqual(q, 0)

@@ -468,10 +512,10 @@ class QuerySetTest(unittest.TestCase):
            blogs.append(Blog(title="post %s" % i, posts=[post1, post2]))

            Blog.objects.insert(blogs, load_bulk=False)
            self.assertEqual(q, 2)  # 1 for the initial connection and 1 for the insert
            self.assertEqual(q, 1)  # 1 for the insert

            Blog.objects.insert(blogs)
            self.assertEqual(q, 4)  # 1 for insert, and 1 for in bulk
            self.assertEqual(q, 3)  # 1 for insert, and 1 for in bulk fetch (3 in total)

        Blog.drop_collection()

@@ -567,7 +611,13 @@ class QuerySetTest(unittest.TestCase):
        people1 = [person for person in queryset]
        people2 = [person for person in queryset]

        # Check that it still works even if iteration is interrupted.
        for person in queryset:
            break
        people3 = [person for person in queryset]

        self.assertEqual(people1, people2)
        self.assertEqual(people1, people3)

    def test_repr_iteration(self):
        """Ensure that QuerySet __repr__ can handle loops
        """
@@ -1371,20 +1421,39 @@ class QuerySetTest(unittest.TestCase):

        BlogPost.drop_collection()

    def test_update_pull(self):
    def test_update_push_and_pull(self):
        """Ensure that the 'pull' update operation works correctly.
        """
        class BlogPost(Document):
            slug = StringField()
            tags = ListField(StringField())

        post = BlogPost(slug="test", tags=['code', 'mongodb', 'code'])
        BlogPost.drop_collection()

        post = BlogPost(slug="test")
        post.save()

        BlogPost.objects.filter(id=post.id).update(push__tags="code")
        post.reload()
        self.assertEqual(post.tags, ["code"])

        BlogPost.objects.filter(id=post.id).update(push_all__tags=["mongodb", "code"])
        post.reload()
        self.assertEqual(post.tags, ["code", "mongodb", "code"])

        BlogPost.objects(slug="test").update(pull__tags="code")
        post.reload()
        self.assertTrue('code' not in post.tags)
        self.assertEqual(len(post.tags), 1)
        self.assertEqual(post.tags, ["mongodb"])

        BlogPost.objects(slug="test").update(pull_all__tags=["mongodb", "code"])
        post.reload()
        self.assertEqual(post.tags, [])

        BlogPost.objects(slug="test").update(__raw__={"$addToSet": {"tags": {"$each": ["code", "mongodb", "code"]}}})
        post.reload()
        self.assertEqual(post.tags, ["code", "mongodb"])

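Aside: for quick reference, the update modifiers exercised in the test above map onto MongoDB operators as follows (a non-exhaustive summary based on this test):

# Modifier keyword       -> MongoDB operator (as exercised above)
# push__tags="code"      -> {'$push': {'tags': 'code'}}
# push_all__tags=[...]   -> {'$pushAll': {'tags': [...]}}
# pull__tags="code"      -> {'$pull': {'tags': 'code'}}
# pull_all__tags=[...]   -> {'$pullAll': {'tags': [...]}}
# __raw__={'$addToSet': {'tags': {'$each': [...]}}}  -> passed through as-is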
    def test_update_one_pop_generic_reference(self):

@@ -1840,6 +1909,35 @@ class QuerySetTest(unittest.TestCase):
        freq = Person.objects.item_frequencies('city', normalize=True, map_reduce=True)
        self.assertEquals(freq, {'CRB': 0.5, None: 0.5})

    def test_item_frequencies_with_null_embedded(self):
        class Data(EmbeddedDocument):
            name = StringField()

        class Extra(EmbeddedDocument):
            tag = StringField()

        class Person(Document):
            data = EmbeddedDocumentField(Data, required=True)
            extra = EmbeddedDocumentField(Extra)

        Person.drop_collection()

        p = Person()
        p.data = Data(name="Wilson Jr")
        p.save()

        p = Person()
        p.data = Data(name="Wesley")
        p.extra = Extra(tag="friend")
        p.save()

        ot = Person.objects.item_frequencies('extra.tag', map_reduce=False)
        self.assertEquals(ot, {None: 1.0, u'friend': 1.0})

        ot = Person.objects.item_frequencies('extra.tag', map_reduce=True)
        self.assertEquals(ot, {None: 1.0, u'friend': 1.0})

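Aside: the item_frequencies tests above pin down the contract for missing values. A usage sketch reusing the Person model from the test:

# Sketch: item_frequencies tallies values of a field across the queryset,
# bucketing documents that lack the field under None (as asserted above).
frequencies = Person.objects.item_frequencies('extra.tag', map_reduce=True)
# -> {None: 1.0, u'friend': 1.0}
# normalize=True divides each count by the total number of documents.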
    def test_average(self):
        """Ensure that field can be averaged correctly.
        """
@@ -1882,6 +1980,24 @@ class QuerySetTest(unittest.TestCase):
        self.assertEqual(set(self.Person.objects(age=30).distinct('name')),
                         set(['Mr Orange', 'Mr Pink']))

    def test_distinct_handles_references(self):
        class Foo(Document):
            bar = ReferenceField("Bar")

        class Bar(Document):
            text = StringField()

        Bar.drop_collection()
        Foo.drop_collection()

        bar = Bar(text="hi")
        bar.save()

        foo = Foo(bar=bar)
        foo.save()

        self.assertEquals(Foo.objects.distinct("bar"), [bar])

    def test_custom_manager(self):
        """Ensure that custom QuerySetManager instances work as expected.
        """
@@ -2197,10 +2313,10 @@ class QuerySetTest(unittest.TestCase):
        events = Event.objects(location__within_box=box)
        self.assertEqual(events.count(), 1)
        self.assertEqual(events[0].id, event2.id)

        # check that polygon works for users who have a server >= 1.9
        server_version = tuple(
            _get_connection().server_info()['version'].split('.')
            get_connection().server_info()['version'].split('.')
        )
        required_version = tuple("1.9.0".split("."))
        if server_version >= required_version:
@@ -2214,7 +2330,7 @@ class QuerySetTest(unittest.TestCase):
            events = Event.objects(location__within_polygon=polygon)
            self.assertEqual(events.count(), 1)
            self.assertEqual(events[0].id, event1.id)

            polygon2 = [
                (54.033586, -1.742249),
                (52.792797, -1.225891),
@@ -2222,7 +2338,7 @@ class QuerySetTest(unittest.TestCase):
            ]
            events = Event.objects(location__within_polygon=polygon2)
            self.assertEqual(events.count(), 0)

        Event.drop_collection()

    def test_spherical_geospatial_operators(self):
@@ -2569,6 +2685,265 @@ class QuerySetTest(unittest.TestCase):

        self.assertRaises(TypeError, invalid_where)

    def test_scalar(self):

        class Organization(Document):
            id = ObjectIdField('_id')
            name = StringField()

        class User(Document):
            id = ObjectIdField('_id')
            name = StringField()
            organization = ObjectIdField()

        User.drop_collection()
        Organization.drop_collection()

        whitehouse = Organization(name="White House")
        whitehouse.save()
        User(name="Bob Dole", organization=whitehouse.id).save()

        # Efficient way to get all unique organization names for a given
        # set of users (pretend this has additional filtering.)
        user_orgs = set(User.objects.scalar('organization'))
        orgs = Organization.objects(id__in=user_orgs).scalar('name')
        self.assertEqual(list(orgs), ['White House'])

        # Efficient for generating listings, too.
        orgs = Organization.objects.scalar('name').in_bulk(list(user_orgs))
        user_map = User.objects.scalar('name', 'organization')
        user_listing = [(user, orgs[org]) for user, org in user_map]
        self.assertEqual([("Bob Dole", "White House")], user_listing)

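Aside: the scalar tests above and below pin down the projection behaviour; in short (a sketch reusing the User model from test_scalar above):

# Sketch: scalar() projects fields without materialising full documents;
# one field yields plain values, several fields yield tuples.
names = list(User.objects.scalar('name'))                 # [u'Bob Dole']
pairs = list(User.objects.scalar('name', 'organization'))
# [(u'Bob Dole', ObjectId(...))] - cheap joins via in_bulk(), as above.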
||||
def test_scalar_simple(self):
|
||||
class TestDoc(Document):
|
||||
x = IntField()
|
||||
y = BooleanField()
|
||||
|
||||
TestDoc.drop_collection()
|
||||
|
||||
TestDoc(x=10, y=True).save()
|
||||
TestDoc(x=20, y=False).save()
|
||||
TestDoc(x=30, y=True).save()
|
||||
|
||||
plist = list(TestDoc.objects.scalar('x', 'y'))
|
||||
|
||||
self.assertEqual(len(plist), 3)
|
||||
self.assertEqual(plist[0], (10, True))
|
||||
self.assertEqual(plist[1], (20, False))
|
||||
self.assertEqual(plist[2], (30, True))
|
||||
|
||||
class UserDoc(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
|
||||
UserDoc.drop_collection()
|
||||
|
||||
UserDoc(name="Wilson Jr", age=19).save()
|
||||
UserDoc(name="Wilson", age=43).save()
|
||||
UserDoc(name="Eliana", age=37).save()
|
||||
UserDoc(name="Tayza", age=15).save()
|
||||
|
||||
ulist = list(UserDoc.objects.scalar('name', 'age'))
|
||||
|
||||
self.assertEqual(ulist, [
|
||||
(u'Wilson Jr', 19),
|
||||
(u'Wilson', 43),
|
||||
(u'Eliana', 37),
|
||||
(u'Tayza', 15)])
|
||||
|
||||
ulist = list(UserDoc.objects.scalar('name').order_by('age'))
|
||||
|
||||
self.assertEqual(ulist, [
|
||||
(u'Tayza'),
|
||||
(u'Wilson Jr'),
|
||||
(u'Eliana'),
|
||||
(u'Wilson')])
|
||||
|
||||
def test_scalar_embedded(self):
|
||||
class Profile(EmbeddedDocument):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
|
||||
class Locale(EmbeddedDocument):
|
||||
city = StringField()
|
||||
country = StringField()
|
||||
|
||||
class Person(Document):
|
||||
profile = EmbeddedDocumentField(Profile)
|
||||
locale = EmbeddedDocumentField(Locale)
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
Person(profile=Profile(name="Wilson Jr", age=19),
|
||||
locale=Locale(city="Corumba-GO", country="Brazil")).save()
|
||||
|
||||
Person(profile=Profile(name="Gabriel Falcao", age=23),
|
||||
locale=Locale(city="New York", country="USA")).save()
|
||||
|
||||
Person(profile=Profile(name="Lincoln de souza", age=28),
|
||||
locale=Locale(city="Belo Horizonte", country="Brazil")).save()
|
||||
|
||||
Person(profile=Profile(name="Walter cruz", age=30),
|
||||
locale=Locale(city="Brasilia", country="Brazil")).save()
|
||||
|
||||
self.assertEqual(
|
||||
list(Person.objects.order_by('profile__age').scalar('profile__name')),
|
||||
[u'Wilson Jr', u'Gabriel Falcao', u'Lincoln de souza', u'Walter cruz'])
|
||||
|
||||
ulist = list(Person.objects.order_by('locale.city')
|
||||
.scalar('profile__name', 'profile__age', 'locale__city'))
|
||||
self.assertEqual(ulist,
|
||||
[(u'Lincoln de souza', 28, u'Belo Horizonte'),
|
||||
(u'Walter cruz', 30, u'Brasilia'),
|
||||
(u'Wilson Jr', 19, u'Corumba-GO'),
|
||||
(u'Gabriel Falcao', 23, u'New York')])

    def test_scalar_decimal(self):
        from decimal import Decimal

        class Person(Document):
            name = StringField()
            rating = DecimalField()

        Person.drop_collection()
        Person(name="Wilson Jr", rating=Decimal('1.0')).save()

        ulist = list(Person.objects.scalar('name', 'rating'))
        self.assertEqual(ulist, [(u'Wilson Jr', Decimal('1.0'))])

    def test_scalar_reference_field(self):
        class State(Document):
            name = StringField()

        class Person(Document):
            name = StringField()
            state = ReferenceField(State)

        State.drop_collection()
        Person.drop_collection()

        s1 = State(name="Goias")
        s1.save()

        Person(name="Wilson JR", state=s1).save()

        plist = list(Person.objects.scalar('name', 'state'))
        self.assertEqual(plist, [(u'Wilson JR', s1)])
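        # scalar() dereferences the ReferenceField, so each tuple carries the
        # State document itself rather than a raw ObjectId.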

    def test_scalar_generic_reference_field(self):
        class State(Document):
            name = StringField()

        class Person(Document):
            name = StringField()
            state = GenericReferenceField()

        State.drop_collection()
        Person.drop_collection()

        s1 = State(name="Goias")
        s1.save()

        Person(name="Wilson JR", state=s1).save()

        plist = list(Person.objects.scalar('name', 'state'))
        self.assertEqual(plist, [(u'Wilson JR', s1)])

    def test_scalar_db_field(self):

        class TestDoc(Document):
            x = IntField()
            y = BooleanField()

        TestDoc.drop_collection()

        TestDoc(x=10, y=True).save()
        TestDoc(x=20, y=False).save()
        TestDoc(x=30, y=True).save()

        plist = list(TestDoc.objects.scalar('x', 'y'))
        self.assertEqual(len(plist), 3)
        self.assertEqual(plist[0], (10, True))
        self.assertEqual(plist[1], (20, False))
        self.assertEqual(plist[2], (30, True))

    def test_scalar_cursor_behaviour(self):
        """Ensure that a query returns a valid set of results."""
        person1 = self.Person(name="User A", age=20)
        person1.save()
        person2 = self.Person(name="User B", age=30)
        person2.save()

        # Find all people in the collection
        people = self.Person.objects.scalar('name')
        self.assertEqual(len(people), 2)
        results = list(people)
        self.assertEqual(results[0], "User A")
        self.assertEqual(results[1], "User B")

        # Use a query to filter the people found to just person1
        people = self.Person.objects(age=20).scalar('name')
        self.assertEqual(len(people), 1)
        person = people.next()
        self.assertEqual(person, "User A")

        # Test limit
        people = list(self.Person.objects.limit(1).scalar('name'))
        self.assertEqual(len(people), 1)
        self.assertEqual(people[0], 'User A')

        # Test skip
        people = list(self.Person.objects.skip(1).scalar('name'))
        self.assertEqual(len(people), 1)
        self.assertEqual(people[0], 'User B')

        person3 = self.Person(name="User C", age=40)
        person3.save()

        # Test slice limit
        people = list(self.Person.objects[:2].scalar('name'))
        self.assertEqual(len(people), 2)
        self.assertEqual(people[0], 'User A')
        self.assertEqual(people[1], 'User B')

        # Test slice skip
        people = list(self.Person.objects[1:].scalar('name'))
        self.assertEqual(len(people), 2)
        self.assertEqual(people[0], 'User B')
        self.assertEqual(people[1], 'User C')

        # Test slice limit and skip
        people = list(self.Person.objects[1:2].scalar('name'))
        self.assertEqual(len(people), 1)
        self.assertEqual(people[0], 'User B')

        people = list(self.Person.objects[1:1].scalar('name'))
        self.assertEqual(len(people), 0)

        # Test slice out of range
        people = list(self.Person.objects.scalar('name')[80000:80001])
        self.assertEqual(len(people), 0)
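        # Slicing a queryset maps to skip/limit on the underlying cursor, so
        # an out-of-range slice simply yields an empty result.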

        # Test larger slice __repr__
        self.Person.objects.delete()
        for i in xrange(55):
            self.Person(name='A%s' % i, age=i).save()

        self.assertEqual(len(self.Person.objects.scalar('name')), 55)
        self.assertEqual("A0", "%s" % self.Person.objects.order_by('name').scalar('name').first())
        self.assertEqual("A0", "%s" % self.Person.objects.scalar('name').order_by('name')[0])
        self.assertEqual("[u'A1', u'A2']", "%s" % self.Person.objects.order_by('age').scalar('name')[1:3])
        self.assertEqual("[u'A51', u'A52']", "%s" % self.Person.objects.order_by('age').scalar('name')[51:53])

        # with_id and in_bulk
        person = self.Person.objects.order_by('name').first()
        self.assertEqual("A0", "%s" % self.Person.objects.scalar('name').with_id(person.id))

        pks = self.Person.objects.order_by('age').scalar('pk')[1:3]
        self.assertEqual("[u'A1', u'A2']", "%s" % sorted(self.Person.objects.scalar('name').in_bulk(list(pks)).values()))
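        # with_id() and in_bulk() honour scalar() too, returning bare values
        # (and, for in_bulk, a {pk: value} dict) instead of full documents.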


class QTest(unittest.TestCase):

@@ -2790,6 +3165,30 @@ class QueryFieldListTest(unittest.TestCase):
        q += QueryFieldList(fields=['a'], value={"$slice": 5})
        self.assertEqual(q.as_dict(), {'a': {"$slice": 5}})

    def test_elem_match(self):
        class Foo(EmbeddedDocument):
            shape = StringField()
            color = StringField()
            thick = BooleanField()
            meta = {'allow_inheritance': False}

        class Bar(Document):
            foo = ListField(EmbeddedDocumentField(Foo))
            meta = {'allow_inheritance': False}

        Bar.drop_collection()

        b1 = Bar(foo=[Foo(shape="square", color="purple", thick=False),
                      Foo(shape="circle", color="red", thick=True)])
        b1.save()

        b2 = Bar(foo=[Foo(shape="square", color="red", thick=True),
                      Foo(shape="circle", color="purple", thick=False)])
        b2.save()

        ak = list(Bar.objects(foo__match={'shape': "square", "color": "purple"}))
        self.assertEqual([b1], ak)
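        # foo__match translates to an $elemMatch query: a document matches
        # only if a single embedded Foo satisfies all conditions at once,
        # e.g. {'foo': {'$elemMatch': {'shape': 'square', 'color': 'purple'}}}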


if __name__ == '__main__':
    unittest.main()

@@ -56,6 +56,18 @@ class SignalTests(unittest.TestCase):
            @classmethod
            def post_delete(cls, sender, document, **kwargs):
                signal_output.append('post_delete signal, %s' % document)

            @classmethod
            def pre_bulk_insert(cls, sender, documents, **kwargs):
                signal_output.append('pre_bulk_insert signal, %s' % documents)

            @classmethod
            def post_bulk_insert(cls, sender, documents, **kwargs):
                signal_output.append('post_bulk_insert signal, %s' % documents)
                if kwargs.get('loaded', False):
                    signal_output.append('Is loaded')
                else:
                    signal_output.append('Not loaded')

        self.Author = Author

@@ -104,7 +116,9 @@ class SignalTests(unittest.TestCase):
            len(signals.pre_save.receivers),
            len(signals.post_save.receivers),
            len(signals.pre_delete.receivers),
            len(signals.post_delete.receivers),
            len(signals.pre_bulk_insert.receivers),
            len(signals.post_bulk_insert.receivers),
        )

        signals.pre_init.connect(Author.pre_init, sender=Author)
@@ -113,6 +127,8 @@ class SignalTests(unittest.TestCase):
        signals.post_save.connect(Author.post_save, sender=Author)
        signals.pre_delete.connect(Author.pre_delete, sender=Author)
        signals.post_delete.connect(Author.post_delete, sender=Author)
        signals.pre_bulk_insert.connect(Author.pre_bulk_insert, sender=Author)
        signals.post_bulk_insert.connect(Author.post_bulk_insert, sender=Author)

        signals.pre_init.connect(Another.pre_init, sender=Another)
        signals.post_init.connect(Another.post_init, sender=Another)
@@ -128,6 +144,8 @@ class SignalTests(unittest.TestCase):
        signals.pre_delete.disconnect(self.Author.pre_delete)
        signals.post_save.disconnect(self.Author.post_save)
        signals.pre_save.disconnect(self.Author.pre_save)
        signals.pre_bulk_insert.disconnect(self.Author.pre_bulk_insert)
        signals.post_bulk_insert.disconnect(self.Author.post_bulk_insert)

        signals.pre_init.disconnect(self.Another.pre_init)
        signals.post_init.disconnect(self.Another.post_init)
@@ -143,7 +161,9 @@ class SignalTests(unittest.TestCase):
            len(signals.pre_save.receivers),
            len(signals.post_save.receivers),
            len(signals.pre_delete.receivers),
            len(signals.post_delete.receivers),
            len(signals.pre_bulk_insert.receivers),
            len(signals.post_bulk_insert.receivers),
        )

        self.assertEqual(self.pre_signals, post_signals)
@@ -154,6 +174,14 @@ class SignalTests(unittest.TestCase):
        def create_author():
            a1 = self.Author(name='Bill Shakespeare')

        def bulk_create_author_with_load():
            a1 = self.Author(name='Bill Shakespeare')
            self.Author.objects.insert([a1], load_bulk=True)

        def bulk_create_author_without_load():
            a1 = self.Author(name='Bill Shakespeare')
            self.Author.objects.insert([a1], load_bulk=False)
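            # load_bulk=False skips re-fetching the inserted documents, so
            # post_bulk_insert receives the original objects and reports
            # 'Not loaded' (see the assertions below).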

        self.assertEqual(self.get_signal_output(create_author), [
            "pre_init signal, Author",
            "{'name': 'Bill Shakespeare'}",
@@ -178,4 +206,25 @@ class SignalTests(unittest.TestCase):
        self.assertEqual(self.get_signal_output(a1.delete), [
            'pre_delete signal, William Shakespeare',
            'post_delete signal, William Shakespeare',
        ])

        signal_output = self.get_signal_output(bulk_create_author_with_load)

        # The output of this signal is not entirely deterministic. The reloaded
        # object will have an object ID. Hence, we only check part of the output.
        self.assertEqual(signal_output[3],
                         "pre_bulk_insert signal, [<Author: Bill Shakespeare>]")
        self.assertEqual(signal_output[-2:],
                         ["post_bulk_insert signal, [<Author: Bill Shakespeare>]",
                          "Is loaded"])

        self.assertEqual(self.get_signal_output(bulk_create_author_without_load), [
            "pre_init signal, Author",
            "{'name': 'Bill Shakespeare'}",
            "post_init signal, Bill Shakespeare",
            "pre_bulk_insert signal, [<Author: Bill Shakespeare>]",
            "post_bulk_insert signal, [<Author: Bill Shakespeare>]",
            "Not loaded",
        ])

        self.Author.objects.delete()