Compare commits
1 Commits
Author | SHA1 | Date | |
---|---|---|---|
|
3da37fbf6e |
11
.gitignore
vendored
11
.gitignore
vendored
@@ -1,8 +1,5 @@
|
|||||||
.*
|
*.pyc
|
||||||
!.gitignore
|
.*.swp
|
||||||
*~
|
|
||||||
*.py[co]
|
|
||||||
.*.sw[po]
|
|
||||||
*.egg
|
*.egg
|
||||||
docs/.build
|
docs/.build
|
||||||
docs/_build
|
docs/_build
|
||||||
@@ -10,7 +7,3 @@ build/
|
|||||||
dist/
|
dist/
|
||||||
mongoengine.egg-info/
|
mongoengine.egg-info/
|
||||||
env/
|
env/
|
||||||
.settings
|
|
||||||
.project
|
|
||||||
.pydevproject
|
|
||||||
tests/bugfix.py
|
|
||||||
|
97
AUTHORS
97
AUTHORS
@@ -1,102 +1,5 @@
|
|||||||
The PRIMARY AUTHORS are (and/or have been):
|
|
||||||
|
|
||||||
Ross Lawley <ross.lawley@gmail.com>
|
|
||||||
Harry Marr <harry@hmarr.com>
|
Harry Marr <harry@hmarr.com>
|
||||||
Matt Dennewitz <mattdennewitz@gmail.com>
|
Matt Dennewitz <mattdennewitz@gmail.com>
|
||||||
Deepak Thukral <iapain@yahoo.com>
|
Deepak Thukral <iapain@yahoo.com>
|
||||||
Florian Schlachter <flori@n-schlachter.de>
|
Florian Schlachter <flori@n-schlachter.de>
|
||||||
Steve Challis <steve@stevechallis.com>
|
Steve Challis <steve@stevechallis.com>
|
||||||
Wilson Júnior <wilsonpjunior@gmail.com>
|
|
||||||
Dan Crosta https://github.com/dcrosta
|
|
||||||
|
|
||||||
CONTRIBUTORS
|
|
||||||
|
|
||||||
Dervived from the git logs, inevitably incomplete but all of whom and others
|
|
||||||
have submitted patches, reported bugs and generally helped make MongoEngine
|
|
||||||
that much better:
|
|
||||||
|
|
||||||
* Harry Marr
|
|
||||||
* Ross Lawley
|
|
||||||
* blackbrrr
|
|
||||||
* Florian Schlachter
|
|
||||||
* Vincent Driessen
|
|
||||||
* Steve Challis
|
|
||||||
* flosch
|
|
||||||
* Deepak Thukral
|
|
||||||
* Colin Howe
|
|
||||||
* Wilson Júnior
|
|
||||||
* Alistair Roche
|
|
||||||
* Dan Crosta
|
|
||||||
* Viktor Kerkez
|
|
||||||
* Stephan Jaekel
|
|
||||||
* Rached Ben Mustapha
|
|
||||||
* Greg Turner
|
|
||||||
* Daniel Hasselrot
|
|
||||||
* Mircea Pasoi
|
|
||||||
* Matt Chisholm
|
|
||||||
* James Punteney
|
|
||||||
* TimothéePeignier
|
|
||||||
* Stuart Rackham
|
|
||||||
* Serge Matveenko
|
|
||||||
* Matt Dennewitz
|
|
||||||
* Don Spaulding
|
|
||||||
* Ales Zoulek
|
|
||||||
* sshwsfc
|
|
||||||
* sib
|
|
||||||
* Samuel Clay
|
|
||||||
* Nick Vlku
|
|
||||||
* martin
|
|
||||||
* Flavio Amieiro
|
|
||||||
* Анхбаяр Лхагвадорж
|
|
||||||
* Zak Johnson
|
|
||||||
* Victor Farazdagi
|
|
||||||
* vandersonmota
|
|
||||||
* Theo Julienne
|
|
||||||
* sp
|
|
||||||
* Slavi Pantaleev
|
|
||||||
* Richard Henry
|
|
||||||
* Nicolas Perriault
|
|
||||||
* Nick Vlku Jr
|
|
||||||
* Michael Henson
|
|
||||||
* Leo Honkanen
|
|
||||||
* kuno
|
|
||||||
* Josh Ourisman
|
|
||||||
* Jaime
|
|
||||||
* Igor Ivanov
|
|
||||||
* Gregg Lind
|
|
||||||
* Gareth Lloyd
|
|
||||||
* Albert Choi
|
|
||||||
* John Arnfield
|
|
||||||
* grubberr
|
|
||||||
* Paul Aliagas
|
|
||||||
* Paul Cunnane
|
|
||||||
* Julien Rebetez
|
|
||||||
* Marc Tamlyn
|
|
||||||
* Karim Allah
|
|
||||||
* Adam Parrish
|
|
||||||
* jpfarias
|
|
||||||
* jonrscott
|
|
||||||
* Alice Zoë Bevan-McGregor
|
|
||||||
* Stephen Young
|
|
||||||
* tkloc
|
|
||||||
* aid
|
|
||||||
* yamaneko1212
|
|
||||||
* dave mankoff
|
|
||||||
* Alexander G. Morano
|
|
||||||
* jwilder
|
|
||||||
* Joe Shaw
|
|
||||||
* Adam Flynn
|
|
||||||
* Ankhbayar
|
|
||||||
* Jan Schrewe
|
|
||||||
* David Koblas
|
|
||||||
* Crittercism
|
|
||||||
* Alvin Liang
|
|
||||||
* andrewmlevy
|
|
||||||
* Chris Faulkner
|
|
||||||
* Ashwin Purohit
|
|
||||||
* Shalabh Aggarwal
|
|
||||||
* Chris Williams
|
|
||||||
* Robert Kajic
|
|
||||||
* Jacob Peddicord
|
|
||||||
* Nils Hasenbanck
|
|
||||||
* mostlystatic
|
|
13
README.rst
13
README.rst
@@ -3,21 +3,20 @@ MongoEngine
|
|||||||
===========
|
===========
|
||||||
:Info: MongoEngine is an ORM-like layer on top of PyMongo.
|
:Info: MongoEngine is an ORM-like layer on top of PyMongo.
|
||||||
:Author: Harry Marr (http://github.com/hmarr)
|
:Author: Harry Marr (http://github.com/hmarr)
|
||||||
:Maintainer: Ross Lawley (http://github.com/rozza)
|
|
||||||
|
|
||||||
About
|
About
|
||||||
=====
|
=====
|
||||||
MongoEngine is a Python Object-Document Mapper for working with MongoDB.
|
MongoEngine is a Python Object-Document Mapper for working with MongoDB.
|
||||||
Documentation available at http://mongoengine-odm.rtfd.org - there is currently
|
Documentation available at http://hmarr.com/mongoengine/ - there is currently
|
||||||
a `tutorial <http://readthedocs.org/docs/mongoengine-odm/en/latest/tutorial.html>`_, a `user guide
|
a `tutorial <http://hmarr.com/mongoengine/tutorial.html>`_, a `user guide
|
||||||
<http://readthedocs.org/docs/mongoengine-odm/en/latest/userguide.html>`_ and an `API reference
|
<http://hmarr.com/mongoengine/userguide.html>`_ and an `API reference
|
||||||
<http://readthedocs.org/docs/mongoengine-odm/en/latest/apireference.html>`_.
|
<http://hmarr.com/mongoengine/apireference.html>`_.
|
||||||
|
|
||||||
Installation
|
Installation
|
||||||
============
|
============
|
||||||
If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
|
If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
|
||||||
you can use ``easy_install -U mongoengine``. Otherwise, you can download the
|
you can use ``easy_install -U mongoengine``. Otherwise, you can download the
|
||||||
source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python
|
source from `GitHub <http://github.com/hmarr/mongoengine>`_ and run ``python
|
||||||
setup.py install``.
|
setup.py install``.
|
||||||
|
|
||||||
Dependencies
|
Dependencies
|
||||||
@@ -93,6 +92,6 @@ Community
|
|||||||
|
|
||||||
Contributing
|
Contributing
|
||||||
============
|
============
|
||||||
The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ - to
|
The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_ - to
|
||||||
contribute to the project, fork it on GitHub and send a pull request, all
|
contribute to the project, fork it on GitHub and send a pull request, all
|
||||||
contributions and suggestions are welcome!
|
contributions and suggestions are welcome!
|
||||||
|
182
benchmark.py
182
benchmark.py
@@ -1,182 +0,0 @@
|
|||||||
#!/usr/bin/env python
|
|
||||||
|
|
||||||
import timeit
|
|
||||||
|
|
||||||
|
|
||||||
def cprofile_main():
|
|
||||||
from pymongo import Connection
|
|
||||||
connection = Connection()
|
|
||||||
connection.drop_database('timeit_test')
|
|
||||||
connection.disconnect()
|
|
||||||
|
|
||||||
from mongoengine import Document, DictField, connect
|
|
||||||
connect("timeit_test")
|
|
||||||
|
|
||||||
class Noddy(Document):
|
|
||||||
fields = DictField()
|
|
||||||
|
|
||||||
for i in xrange(1):
|
|
||||||
noddy = Noddy()
|
|
||||||
for j in range(20):
|
|
||||||
noddy.fields["key" + str(j)] = "value " + str(j)
|
|
||||||
noddy.save()
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
"""
|
|
||||||
0.4 Performance Figures ...
|
|
||||||
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - Pymongo
|
|
||||||
1.1141769886
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine
|
|
||||||
2.37724113464
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
|
|
||||||
1.92479610443
|
|
||||||
|
|
||||||
0.5.X
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - Pymongo
|
|
||||||
1.10552310944
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine
|
|
||||||
16.5169169903
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
|
|
||||||
14.9446101189
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
|
|
||||||
14.912801981
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine, force=True
|
|
||||||
14.9617750645
|
|
||||||
|
|
||||||
Performance
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - Pymongo
|
|
||||||
1.10072994232
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine
|
|
||||||
5.27341103554
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False
|
|
||||||
4.49365401268
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False
|
|
||||||
4.43459296227
|
|
||||||
----------------------------------------------------------------------------------------------------
|
|
||||||
Creating 10000 dictionaries - MongoEngine, force=True
|
|
||||||
4.40114378929
|
|
||||||
"""
|
|
||||||
|
|
||||||
setup = """
|
|
||||||
from pymongo import Connection
|
|
||||||
connection = Connection()
|
|
||||||
connection.drop_database('timeit_test')
|
|
||||||
"""
|
|
||||||
|
|
||||||
stmt = """
|
|
||||||
from pymongo import Connection
|
|
||||||
connection = Connection()
|
|
||||||
|
|
||||||
db = connection.timeit_test
|
|
||||||
noddy = db.noddy
|
|
||||||
|
|
||||||
for i in xrange(10000):
|
|
||||||
example = {'fields': {}}
|
|
||||||
for j in range(20):
|
|
||||||
example['fields']["key"+str(j)] = "value "+str(j)
|
|
||||||
|
|
||||||
noddy.insert(example)
|
|
||||||
|
|
||||||
myNoddys = noddy.find()
|
|
||||||
[n for n in myNoddys] # iterate
|
|
||||||
"""
|
|
||||||
|
|
||||||
print "-" * 100
|
|
||||||
print """Creating 10000 dictionaries - Pymongo"""
|
|
||||||
t = timeit.Timer(stmt=stmt, setup=setup)
|
|
||||||
print t.timeit(1)
|
|
||||||
|
|
||||||
setup = """
|
|
||||||
from pymongo import Connection
|
|
||||||
connection = Connection()
|
|
||||||
connection.drop_database('timeit_test')
|
|
||||||
connection.disconnect()
|
|
||||||
|
|
||||||
from mongoengine import Document, DictField, connect
|
|
||||||
connect("timeit_test")
|
|
||||||
|
|
||||||
class Noddy(Document):
|
|
||||||
fields = DictField()
|
|
||||||
"""
|
|
||||||
|
|
||||||
stmt = """
|
|
||||||
for i in xrange(10000):
|
|
||||||
noddy = Noddy()
|
|
||||||
for j in range(20):
|
|
||||||
noddy.fields["key"+str(j)] = "value "+str(j)
|
|
||||||
noddy.save()
|
|
||||||
|
|
||||||
myNoddys = Noddy.objects()
|
|
||||||
[n for n in myNoddys] # iterate
|
|
||||||
"""
|
|
||||||
|
|
||||||
print "-" * 100
|
|
||||||
print """Creating 10000 dictionaries - MongoEngine"""
|
|
||||||
t = timeit.Timer(stmt=stmt, setup=setup)
|
|
||||||
print t.timeit(1)
|
|
||||||
|
|
||||||
stmt = """
|
|
||||||
for i in xrange(10000):
|
|
||||||
noddy = Noddy()
|
|
||||||
for j in range(20):
|
|
||||||
noddy.fields["key"+str(j)] = "value "+str(j)
|
|
||||||
noddy.save(safe=False, validate=False)
|
|
||||||
|
|
||||||
myNoddys = Noddy.objects()
|
|
||||||
[n for n in myNoddys] # iterate
|
|
||||||
"""
|
|
||||||
|
|
||||||
print "-" * 100
|
|
||||||
print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False"""
|
|
||||||
t = timeit.Timer(stmt=stmt, setup=setup)
|
|
||||||
print t.timeit(1)
|
|
||||||
|
|
||||||
|
|
||||||
stmt = """
|
|
||||||
for i in xrange(10000):
|
|
||||||
noddy = Noddy()
|
|
||||||
for j in range(20):
|
|
||||||
noddy.fields["key"+str(j)] = "value "+str(j)
|
|
||||||
noddy.save(safe=False, validate=False, cascade=False)
|
|
||||||
|
|
||||||
myNoddys = Noddy.objects()
|
|
||||||
[n for n in myNoddys] # iterate
|
|
||||||
"""
|
|
||||||
|
|
||||||
print "-" * 100
|
|
||||||
print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False"""
|
|
||||||
t = timeit.Timer(stmt=stmt, setup=setup)
|
|
||||||
print t.timeit(1)
|
|
||||||
|
|
||||||
stmt = """
|
|
||||||
for i in xrange(10000):
|
|
||||||
noddy = Noddy()
|
|
||||||
for j in range(20):
|
|
||||||
noddy.fields["key"+str(j)] = "value "+str(j)
|
|
||||||
noddy.save(force_insert=True, safe=False, validate=False, cascade=False)
|
|
||||||
|
|
||||||
myNoddys = Noddy.objects()
|
|
||||||
[n for n in myNoddys] # iterate
|
|
||||||
"""
|
|
||||||
|
|
||||||
print "-" * 100
|
|
||||||
print """Creating 10000 dictionaries - MongoEngine, force=True"""
|
|
||||||
t = timeit.Timer(stmt=stmt, setup=setup)
|
|
||||||
print t.timeit(1)
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
@@ -6,7 +6,6 @@ Connecting
|
|||||||
==========
|
==========
|
||||||
|
|
||||||
.. autofunction:: mongoengine.connect
|
.. autofunction:: mongoengine.connect
|
||||||
.. autofunction:: mongoengine.register_connection
|
|
||||||
|
|
||||||
Documents
|
Documents
|
||||||
=========
|
=========
|
||||||
@@ -22,12 +21,6 @@ Documents
|
|||||||
.. autoclass:: mongoengine.EmbeddedDocument
|
.. autoclass:: mongoengine.EmbeddedDocument
|
||||||
:members:
|
:members:
|
||||||
|
|
||||||
.. autoclass:: mongoengine.DynamicDocument
|
|
||||||
:members:
|
|
||||||
|
|
||||||
.. autoclass:: mongoengine.DynamicEmbeddedDocument
|
|
||||||
:members:
|
|
||||||
|
|
||||||
.. autoclass:: mongoengine.document.MapReduceDocument
|
.. autoclass:: mongoengine.document.MapReduceDocument
|
||||||
:members:
|
:members:
|
||||||
|
|
||||||
@@ -45,24 +38,33 @@ Fields
|
|||||||
======
|
======
|
||||||
|
|
||||||
.. autoclass:: mongoengine.StringField
|
.. autoclass:: mongoengine.StringField
|
||||||
|
|
||||||
.. autoclass:: mongoengine.URLField
|
.. autoclass:: mongoengine.URLField
|
||||||
.. autoclass:: mongoengine.EmailField
|
|
||||||
.. autoclass:: mongoengine.IntField
|
.. autoclass:: mongoengine.IntField
|
||||||
|
|
||||||
.. autoclass:: mongoengine.FloatField
|
.. autoclass:: mongoengine.FloatField
|
||||||
|
|
||||||
.. autoclass:: mongoengine.DecimalField
|
.. autoclass:: mongoengine.DecimalField
|
||||||
.. autoclass:: mongoengine.DateTimeField
|
|
||||||
.. autoclass:: mongoengine.ComplexDateTimeField
|
|
||||||
.. autoclass:: mongoengine.ListField
|
|
||||||
.. autoclass:: mongoengine.SortedListField
|
|
||||||
.. autoclass:: mongoengine.DictField
|
|
||||||
.. autoclass:: mongoengine.MapField
|
|
||||||
.. autoclass:: mongoengine.ObjectIdField
|
|
||||||
.. autoclass:: mongoengine.ReferenceField
|
|
||||||
.. autoclass:: mongoengine.GenericReferenceField
|
|
||||||
.. autoclass:: mongoengine.EmbeddedDocumentField
|
|
||||||
.. autoclass:: mongoengine.GenericEmbeddedDocumentField
|
|
||||||
.. autoclass:: mongoengine.BooleanField
|
.. autoclass:: mongoengine.BooleanField
|
||||||
.. autoclass:: mongoengine.FileField
|
|
||||||
|
.. autoclass:: mongoengine.DateTimeField
|
||||||
|
|
||||||
|
.. autoclass:: mongoengine.EmbeddedDocumentField
|
||||||
|
|
||||||
|
.. autoclass:: mongoengine.DictField
|
||||||
|
|
||||||
|
.. autoclass:: mongoengine.ListField
|
||||||
|
|
||||||
.. autoclass:: mongoengine.BinaryField
|
.. autoclass:: mongoengine.BinaryField
|
||||||
|
|
||||||
|
.. autoclass:: mongoengine.ObjectIdField
|
||||||
|
|
||||||
|
.. autoclass:: mongoengine.ReferenceField
|
||||||
|
|
||||||
|
.. autoclass:: mongoengine.GenericReferenceField
|
||||||
|
|
||||||
|
.. autoclass:: mongoengine.FileField
|
||||||
|
|
||||||
.. autoclass:: mongoengine.GeoPointField
|
.. autoclass:: mongoengine.GeoPointField
|
||||||
.. autoclass:: mongoengine.SequenceField
|
|
||||||
|
@@ -2,165 +2,6 @@
|
|||||||
Changelog
|
Changelog
|
||||||
=========
|
=========
|
||||||
|
|
||||||
Changes in 0.6.X
|
|
||||||
================
|
|
||||||
|
|
||||||
- updated replicasetconnection - pop port if exists
|
|
||||||
- bug fix for unknown connection alias error message
|
|
||||||
|
|
||||||
Changes in 0.6.3
|
|
||||||
================
|
|
||||||
- Updated sessions for Django 1.4
|
|
||||||
- Bug fix for updates where listfields contain embedded documents
|
|
||||||
- Bug fix for collection naming and mixins
|
|
||||||
|
|
||||||
Changes in 0.6.2
|
|
||||||
================
|
|
||||||
- Updated documentation for ReplicaSet connections
|
|
||||||
- Hack round _types issue with SERVER-5247 - querying other arrays may also cause problems.
|
|
||||||
|
|
||||||
Changes in 0.6.1
|
|
||||||
================
|
|
||||||
- Fix for replicaSet connections
|
|
||||||
|
|
||||||
Changes in 0.6
|
|
||||||
================
|
|
||||||
|
|
||||||
- Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7
|
|
||||||
- Added support for covered indexes when inheritance is off
|
|
||||||
- No longer always upsert on save for items with a '_id'
|
|
||||||
- Error raised if update doesn't have an operation
|
|
||||||
- DeReferencing is now thread safe
|
|
||||||
- Errors raised if trying to perform a join in a query
|
|
||||||
- Updates can now take __raw__ queries
|
|
||||||
- Added custom 2D index declarations
|
|
||||||
- Added replicaSet connection support
|
|
||||||
- Updated deprecated imports from pymongo (safe for pymongo 2.2)
|
|
||||||
- Added uri support for connections
|
|
||||||
- Added scalar for efficiently returning partial data values (aliased to values_list)
|
|
||||||
- Fixed limit skip bug
|
|
||||||
- Improved Inheritance / Mixin
|
|
||||||
- Added sharding support
|
|
||||||
- Added pymongo 2.1 support
|
|
||||||
- Fixed Abstract documents can now declare indexes
|
|
||||||
- Added db_alias support to individual documents
|
|
||||||
- Fixed GridFS documents can now be pickled
|
|
||||||
- Added Now raises an InvalidDocumentError when declaring multiple fields with the same db_field
|
|
||||||
- Added InvalidQueryError when calling with_id with a filter
|
|
||||||
- Added support for DBRefs in distinct()
|
|
||||||
- Fixed issue saving False booleans
|
|
||||||
- Fixed issue with dynamic documents deltas
|
|
||||||
- Added Reverse Delete Rule support to ListFields - MapFields aren't supported
|
|
||||||
- Added customisable cascade kwarg options
|
|
||||||
- Fixed Handle None values for non-required fields
|
|
||||||
- Removed Document._get_subclasses() - no longer required
|
|
||||||
- Fixed bug requiring subclasses when not actually needed
|
|
||||||
- Fixed deletion of dynamic data
|
|
||||||
- Added support for the $elementMatch operator
|
|
||||||
- Added reverse option to SortedListFields
|
|
||||||
- Fixed dereferencing - multi directional list dereferencing
|
|
||||||
- Fixed issue creating indexes with recursive embedded documents
|
|
||||||
- Fixed recursive lookup in _unique_with_indexes
|
|
||||||
- Fixed passing ComplexField defaults to constructor for ReferenceFields
|
|
||||||
- Fixed validation of DictField Int keys
|
|
||||||
- Added optional cascade saving
|
|
||||||
- Fixed dereferencing - max_depth now taken into account
|
|
||||||
- Fixed document mutation saving issue
|
|
||||||
- Fixed positional operator when replacing embedded documents
|
|
||||||
- Added Non-Django Style choices back (you can have either)
|
|
||||||
- Fixed __repr__ of a sliced queryset
|
|
||||||
- Added recursive validation error of documents / complex fields
|
|
||||||
- Fixed breaking during queryset iteration
|
|
||||||
- Added pre and post bulk-insert signals
|
|
||||||
- Added ImageField - requires PIL
|
|
||||||
- Fixed Reference Fields can be None in get_or_create / queries
|
|
||||||
- Fixed accessing pk on an embedded document
|
|
||||||
- Fixed calling a queryset after drop_collection now recreates the collection
|
|
||||||
- Add field name to validation exception messages
|
|
||||||
- Added UUID field
|
|
||||||
- Improved efficiency of .get()
|
|
||||||
- Updated ComplexFields so if required they won't accept empty lists / dicts
|
|
||||||
- Added spec file for rpm-based distributions
|
|
||||||
- Fixed ListField so it doesnt accept strings
|
|
||||||
- Added DynamicDocument and EmbeddedDynamicDocument classes for expando schemas
|
|
||||||
|
|
||||||
Changes in v0.5.2
|
|
||||||
=================
|
|
||||||
|
|
||||||
- A Robust Circular reference bugfix
|
|
||||||
|
|
||||||
|
|
||||||
Changes in v0.5.1
|
|
||||||
=================
|
|
||||||
|
|
||||||
- Fixed simple circular reference bug
|
|
||||||
|
|
||||||
Changes in v0.5
|
|
||||||
===============
|
|
||||||
|
|
||||||
- Added InvalidDocumentError - so Document core methods can't be overwritten
|
|
||||||
- Added GenericEmbeddedDocument - so you can embed any type of embeddable document
|
|
||||||
- Added within_polygon support - for those with mongodb 1.9
|
|
||||||
- Updated sum / average to use map_reduce as db.eval doesn't work in sharded environments
|
|
||||||
- Added where() - filter to allowing users to specify query expressions as Javascript
|
|
||||||
- Added SequenceField - for creating sequential counters
|
|
||||||
- Added update() convenience method to a document
|
|
||||||
- Added cascading saves - so changes to Referenced documents are saved on .save()
|
|
||||||
- Added select_related() support
|
|
||||||
- Added support for the positional operator
|
|
||||||
- Updated geo index checking to be recursive and check in embedded documents
|
|
||||||
- Updated default collection naming convention
|
|
||||||
- Added Document Mixin support
|
|
||||||
- Fixed queryet __repr__ mid iteration
|
|
||||||
- Added hint() support, so cantell Mongo the proper index to use for the query
|
|
||||||
- Fixed issue with inconsitent setting of _cls breaking inherited referencing
|
|
||||||
- Added help_text and verbose_name to fields to help with some form libs
|
|
||||||
- Updated item_frequencies to handle embedded document lookups
|
|
||||||
- Added delta tracking now only sets / unsets explicitly changed fields
|
|
||||||
- Fixed saving so sets updated values rather than overwrites
|
|
||||||
- Added ComplexDateTimeField - Handles datetimes correctly with microseconds
|
|
||||||
- Added ComplexBaseField - for improved flexibility and performance
|
|
||||||
- Added get_FIELD_display() method for easy choice field displaying
|
|
||||||
- Added queryset.slave_okay(enabled) method
|
|
||||||
- Updated queryset.timeout(enabled) and queryset.snapshot(enabled) to be chainable
|
|
||||||
- Added insert method for bulk inserts
|
|
||||||
- Added blinker signal support
|
|
||||||
- Added query_counter context manager for tests
|
|
||||||
- Added map_reduce method item_frequencies and set as default (as db.eval doesn't work in sharded environments)
|
|
||||||
- Added inline_map_reduce option to map_reduce
|
|
||||||
- Updated connection exception so it provides more info on the cause.
|
|
||||||
- Added searching multiple levels deep in ``DictField``
|
|
||||||
- Added ``DictField`` entries containing strings to use matching operators
|
|
||||||
- Added ``MapField``, similar to ``DictField``
|
|
||||||
- Added Abstract Base Classes
|
|
||||||
- Added Custom Objects Managers
|
|
||||||
- Added sliced subfields updating
|
|
||||||
- Added ``NotRegistered`` exception if dereferencing ``Document`` not in the registry
|
|
||||||
- Added a write concern for ``save``, ``update``, ``update_one`` and ``get_or_create``
|
|
||||||
- Added slicing / subarray fetching controls
|
|
||||||
- Fixed various unique index and other index issues
|
|
||||||
- Fixed threaded connection issues
|
|
||||||
- Added spherical geospatial query operators
|
|
||||||
- Updated queryset to handle latest version of pymongo
|
|
||||||
map_reduce now requires an output.
|
|
||||||
- Added ``Document`` __hash__, __ne__ for pickling
|
|
||||||
- Added ``FileField`` optional size arg for read method
|
|
||||||
- Fixed ``FileField`` seek and tell methods for reading files
|
|
||||||
- Added ``QuerySet.clone`` to support copying querysets
|
|
||||||
- Fixed item_frequencies when using name thats the same as a native js function
|
|
||||||
- Added reverse delete rules
|
|
||||||
- Fixed issue with unset operation
|
|
||||||
- Fixed Q-object bug
|
|
||||||
- Added ``QuerySet.all_fields`` resets previous .only() and .exclude()
|
|
||||||
- Added ``QuerySet.exclude``
|
|
||||||
- Added django style choices
|
|
||||||
- Fixed order and filter issue
|
|
||||||
- Added ``QuerySet.only`` subfield support
|
|
||||||
- Added creation_counter to ``BaseField`` allowing fields to be sorted in the
|
|
||||||
way the user has specified them
|
|
||||||
- Fixed various errors
|
|
||||||
- Added many tests
|
|
||||||
|
|
||||||
Changes in v0.4
|
Changes in v0.4
|
||||||
===============
|
===============
|
||||||
- Added ``GridFSStorage`` Django storage backend
|
- Added ``GridFSStorage`` Django storage backend
|
||||||
|
@@ -38,7 +38,7 @@ master_doc = 'index'
|
|||||||
|
|
||||||
# General information about the project.
|
# General information about the project.
|
||||||
project = u'MongoEngine'
|
project = u'MongoEngine'
|
||||||
copyright = u'2009-2012, MongoEngine Authors'
|
copyright = u'2009-2010, Harry Marr'
|
||||||
|
|
||||||
# The version info for the project you're documenting, acts as replacement for
|
# The version info for the project you're documenting, acts as replacement for
|
||||||
# |version| and |release|, also used in various other places throughout the
|
# |version| and |release|, also used in various other places throughout the
|
||||||
@@ -121,7 +121,7 @@ html_theme_path = ['_themes']
|
|||||||
# Add any paths that contain custom static files (such as style sheets) here,
|
# Add any paths that contain custom static files (such as style sheets) here,
|
||||||
# relative to this directory. They are copied after the builtin static files,
|
# relative to this directory. They are copied after the builtin static files,
|
||||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||||
#html_static_path = ['_static']
|
html_static_path = ['_static']
|
||||||
|
|
||||||
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||||
# using the given strftime format.
|
# using the given strftime format.
|
||||||
|
@@ -2,8 +2,6 @@
|
|||||||
Using MongoEngine with Django
|
Using MongoEngine with Django
|
||||||
=============================
|
=============================
|
||||||
|
|
||||||
.. note :: Updated to support Django 1.4
|
|
||||||
|
|
||||||
Connecting
|
Connecting
|
||||||
==========
|
==========
|
||||||
In your **settings.py** file, ignore the standard database settings (unless you
|
In your **settings.py** file, ignore the standard database settings (unless you
|
||||||
@@ -51,11 +49,10 @@ Storage
|
|||||||
=======
|
=======
|
||||||
With MongoEngine's support for GridFS via the :class:`~mongoengine.FileField`,
|
With MongoEngine's support for GridFS via the :class:`~mongoengine.FileField`,
|
||||||
it is useful to have a Django file storage backend that wraps this. The new
|
it is useful to have a Django file storage backend that wraps this. The new
|
||||||
storage module is called :class:`~mongoengine.django.storage.GridFSStorage`.
|
storage module is called :class:`~mongoengine.django.GridFSStorage`. Using it
|
||||||
Using it is very similar to using the default FileSystemStorage.::
|
is very similar to using the default FileSystemStorage.::
|
||||||
|
|
||||||
from mongoengine.django.storage import GridFSStorage
|
fs = mongoengine.django.GridFSStorage()
|
||||||
fs = GridFSStorage()
|
|
||||||
|
|
||||||
filename = fs.save('hello.txt', 'Hello, World!')
|
filename = fs.save('hello.txt', 'Hello, World!')
|
||||||
|
|
||||||
|
@@ -3,7 +3,6 @@
|
|||||||
=====================
|
=====================
|
||||||
Connecting to MongoDB
|
Connecting to MongoDB
|
||||||
=====================
|
=====================
|
||||||
|
|
||||||
To connect to a running instance of :program:`mongod`, use the
|
To connect to a running instance of :program:`mongod`, use the
|
||||||
:func:`~mongoengine.connect` function. The first argument is the name of the
|
:func:`~mongoengine.connect` function. The first argument is the name of the
|
||||||
database to connect to. If the database does not exist, it will be created. If
|
database to connect to. If the database does not exist, it will be created. If
|
||||||
@@ -19,47 +18,3 @@ provide :attr:`host` and :attr:`port` arguments to
|
|||||||
:func:`~mongoengine.connect`::
|
:func:`~mongoengine.connect`::
|
||||||
|
|
||||||
connect('project1', host='192.168.1.35', port=12345)
|
connect('project1', host='192.168.1.35', port=12345)
|
||||||
|
|
||||||
Uri style connections are also supported as long as you include the database
|
|
||||||
name - just supply the uri as the :attr:`host` to
|
|
||||||
:func:`~mongoengine.connect`::
|
|
||||||
|
|
||||||
connect('project1', host='mongodb://localhost/database_name')
|
|
||||||
|
|
||||||
ReplicaSets
|
|
||||||
===========
|
|
||||||
|
|
||||||
MongoEngine now supports :func:`~pymongo.replica_set_connection.ReplicaSetConnection`
|
|
||||||
to use them please use a URI style connection and provide the `replicaSet` name in the
|
|
||||||
connection kwargs.
|
|
||||||
|
|
||||||
Multiple Databases
|
|
||||||
==================
|
|
||||||
|
|
||||||
Multiple database support was added in MongoEngine 0.6. To use multiple
|
|
||||||
databases you can use :func:`~mongoengine.connect` and provide an `alias` name
|
|
||||||
for the connection - if no `alias` is provided then "default" is used.
|
|
||||||
|
|
||||||
In the background this uses :func:`~mongoengine.register_connection` to
|
|
||||||
store the data and you can register all aliases up front if required.
|
|
||||||
|
|
||||||
Individual documents can also support multiple databases by providing a
|
|
||||||
`db_alias` in their meta data. This allows :class:`~pymongo.dbref.DBRef` objects
|
|
||||||
to point across databases and collections. Below is an example schema, using
|
|
||||||
3 different databases to store data::
|
|
||||||
|
|
||||||
class User(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
meta = {"db_alias": "user-db"}
|
|
||||||
|
|
||||||
class Book(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
meta = {"db_alias": "book-db"}
|
|
||||||
|
|
||||||
class AuthorBooks(Document):
|
|
||||||
author = ReferenceField(User)
|
|
||||||
book = ReferenceField(Book)
|
|
||||||
|
|
||||||
meta = {"db_alias": "users-books-db"}
|
|
||||||
|
@@ -24,34 +24,6 @@ objects** as class attributes to the document class::
|
|||||||
title = StringField(max_length=200, required=True)
|
title = StringField(max_length=200, required=True)
|
||||||
date_modified = DateTimeField(default=datetime.datetime.now)
|
date_modified = DateTimeField(default=datetime.datetime.now)
|
||||||
|
|
||||||
Dynamic document schemas
|
|
||||||
========================
|
|
||||||
One of the benefits of MongoDb is dynamic schemas for a collection, whilst data
|
|
||||||
should be planned and organised (after all explicit is better than implicit!)
|
|
||||||
there are scenarios where having dynamic / expando style documents is desirable.
|
|
||||||
|
|
||||||
:class:`~mongoengine.DynamicDocument` documents work in the same way as
|
|
||||||
:class:`~mongoengine.Document` but any data / attributes set to them will also
|
|
||||||
be saved ::
|
|
||||||
|
|
||||||
from mongoengine import *
|
|
||||||
|
|
||||||
class Page(DynamicDocument):
|
|
||||||
title = StringField(max_length=200, required=True)
|
|
||||||
|
|
||||||
# Create a new page and add tags
|
|
||||||
>>> page = Page(title='Using MongoEngine')
|
|
||||||
>>> page.tags = ['mongodb', 'mongoengine']
|
|
||||||
>>> page.save()
|
|
||||||
|
|
||||||
>>> Page.objects(tags='mongoengine').count()
|
|
||||||
>>> 1
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
There is one caveat on Dynamic Documents: fields cannot start with `_`
|
|
||||||
|
|
||||||
|
|
||||||
Fields
|
Fields
|
||||||
======
|
======
|
||||||
By default, fields are not required. To make a field mandatory, set the
|
By default, fields are not required. To make a field mandatory, set the
|
||||||
@@ -64,26 +36,22 @@ are as follows:
|
|||||||
|
|
||||||
* :class:`~mongoengine.StringField`
|
* :class:`~mongoengine.StringField`
|
||||||
* :class:`~mongoengine.URLField`
|
* :class:`~mongoengine.URLField`
|
||||||
* :class:`~mongoengine.EmailField`
|
|
||||||
* :class:`~mongoengine.IntField`
|
* :class:`~mongoengine.IntField`
|
||||||
* :class:`~mongoengine.FloatField`
|
* :class:`~mongoengine.FloatField`
|
||||||
* :class:`~mongoengine.DecimalField`
|
* :class:`~mongoengine.DecimalField`
|
||||||
* :class:`~mongoengine.DateTimeField`
|
* :class:`~mongoengine.DateTimeField`
|
||||||
* :class:`~mongoengine.ComplexDateTimeField`
|
|
||||||
* :class:`~mongoengine.ListField`
|
* :class:`~mongoengine.ListField`
|
||||||
* :class:`~mongoengine.SortedListField`
|
|
||||||
* :class:`~mongoengine.DictField`
|
* :class:`~mongoengine.DictField`
|
||||||
* :class:`~mongoengine.MapField`
|
|
||||||
* :class:`~mongoengine.ObjectIdField`
|
* :class:`~mongoengine.ObjectIdField`
|
||||||
|
* :class:`~mongoengine.EmbeddedDocumentField`
|
||||||
* :class:`~mongoengine.ReferenceField`
|
* :class:`~mongoengine.ReferenceField`
|
||||||
* :class:`~mongoengine.GenericReferenceField`
|
* :class:`~mongoengine.GenericReferenceField`
|
||||||
* :class:`~mongoengine.EmbeddedDocumentField`
|
|
||||||
* :class:`~mongoengine.GenericEmbeddedDocumentField`
|
|
||||||
* :class:`~mongoengine.BooleanField`
|
* :class:`~mongoengine.BooleanField`
|
||||||
* :class:`~mongoengine.FileField`
|
* :class:`~mongoengine.FileField`
|
||||||
|
* :class:`~mongoengine.EmailField`
|
||||||
|
* :class:`~mongoengine.SortedListField`
|
||||||
* :class:`~mongoengine.BinaryField`
|
* :class:`~mongoengine.BinaryField`
|
||||||
* :class:`~mongoengine.GeoPointField`
|
* :class:`~mongoengine.GeoPointField`
|
||||||
* :class:`~mongoengine.SequenceField`
|
|
||||||
|
|
||||||
Field arguments
|
Field arguments
|
||||||
---------------
|
---------------
|
||||||
@@ -135,34 +103,7 @@ arguments can be set on all fields:
|
|||||||
When True, use this field as a primary key for the collection.
|
When True, use this field as a primary key for the collection.
|
||||||
|
|
||||||
:attr:`choices` (Default: None)
|
:attr:`choices` (Default: None)
|
||||||
An iterable (e.g. a list or tuple) of choices to which the value of this
|
An iterable of choices to which the value of this field should be limited.
|
||||||
field should be limited.
|
|
||||||
|
|
||||||
Can either be nested tuples of a value (stored in mongo) and a
|
|
||||||
human readable key ::
|
|
||||||
|
|
||||||
SIZE = (('S', 'Small'),
|
|
||||||
('M', 'Medium'),
|
|
||||||
('L', 'Large'),
|
|
||||||
('XL', 'Extra Large'),
|
|
||||||
('XXL', 'Extra Extra Large'))
|
|
||||||
|
|
||||||
|
|
||||||
class Shirt(Document):
|
|
||||||
size = StringField(max_length=3, choices=SIZE)
|
|
||||||
|
|
||||||
Or a flat iterable just containing values ::
|
|
||||||
|
|
||||||
SIZE = ('S', 'M', 'L', 'XL', 'XXL')
|
|
||||||
|
|
||||||
class Shirt(Document):
|
|
||||||
size = StringField(max_length=3, choices=SIZE)
|
|
||||||
|
|
||||||
:attr:`help_text` (Default: None)
|
|
||||||
Optional help text to output with the field - used by form libraries
|
|
||||||
|
|
||||||
:attr:`verbose_name` (Default: None)
|
|
||||||
Optional human-readable name for the field - used by form libraries
|
|
||||||
|
|
||||||
|
|
||||||
List fields
|
List fields
|
||||||
@@ -193,8 +134,8 @@ document class as the first argument::
|
|||||||
class Page(Document):
|
class Page(Document):
|
||||||
comments = ListField(EmbeddedDocumentField(Comment))
|
comments = ListField(EmbeddedDocumentField(Comment))
|
||||||
|
|
||||||
comment1 = Comment(content='Good work!')
|
comment1 = Comment('Good work!')
|
||||||
comment2 = Comment(content='Nice article!')
|
comment2 = Comment('Nice article!')
|
||||||
page = Page(comments=[comment1, comment2])
|
page = Page(comments=[comment1, comment2])
|
||||||
|
|
||||||
Dictionary Fields
|
Dictionary Fields
|
||||||
@@ -214,9 +155,6 @@ store; in this situation a :class:`~mongoengine.DictField` is appropriate::
|
|||||||
survey_response.answers = response_form.cleaned_data()
|
survey_response.answers = response_form.cleaned_data()
|
||||||
survey_response.save()
|
survey_response.save()
|
||||||
|
|
||||||
Dictionaries can store complex data, other dictionaries, lists, references to
|
|
||||||
other objects, so are the most flexible field type available.
|
|
||||||
|
|
||||||
Reference fields
|
Reference fields
|
||||||
----------------
|
----------------
|
||||||
References may be stored to other documents in the database using the
|
References may be stored to other documents in the database using the
|
||||||
@@ -255,59 +193,6 @@ as the constructor's argument::
|
|||||||
class ProfilePage(Document):
|
class ProfilePage(Document):
|
||||||
content = StringField()
|
content = StringField()
|
||||||
|
|
||||||
|
|
||||||
Dealing with deletion of referred documents
|
|
||||||
'''''''''''''''''''''''''''''''''''''''''''
|
|
||||||
By default, MongoDB doesn't check the integrity of your data, so deleting
|
|
||||||
documents that other documents still hold references to will lead to consistency
|
|
||||||
issues. Mongoengine's :class:`ReferenceField` adds some functionality to
|
|
||||||
safeguard against these kinds of database integrity problems, providing each
|
|
||||||
reference with a delete rule specification. A delete rule is specified by
|
|
||||||
supplying the :attr:`reverse_delete_rule` attributes on the
|
|
||||||
:class:`ReferenceField` definition, like this::
|
|
||||||
|
|
||||||
class Employee(Document):
|
|
||||||
...
|
|
||||||
profile_page = ReferenceField('ProfilePage', reverse_delete_rule=mongoengine.CASCADE)
|
|
||||||
|
|
||||||
The declaration in this example means that when an :class:`Employee` object is
|
|
||||||
removed, the :class:`ProfilePage` that belongs to that employee is removed as
|
|
||||||
well. If a whole batch of employees is removed, all profile pages that are
|
|
||||||
linked are removed as well.
|
|
||||||
|
|
||||||
Its value can take any of the following constants:
|
|
||||||
|
|
||||||
:const:`mongoengine.DO_NOTHING`
|
|
||||||
This is the default and won't do anything. Deletes are fast, but may cause
|
|
||||||
database inconsistency or dangling references.
|
|
||||||
:const:`mongoengine.DENY`
|
|
||||||
Deletion is denied if there still exist references to the object being
|
|
||||||
deleted.
|
|
||||||
:const:`mongoengine.NULLIFY`
|
|
||||||
Any object's fields still referring to the object being deleted are removed
|
|
||||||
(using MongoDB's "unset" operation), effectively nullifying the relationship.
|
|
||||||
:const:`mongoengine.CASCADE`
|
|
||||||
Any object containing fields that are referring to the object being deleted
|
|
||||||
are deleted first.
|
|
||||||
|
|
||||||
|
|
||||||
.. warning::
|
|
||||||
A safety note on setting up these delete rules! Since the delete rules are
|
|
||||||
not recorded on the database level by MongoDB itself, but instead at runtime,
|
|
||||||
in-memory, by the MongoEngine module, it is of the utmost importance
|
|
||||||
that the module that declares the relationship is loaded **BEFORE** the
|
|
||||||
delete is invoked.
|
|
||||||
|
|
||||||
If, for example, the :class:`Employee` object lives in the
|
|
||||||
:mod:`payroll` app, and the :class:`ProfilePage` in the :mod:`people`
|
|
||||||
app, it is extremely important that the :mod:`people` app is loaded
|
|
||||||
before any employee is removed, because otherwise, MongoEngine could
|
|
||||||
never know this relationship exists.
|
|
||||||
|
|
||||||
In Django, be sure to put all apps that have such delete rule declarations in
|
|
||||||
their :file:`models.py` in the :const:`INSTALLED_APPS` tuple.
|
|
||||||
|
|
||||||
|
|
||||||
Generic reference fields
|
Generic reference fields
|
||||||
''''''''''''''''''''''''
|
''''''''''''''''''''''''
|
||||||
A second kind of reference field also exists,
|
A second kind of reference field also exists,
|
||||||
@@ -334,7 +219,6 @@ kind of :class:`~mongoengine.Document`, and hence doesn't take a
|
|||||||
Bookmark(bookmark_object=post).save()
|
Bookmark(bookmark_object=post).save()
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
Using :class:`~mongoengine.GenericReferenceField`\ s is slightly less
|
Using :class:`~mongoengine.GenericReferenceField`\ s is slightly less
|
||||||
efficient than the standard :class:`~mongoengine.ReferenceField`\ s, so if
|
efficient than the standard :class:`~mongoengine.ReferenceField`\ s, so if
|
||||||
you will only be referencing one document type, prefer the standard
|
you will only be referencing one document type, prefer the standard
|
||||||
@@ -404,10 +288,9 @@ Indexes
|
|||||||
You can specify indexes on collections to make querying faster. This is done
|
You can specify indexes on collections to make querying faster. This is done
|
||||||
by creating a list of index specifications called :attr:`indexes` in the
|
by creating a list of index specifications called :attr:`indexes` in the
|
||||||
:attr:`~mongoengine.Document.meta` dictionary, where an index specification may
|
:attr:`~mongoengine.Document.meta` dictionary, where an index specification may
|
||||||
either be a single field name, a tuple containing multiple field names, or a
|
either be a single field name, or a tuple containing multiple field names. A
|
||||||
dictionary containing a full index definition. A direction may be specified on
|
direction may be specified on fields by prefixing the field name with a **+**
|
||||||
fields by prefixing the field name with a **+** or a **-** sign. Note that
|
or a **-** sign. Note that direction only matters on multi-field indexes. ::
|
||||||
direction only matters on multi-field indexes. ::
|
|
||||||
|
|
||||||
class Page(Document):
|
class Page(Document):
|
||||||
title = StringField()
|
title = StringField()
|
||||||
@@ -416,47 +299,10 @@ direction only matters on multi-field indexes. ::
|
|||||||
'indexes': ['title', ('title', '-rating')]
|
'indexes': ['title', ('title', '-rating')]
|
||||||
}
|
}
|
||||||
|
|
||||||
If a dictionary is passed then the following options are available:
|
.. note::
|
||||||
|
|
||||||
:attr:`fields` (Default: None)
|
|
||||||
The fields to index. Specified in the same format as described above.
|
|
||||||
|
|
||||||
:attr:`types` (Default: True)
|
|
||||||
Whether the index should have the :attr:`_types` field added automatically
|
|
||||||
to the start of the index.
|
|
||||||
|
|
||||||
:attr:`sparse` (Default: False)
|
|
||||||
Whether the index should be sparse.
|
|
||||||
|
|
||||||
:attr:`unique` (Default: False)
|
|
||||||
Whether the index should be unique.
|
|
||||||
|
|
||||||
.. warning::
|
|
||||||
|
|
||||||
|
|
||||||
Inheritance adds extra indices.
|
|
||||||
If you don't need inheritance for a document, turn inheritance off - see :ref:`document-inheritance`.
|
|
||||||
|
|
||||||
|
|
||||||
Geospatial indexes
|
|
||||||
---------------------------
|
|
||||||
Geospatial indexes will be automatically created for all
|
Geospatial indexes will be automatically created for all
|
||||||
:class:`~mongoengine.GeoPointField`\ s
|
:class:`~mongoengine.GeoPointField`\ s
|
||||||
|
|
||||||
It is also possible to explicitly define geospatial indexes. This is
|
|
||||||
useful if you need to define a geospatial index on a subfield of a
|
|
||||||
:class:`~mongoengine.DictField` or a custom field that contains a
|
|
||||||
point. To create a geospatial index you must prefix the field with the
|
|
||||||
***** sign. ::
|
|
||||||
|
|
||||||
class Place(Document):
|
|
||||||
location = DictField()
|
|
||||||
meta = {
|
|
||||||
'indexes': [
|
|
||||||
'*location.point',
|
|
||||||
],
|
|
||||||
}
|
|
||||||
|
|
||||||
Ordering
|
Ordering
|
||||||
========
|
========
|
||||||
A default ordering can be specified for your
|
A default ordering can be specified for your
|
||||||
@@ -497,31 +343,8 @@ subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. ::
|
|||||||
first_post = BlogPost.objects.order_by("+published_date").first()
|
first_post = BlogPost.objects.order_by("+published_date").first()
|
||||||
assert first_post.title == "Blog Post #1"
|
assert first_post.title == "Blog Post #1"
|
||||||
|
|
||||||
Shard keys
|
|
||||||
==========
|
|
||||||
|
|
||||||
If your collection is sharded, then you need to specify the shard key as a tuple,
|
|
||||||
using the :attr:`shard_key` attribute of :attr:`~mongoengine.Document.meta`.
|
|
||||||
This ensures that the shard key is sent with the query when calling the
|
|
||||||
:meth:`~mongoengine.document.Document.save` or
|
|
||||||
:meth:`~mongoengine.document.Document.update` method on an existing
|
|
||||||
:class:`~mongoengine.Document` instance::
|
|
||||||
|
|
||||||
class LogEntry(Document):
|
|
||||||
machine = StringField()
|
|
||||||
app = StringField()
|
|
||||||
timestamp = DateTimeField()
|
|
||||||
data = StringField()
|
|
||||||
|
|
||||||
meta = {
|
|
||||||
'shard_key': ('machine', 'timestamp',)
|
|
||||||
}
|
|
||||||
|
|
||||||
.. _document-inheritance:
|
|
||||||
|
|
||||||
Document inheritance
|
Document inheritance
|
||||||
====================
|
====================
|
||||||
|
|
||||||
To create a specialised type of a :class:`~mongoengine.Document` you have
|
To create a specialised type of a :class:`~mongoengine.Document` you have
|
||||||
defined, you may subclass it and add any extra fields or methods you may need.
|
defined, you may subclass it and add any extra fields or methods you may need.
|
||||||
As this new class is not a direct subclass of
|
As this new class is not a direct subclass of
|
||||||
@@ -533,15 +356,10 @@ convenient and efficient retrieval of related documents::
|
|||||||
class Page(Document):
|
class Page(Document):
|
||||||
title = StringField(max_length=200, required=True)
|
title = StringField(max_length=200, required=True)
|
||||||
|
|
||||||
meta = {'allow_inheritance': True}
|
|
||||||
|
|
||||||
# Also stored in the collection named 'page'
|
# Also stored in the collection named 'page'
|
||||||
class DatedPage(Page):
|
class DatedPage(Page):
|
||||||
date = DateTimeField()
|
date = DateTimeField()
|
||||||
|
|
||||||
.. note:: From 0.7 onwards you must declare `allow_inheritance` in the document meta.
|
|
||||||
|
|
||||||
|
|
||||||
Working with existing data
|
Working with existing data
|
||||||
--------------------------
|
--------------------------
|
||||||
To enable correct retrieval of documents involved in this kind of hierarchy,
|
To enable correct retrieval of documents involved in this kind of hierarchy,
|
||||||
|
@@ -18,40 +18,18 @@ attribute syntax::
|
|||||||
|
|
||||||
Saving and deleting documents
|
Saving and deleting documents
|
||||||
=============================
|
=============================
|
||||||
MongoEngine tracks changes to documents to provide efficient saving. To save
|
To save the document to the database, call the
|
||||||
the document to the database, call the :meth:`~mongoengine.Document.save` method.
|
:meth:`~mongoengine.Document.save` method. If the document does not exist in
|
||||||
If the document does not exist in the database, it will be created. If it does
|
the database, it will be created. If it does already exist, it will be
|
||||||
already exist, then any changes will be updated atomically. For example::
|
updated.
|
||||||
|
|
||||||
>>> page = Page(title="Test Page")
|
To delete a document, call the :meth:`~mongoengine.Document.delete` method.
|
||||||
>>> page.save() # Performs an insert
|
Note that this will only work if the document exists in the database and has a
|
||||||
>>> page.title = "My Page"
|
valid :attr:`id`.
|
||||||
>>> page.save() # Performs an atomic set on the title field.
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
Changes to documents are tracked and on the whole perform `set` operations.
|
|
||||||
|
|
||||||
* ``list_field.pop(0)`` - *sets* the resulting list
|
|
||||||
* ``del(list_field)`` - *unsets* whole list
|
|
||||||
|
|
||||||
.. seealso::
|
.. seealso::
|
||||||
:ref:`guide-atomic-updates`
|
:ref:`guide-atomic-updates`
|
||||||
|
|
||||||
Cascading Saves
|
|
||||||
---------------
|
|
||||||
If your document contains :class:`~mongoengine.ReferenceField` or
|
|
||||||
:class:`~mongoengine.GenericReferenceField` objects, then by default the
|
|
||||||
:meth:`~mongoengine.Document.save` method will automatically save any changes to
|
|
||||||
those objects as well. If this is not desired passing :attr:`cascade` as False
|
|
||||||
to the save method turns this feature off.
|
|
||||||
|
|
||||||
Deleting documents
|
|
||||||
------------------
|
|
||||||
To delete a document, call the :meth:`~mongoengine.Document.delete` method.
|
|
||||||
Note that this will only work if the document exists in the database and has a
|
|
||||||
valid :attr:`id`.
|
|
||||||
|
|
||||||
Document IDs
|
Document IDs
|
||||||
============
|
============
|
||||||
Each document in the database has a unique id. This may be accessed through the
|
Each document in the database has a unique id. This may be accessed through the
|
||||||
@@ -89,7 +67,6 @@ is an alias to :attr:`id`::
|
|||||||
>>> page.id == page.pk
|
>>> page.id == page.pk
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
If you define your own primary key field, the field implicitly becomes
|
If you define your own primary key field, the field implicitly becomes
|
||||||
required, so a :class:`ValidationError` will be thrown if you don't provide
|
required, so a :class:`ValidationError` will be thrown if you don't provide
|
||||||
it.
|
it.
|
||||||
|
@@ -66,7 +66,6 @@ Deleting stored files is achieved with the :func:`delete` method::
|
|||||||
marmot.photo.delete()
|
marmot.photo.delete()
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
The FileField in a Document actually only stores the ID of a file in a
|
The FileField in a Document actually only stores the ID of a file in a
|
||||||
separate GridFS collection. This means that deleting a document
|
separate GridFS collection. This means that deleting a document
|
||||||
with a defined FileField does not actually delete the file. You must be
|
with a defined FileField does not actually delete the file. You must be
|
||||||
|
@@ -11,4 +11,3 @@ User Guide
|
|||||||
document-instances
|
document-instances
|
||||||
querying
|
querying
|
||||||
gridfs
|
gridfs
|
||||||
signals
|
|
||||||
|
@@ -1,31 +1,31 @@
|
|||||||
======================
|
======================
|
||||||
Installing MongoEngine
|
Installing MongoEngine
|
||||||
======================
|
======================
|
||||||
|
|
||||||
To use MongoEngine, you will need to download `MongoDB <http://mongodb.org/>`_
|
To use MongoEngine, you will need to download `MongoDB <http://mongodb.org/>`_
|
||||||
and ensure it is running in an accessible location. You will also need
|
and ensure it is running in an accessible location. You will also need
|
||||||
`PyMongo <http://api.mongodb.org/python>`_ to use MongoEngine, but if you
|
`PyMongo <http://api.mongodb.org/python>`_ to use MongoEngine, but if you
|
||||||
install MongoEngine using setuptools, then the dependencies will be handled for
|
install MongoEngine using setuptools, then the dependencies will be handled for
|
||||||
you.
|
you.
|
||||||
|
|
||||||
MongoEngine is available on PyPI, so to use it you can use :program:`pip`:
|
MongoEngine is available on PyPI, so to use it you can use
|
||||||
|
:program:`easy_install`:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ pip install mongoengine
|
# easy_install mongoengine
|
||||||
|
|
||||||
Alternatively, if you don't have setuptools installed, `download it from PyPi
|
Alternatively, if you don't have setuptools installed, `download it from PyPi
|
||||||
<http://pypi.python.org/pypi/mongoengine/>`_ and run
|
<http://pypi.python.org/pypi/mongoengine/>`_ and run
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ python setup.py install
|
# python setup.py install
|
||||||
|
|
||||||
To use the bleeding-edge version of MongoEngine, you can get the source from
|
To use the bleeding-edge version of MongoEngine, you can get the source from
|
||||||
`GitHub <http://github.com/hmarr/mongoengine/>`_ and install it as above:
|
`GitHub <http://github.com/hmarr/mongoengine/>`_ and install it as above:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ git clone git://github.com/hmarr/mongoengine
|
# git clone git://github.com/hmarr/mongoengine
|
||||||
$ cd mongoengine
|
# cd mongoengine
|
||||||
$ python setup.py install
|
# python setup.py install
|
||||||
|
@@ -5,8 +5,8 @@ Querying the database
|
|||||||
is used for accessing the objects in the database associated with the class.
|
is used for accessing the objects in the database associated with the class.
|
||||||
The :attr:`objects` attribute is actually a
|
The :attr:`objects` attribute is actually a
|
||||||
:class:`~mongoengine.queryset.QuerySetManager`, which creates and returns a new
|
:class:`~mongoengine.queryset.QuerySetManager`, which creates and returns a new
|
||||||
:class:`~mongoengine.queryset.QuerySet` object on access. The
|
a new :class:`~mongoengine.queryset.QuerySet` object on access. The
|
||||||
:class:`~mongoengine.queryset.QuerySet` object may be iterated over to
|
:class:`~mongoengine.queryset.QuerySet` object may be iterated over to
|
||||||
fetch documents from the database::
|
fetch documents from the database::
|
||||||
|
|
||||||
# Prints out the names of all the users in the database
|
# Prints out the names of all the users in the database
|
||||||
@@ -14,7 +14,6 @@ fetch documents from the database::
|
|||||||
print user.name
|
print user.name
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
Once the iteration finishes (when :class:`StopIteration` is raised),
|
Once the iteration finishes (when :class:`StopIteration` is raised),
|
||||||
:meth:`~mongoengine.queryset.QuerySet.rewind` will be called so that the
|
:meth:`~mongoengine.queryset.QuerySet.rewind` will be called so that the
|
||||||
:class:`~mongoengine.queryset.QuerySet` may be iterated over again. The
|
:class:`~mongoengine.queryset.QuerySet` may be iterated over again. The
|
||||||
@@ -40,6 +39,29 @@ syntax::
|
|||||||
# been written by a user whose 'country' field is set to 'uk'
|
# been written by a user whose 'country' field is set to 'uk'
|
||||||
uk_pages = Page.objects(author__country='uk')
|
uk_pages = Page.objects(author__country='uk')
|
||||||
|
|
||||||
|
Querying lists
|
||||||
|
--------------
|
||||||
|
On most fields, this syntax will look up documents where the field specified
|
||||||
|
matches the given value exactly, but when the field refers to a
|
||||||
|
:class:`~mongoengine.ListField`, a single item may be provided, in which case
|
||||||
|
lists that contain that item will be matched::
|
||||||
|
|
||||||
|
class Page(Document):
|
||||||
|
tags = ListField(StringField())
|
||||||
|
|
||||||
|
# This will match all pages that have the word 'coding' as an item in the
|
||||||
|
# 'tags' list
|
||||||
|
Page.objects(tags='coding')
|
||||||
|
|
||||||
|
Raw queries
|
||||||
|
-----------
|
||||||
|
It is possible to provide a raw PyMongo query as a query parameter, which will
|
||||||
|
be integrated directly into the query. This is done using the ``__raw__``
|
||||||
|
keyword argument::
|
||||||
|
|
||||||
|
Page.objects(__raw__={'tags': 'coding'})
|
||||||
|
|
||||||
|
.. versionadded:: 0.4
|
||||||
|
|
||||||
Query operators
|
Query operators
|
||||||
===============
|
===============
|
||||||
@@ -76,69 +98,27 @@ expressions:
|
|||||||
* ``istartswith`` -- string field starts with value (case insensitive)
|
* ``istartswith`` -- string field starts with value (case insensitive)
|
||||||
* ``endswith`` -- string field ends with value
|
* ``endswith`` -- string field ends with value
|
||||||
* ``iendswith`` -- string field ends with value (case insensitive)
|
* ``iendswith`` -- string field ends with value (case insensitive)
|
||||||
* ``match`` -- performs an $elemMatch so you can match an entire document within an array
|
|
||||||
|
.. versionadded:: 0.3
|
||||||
|
|
||||||
There are a few special operators for performing geographical queries, that
|
There are a few special operators for performing geographical queries, that
|
||||||
may be used with :class:`~mongoengine.GeoPointField`\ s:
|
may be used with :class:`~mongoengine.GeoPointField`\ s:
|
||||||
|
|
||||||
* ``within_distance`` -- provide a list containing a point and a maximum
|
* ``within_distance`` -- provide a list containing a point and a maximum
|
||||||
distance (e.g. [(41.342, -87.653), 5])
|
distance (e.g. [(41.342, -87.653), 5])
|
||||||
* ``within_spherical_distance`` -- Same as above but using the spherical geo model
|
|
||||||
(e.g. [(41.342, -87.653), 5/earth_radius])
|
|
||||||
* ``near`` -- order the documents by how close they are to a given point
|
|
||||||
* ``near_sphere`` -- Same as above but using the spherical geo model
|
|
||||||
* ``within_box`` -- filter documents to those within a given bounding box (e.g.
|
* ``within_box`` -- filter documents to those within a given bounding box (e.g.
|
||||||
[(35.0, -125.0), (40.0, -100.0)])
|
[(35.0, -125.0), (40.0, -100.0)])
|
||||||
* ``within_polygon`` -- filter documents to those within a given polygon (e.g.
|
* ``near`` -- order the documents by how close they are to a given point
|
||||||
[(41.91,-87.69), (41.92,-87.68), (41.91,-87.65), (41.89,-87.65)]).
|
|
||||||
.. note:: Requires Mongo Server 2.0
|
|
||||||
|
|
||||||
|
.. versionadded:: 0.4
|
||||||
|
|
||||||
Querying lists
|
Querying by position
|
||||||
--------------
|
====================
|
||||||
On most fields, this syntax will look up documents where the field specified
|
|
||||||
matches the given value exactly, but when the field refers to a
|
|
||||||
:class:`~mongoengine.ListField`, a single item may be provided, in which case
|
|
||||||
lists that contain that item will be matched::
|
|
||||||
|
|
||||||
class Page(Document):
|
|
||||||
tags = ListField(StringField())
|
|
||||||
|
|
||||||
# This will match all pages that have the word 'coding' as an item in the
|
|
||||||
# 'tags' list
|
|
||||||
Page.objects(tags='coding')
|
|
||||||
|
|
||||||
It is possible to query by position in a list by using a numerical value as a
|
It is possible to query by position in a list by using a numerical value as a
|
||||||
query operator. So if you wanted to find all pages whose first tag was ``db``,
|
query operator. So if you wanted to find all pages whose first tag was ``db``,
|
||||||
you could use the following query::
|
you could use the following query::
|
||||||
|
|
||||||
Page.objects(tags__0='db')
|
BlogPost.objects(tags__0='db')
|
||||||
|
|
||||||
If you only want to fetch part of a list eg: you want to paginate a list, then
|
|
||||||
the `slice` operator is required::
|
|
||||||
|
|
||||||
# comments - skip 5, limit 10
|
|
||||||
Page.objects.fields(slice__comments=[5, 10])
|
|
||||||
|
|
||||||
For updating documents, if you don't know the position in a list, you can use
|
|
||||||
the $ positional operator ::
|
|
||||||
|
|
||||||
Post.objects(comments__by="joe").update(**{'inc__comments__$__votes': 1})
|
|
||||||
|
|
||||||
However, this doesn't map well to the syntax so you can also use a capital S instead ::
|
|
||||||
|
|
||||||
Post.objects(comments__by="joe").update(inc__comments__S__votes=1)
|
|
||||||
|
|
||||||
.. note:: In MongoDB, the $ positional operator currently applies only to the first matched item in the query.
|
|
||||||
|
|
||||||
|
|
||||||
Raw queries
|
|
||||||
-----------
|
|
||||||
It is possible to provide a raw PyMongo query as a query parameter, which will
|
|
||||||
be integrated directly into the query. This is done using the ``__raw__``
|
|
||||||
keyword argument::
|
|
||||||
|
|
||||||
Page.objects(__raw__={'tags': 'coding'})
|
|
||||||
|
|
||||||
.. versionadded:: 0.4
|
.. versionadded:: 0.4
|
||||||
|
|
||||||
@@ -274,7 +254,6 @@ You may sum over the values of a specific field on documents using
|
|||||||
yearly_expense = Employee.objects.sum('salary')
|
yearly_expense = Employee.objects.sum('salary')
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
If the field isn't present on a document, that document will be ignored from
|
If the field isn't present on a document, that document will be ignored from
|
||||||
the sum.
|
the sum.
|
||||||
|
|
||||||
@@ -297,16 +276,8 @@ would be generating "tag-clouds"::
|
|||||||
from operator import itemgetter
|
from operator import itemgetter
|
||||||
top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10]
|
top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10]
|
||||||
|
|
||||||
|
|
||||||
Query efficiency and performance
|
|
||||||
================================
|
|
||||||
|
|
||||||
There are a couple of methods to improve efficiency when querying, reducing the
|
|
||||||
information returned by the query or efficient dereferencing.
|
|
||||||
|
|
||||||
Retrieving a subset of fields
|
Retrieving a subset of fields
|
||||||
-----------------------------
|
=============================
|
||||||
|
|
||||||
Sometimes a subset of fields on a :class:`~mongoengine.Document` is required,
|
Sometimes a subset of fields on a :class:`~mongoengine.Document` is required,
|
||||||
and for efficiency only these should be retrieved from the database. This issue
|
and for efficiency only these should be retrieved from the database. This issue
|
||||||
is especially important for MongoDB, as fields may often be extremely large
|
is especially important for MongoDB, as fields may often be extremely large
|
||||||
@@ -331,35 +302,9 @@ will be given::
|
|||||||
>>> f.rating # default value
|
>>> f.rating # default value
|
||||||
3
|
3
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
The :meth:`~mongoengine.queryset.QuerySet.exclude` is the opposite of
|
|
||||||
:meth:`~mongoengine.queryset.QuerySet.only` if you want to exclude a field.
|
|
||||||
|
|
||||||
If you later need the missing fields, just call
|
If you later need the missing fields, just call
|
||||||
:meth:`~mongoengine.Document.reload` on your document.
|
:meth:`~mongoengine.Document.reload` on your document.
|
||||||
|
|
||||||
Getting related data
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
When iterating the results of :class:`~mongoengine.ListField` or
|
|
||||||
:class:`~mongoengine.DictField` we automatically dereference any
|
|
||||||
:class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the
|
|
||||||
number of queries to MongoDB.
|
|
||||||
|
|
||||||
There are times when that efficiency is not enough, documents that have
|
|
||||||
:class:`~mongoengine.ReferenceField` objects or
|
|
||||||
:class:`~mongoengine.GenericReferenceField` objects at the top level are
|
|
||||||
expensive as the number of queries to MongoDB can quickly rise.
|
|
||||||
|
|
||||||
To limit the number of queries use
|
|
||||||
:func:`~mongoengine.queryset.QuerySet.select_related` which converts the
|
|
||||||
QuerySet to a list and dereferences as efficiently as possible. By default
|
|
||||||
:func:`~mongoengine.queryset.QuerySet.select_related` only dereferences any
|
|
||||||
references to the depth of 1 level. If you have more complicated documents and
|
|
||||||
want to dereference more of the object at once then increasing the :attr:`max_depth`
|
|
||||||
will dereference more levels of the document.
|
|
||||||
|
|
||||||
Advanced queries
|
Advanced queries
|
||||||
================
|
================
|
||||||
Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword
|
Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword
|
||||||
@@ -380,66 +325,11 @@ calling it with keyword arguments::
|
|||||||
# Get top posts
|
# Get top posts
|
||||||
Post.objects((Q(featured=True) & Q(hits__gte=1000)) | Q(hits__gte=5000))
|
Post.objects((Q(featured=True) & Q(hits__gte=1000)) | Q(hits__gte=5000))
|
||||||
|
|
||||||
.. _guide-atomic-updates:
|
.. warning::
|
||||||
|
Only use these advanced queries if absolutely necessary as they will execute
|
||||||
Atomic updates
|
significantly slower than regular queries. This is because they are not
|
||||||
==============
|
natively supported by MongoDB -- they are compiled to Javascript and sent
|
||||||
Documents may be updated atomically by using the
|
to the server for execution.
|
||||||
:meth:`~mongoengine.queryset.QuerySet.update_one` and
|
|
||||||
:meth:`~mongoengine.queryset.QuerySet.update` methods on a
|
|
||||||
:meth:`~mongoengine.queryset.QuerySet`. There are several different "modifiers"
|
|
||||||
that you may use with these methods:
|
|
||||||
|
|
||||||
* ``set`` -- set a particular value
|
|
||||||
* ``unset`` -- delete a particular value (since MongoDB v1.3+)
|
|
||||||
* ``inc`` -- increment a value by a given amount
|
|
||||||
* ``dec`` -- decrement a value by a given amount
|
|
||||||
* ``pop`` -- remove the last item from a list
|
|
||||||
* ``push`` -- append a value to a list
|
|
||||||
* ``push_all`` -- append several values to a list
|
|
||||||
* ``pop`` -- remove the first or last element of a list
|
|
||||||
* ``pull`` -- remove a value from a list
|
|
||||||
* ``pull_all`` -- remove several values from a list
|
|
||||||
* ``add_to_set`` -- add value to a list only if its not in the list already
|
|
||||||
|
|
||||||
The syntax for atomic updates is similar to the querying syntax, but the
|
|
||||||
modifier comes before the field, not after it::
|
|
||||||
|
|
||||||
>>> post = BlogPost(title='Test', page_views=0, tags=['database'])
|
|
||||||
>>> post.save()
|
|
||||||
>>> BlogPost.objects(id=post.id).update_one(inc__page_views=1)
|
|
||||||
>>> post.reload() # the document has been changed, so we need to reload it
|
|
||||||
>>> post.page_views
|
|
||||||
1
|
|
||||||
>>> BlogPost.objects(id=post.id).update_one(set__title='Example Post')
|
|
||||||
>>> post.reload()
|
|
||||||
>>> post.title
|
|
||||||
'Example Post'
|
|
||||||
>>> BlogPost.objects(id=post.id).update_one(push__tags='nosql')
|
|
||||||
>>> post.reload()
|
|
||||||
>>> post.tags
|
|
||||||
['database', 'nosql']
|
|
||||||
|
|
||||||
.. note ::
|
|
||||||
|
|
||||||
In version 0.5 the :meth:`~mongoengine.Document.save` runs atomic updates
|
|
||||||
on changed documents by tracking changes to that document.
|
|
||||||
|
|
||||||
The positional operator allows you to update list items without knowing the
|
|
||||||
index position, therefore making the update a single atomic operation. As we
|
|
||||||
cannot use the `$` syntax in keyword arguments it has been mapped to `S`::
|
|
||||||
|
|
||||||
>>> post = BlogPost(title='Test', page_views=0, tags=['database', 'mongo'])
|
|
||||||
>>> post.save()
|
|
||||||
>>> BlogPost.objects(id=post.id, tags='mongo').update(set__tags__S='mongodb')
|
|
||||||
>>> post.reload()
|
|
||||||
>>> post.tags
|
|
||||||
['database', 'mongodb']
|
|
||||||
|
|
||||||
.. note ::
|
|
||||||
Currently only top level lists are handled, future versions of mongodb /
|
|
||||||
pymongo plan to support nested positional operators. See `The $ positional
|
|
||||||
operator <http://www.mongodb.org/display/DOCS/Updating#Updating-The%24positionaloperator>`_.
|
|
||||||
|
|
||||||
Server-side javascript execution
|
Server-side javascript execution
|
||||||
================================
|
================================
|
||||||
@@ -543,3 +433,43 @@ following example shows how the substitutions are made::
|
|||||||
return comments;
|
return comments;
|
||||||
}
|
}
|
||||||
""")
|
""")
|
||||||
|
|
||||||
|
.. _guide-atomic-updates:
|
||||||
|
|
||||||
|
Atomic updates
|
||||||
|
==============
|
||||||
|
Documents may be updated atomically by using the
|
||||||
|
:meth:`~mongoengine.queryset.QuerySet.update_one` and
|
||||||
|
:meth:`~mongoengine.queryset.QuerySet.update` methods on a
|
||||||
|
:meth:`~mongoengine.queryset.QuerySet`. There are several different "modifiers"
|
||||||
|
that you may use with these methods:
|
||||||
|
|
||||||
|
* ``set`` -- set a particular value
|
||||||
|
* ``unset`` -- delete a particular value (since MongoDB v1.3+)
|
||||||
|
* ``inc`` -- increment a value by a given amount
|
||||||
|
* ``dec`` -- decrement a value by a given amount
|
||||||
|
* ``pop`` -- remove the last item from a list
|
||||||
|
* ``push`` -- append a value to a list
|
||||||
|
* ``push_all`` -- append several values to a list
|
||||||
|
* ``pop`` -- remove the first or last element of a list
|
||||||
|
* ``pull`` -- remove a value from a list
|
||||||
|
* ``pull_all`` -- remove several values from a list
|
||||||
|
* ``add_to_set`` -- add value to a list only if its not in the list already
|
||||||
|
|
||||||
|
The syntax for atomic updates is similar to the querying syntax, but the
|
||||||
|
modifier comes before the field, not after it::
|
||||||
|
|
||||||
|
>>> post = BlogPost(title='Test', page_views=0, tags=['database'])
|
||||||
|
>>> post.save()
|
||||||
|
>>> BlogPost.objects(id=post.id).update_one(inc__page_views=1)
|
||||||
|
>>> post.reload() # the document has been changed, so we need to reload it
|
||||||
|
>>> post.page_views
|
||||||
|
1
|
||||||
|
>>> BlogPost.objects(id=post.id).update_one(set__title='Example Post')
|
||||||
|
>>> post.reload()
|
||||||
|
>>> post.title
|
||||||
|
'Example Post'
|
||||||
|
>>> BlogPost.objects(id=post.id).update_one(push__tags='nosql')
|
||||||
|
>>> post.reload()
|
||||||
|
>>> post.tags
|
||||||
|
['database', 'nosql']
|
||||||
|
@@ -1,53 +0,0 @@
|
|||||||
.. _signals:
|
|
||||||
|
|
||||||
Signals
|
|
||||||
=======
|
|
||||||
|
|
||||||
.. versionadded:: 0.5
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
Signal support is provided by the excellent `blinker`_ library and
|
|
||||||
will gracefully fall back if it is not available.
|
|
||||||
|
|
||||||
|
|
||||||
The following document signals exist in MongoEngine and are pretty self-explanatory:
|
|
||||||
|
|
||||||
* `mongoengine.signals.pre_init`
|
|
||||||
* `mongoengine.signals.post_init`
|
|
||||||
* `mongoengine.signals.pre_save`
|
|
||||||
* `mongoengine.signals.post_save`
|
|
||||||
* `mongoengine.signals.pre_delete`
|
|
||||||
* `mongoengine.signals.post_delete`
|
|
||||||
* `mongoengine.signals.pre_bulk_insert`
|
|
||||||
* `mongoengine.signals.post_bulk_insert`
|
|
||||||
|
|
||||||
Example usage::
|
|
||||||
|
|
||||||
from mongoengine import *
|
|
||||||
from mongoengine import signals
|
|
||||||
|
|
||||||
class Author(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
def __unicode__(self):
|
|
||||||
return self.name
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def pre_save(cls, sender, document, **kwargs):
|
|
||||||
logging.debug("Pre Save: %s" % document.name)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def post_save(cls, sender, document, **kwargs):
|
|
||||||
logging.debug("Post Save: %s" % document.name)
|
|
||||||
if 'created' in kwargs:
|
|
||||||
if kwargs['created']:
|
|
||||||
logging.debug("Created")
|
|
||||||
else:
|
|
||||||
logging.debug("Updated")
|
|
||||||
|
|
||||||
signals.pre_save.connect(Author.pre_save, sender=Author)
|
|
||||||
signals.post_save.connect(Author.post_save, sender=Author)
|
|
||||||
|
|
||||||
|
|
||||||
.. _blinker: http://pypi.python.org/pypi/blinker
|
|
@@ -2,66 +2,34 @@
|
|||||||
MongoEngine User Documentation
|
MongoEngine User Documentation
|
||||||
==============================
|
==============================
|
||||||
|
|
||||||
**MongoEngine** is an Object-Document Mapper, written in Python for working with
|
MongoEngine is an Object-Document Mapper, written in Python for working with
|
||||||
MongoDB. To install it, simply run
|
MongoDB. To install it, simply run
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
# pip install -U mongoengine
|
# pip install -U mongoengine
|
||||||
|
|
||||||
:doc:`tutorial`
|
The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_.
|
||||||
Start here for a quick overview.
|
|
||||||
|
|
||||||
:doc:`guide/index`
|
|
||||||
The Full guide to MongoEngine
|
|
||||||
|
|
||||||
:doc:`apireference`
|
|
||||||
The complete API documentation.
|
|
||||||
|
|
||||||
:doc:`upgrade`
|
|
||||||
How to upgrade MongoEngine.
|
|
||||||
|
|
||||||
:doc:`django`
|
|
||||||
Using MongoEngine and Django
|
|
||||||
|
|
||||||
Community
|
|
||||||
---------
|
|
||||||
|
|
||||||
To get help with using MongoEngine, use the `MongoEngine Users mailing list
|
To get help with using MongoEngine, use the `MongoEngine Users mailing list
|
||||||
<http://groups.google.com/group/mongoengine-users>`_ or come chat on the
|
<http://groups.google.com/group/mongoengine-users>`_ or come chat on the
|
||||||
`#mongoengine IRC channel <irc://irc.freenode.net/mongoengine>`_.
|
`#mongoengine IRC channel <irc://irc.freenode.net/mongoengine>`_.
|
||||||
|
|
||||||
Contributing
|
If you are interested in contributing, join the developers' `mailing list
|
||||||
------------
|
|
||||||
|
|
||||||
The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_ and
|
|
||||||
contributions are always encouraged. Contributions can be as simple as
|
|
||||||
minor tweaks to this documentation. To contribute, fork the project on
|
|
||||||
`GitHub <http://github.com/hmarr/mongoengine>`_ and send a
|
|
||||||
pull request.
|
|
||||||
|
|
||||||
Also, you can join the developers' `mailing list
|
|
||||||
<http://groups.google.com/group/mongoengine-dev>`_.
|
<http://groups.google.com/group/mongoengine-dev>`_.
|
||||||
|
|
||||||
Changes
|
|
||||||
-------
|
|
||||||
See the :doc:`changelog` for a full list of changes to MongoEngine and
|
|
||||||
:doc:`upgrade` for upgrade information.
|
|
||||||
|
|
||||||
.. toctree::
|
.. toctree::
|
||||||
:hidden:
|
:maxdepth: 2
|
||||||
|
|
||||||
tutorial
|
tutorial
|
||||||
guide/index
|
guide/index
|
||||||
apireference
|
apireference
|
||||||
django
|
django
|
||||||
changelog
|
changelog
|
||||||
upgrade
|
|
||||||
|
|
||||||
Indices and tables
|
Indices and tables
|
||||||
------------------
|
==================
|
||||||
|
|
||||||
* :ref:`genindex`
|
* :ref:`genindex`
|
||||||
* :ref:`modindex`
|
|
||||||
* :ref:`search`
|
* :ref:`search`
|
||||||
|
|
||||||
|
@@ -152,26 +152,6 @@ We can then store a list of comment documents in our post document::
|
|||||||
tags = ListField(StringField(max_length=30))
|
tags = ListField(StringField(max_length=30))
|
||||||
comments = ListField(EmbeddedDocumentField(Comment))
|
comments = ListField(EmbeddedDocumentField(Comment))
|
||||||
|
|
||||||
Handling deletions of references
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
The :class:`~mongoengine.ReferenceField` object takes a keyword
|
|
||||||
`reverse_delete_rule` for handling deletion rules if the reference is deleted.
|
|
||||||
To delete all the posts if a user is deleted set the rule::
|
|
||||||
|
|
||||||
class Post(Document):
|
|
||||||
title = StringField(max_length=120, required=True)
|
|
||||||
author = ReferenceField(User, reverse_delete_rule=CASCADE)
|
|
||||||
tags = ListField(StringField(max_length=30))
|
|
||||||
comments = ListField(EmbeddedDocumentField(Comment))
|
|
||||||
|
|
||||||
See :class:`~mongoengine.ReferenceField` for more information.
|
|
||||||
|
|
||||||
..note::
|
|
||||||
MapFields and DictFields currently don't support automatic handling of
|
|
||||||
deleted references
|
|
||||||
|
|
||||||
|
|
||||||
Adding data to our Tumblelog
|
Adding data to our Tumblelog
|
||||||
============================
|
============================
|
||||||
Now that we've defined how our documents will be structured, let's start adding
|
Now that we've defined how our documents will be structured, let's start adding
|
||||||
@@ -270,5 +250,5 @@ the first matched by the query you provide. Aggregation functions may also be
|
|||||||
used on :class:`~mongoengine.queryset.QuerySet` objects::
|
used on :class:`~mongoengine.queryset.QuerySet` objects::
|
||||||
|
|
||||||
num_posts = Post.objects(tags='mongodb').count()
|
num_posts = Post.objects(tags='mongodb').count()
|
||||||
print 'Found %d posts with tag "mongodb"' % num_posts
|
print 'Found % posts with tag "mongodb"' % num_posts
|
||||||
|
|
||||||
|
115
docs/upgrade.rst
115
docs/upgrade.rst
@@ -1,115 +0,0 @@
|
|||||||
=========
|
|
||||||
Upgrading
|
|
||||||
=========
|
|
||||||
|
|
||||||
0.5 to 0.6
|
|
||||||
==========
|
|
||||||
|
|
||||||
Embedded Documents - if you had a `pk` field you will have to rename it from `_id`
|
|
||||||
to `pk` as pk is no longer a property of Embedded Documents.
|
|
||||||
|
|
||||||
Reverse Delete Rules in Embedded Documents, MapFields and DictFields now throw
|
|
||||||
an InvalidDocument error as they aren't currently supported.
|
|
||||||
|
|
||||||
Document._get_subclasses - Is no longer used and the class method has been removed.
|
|
||||||
|
|
||||||
Document.objects.with_id - now raises an InvalidQueryError if used with a filter.
|
|
||||||
|
|
||||||
FutureWarning - A future warning has been added to all inherited classes that
|
|
||||||
don't define `allow_inheritance` in their meta.
|
|
||||||
|
|
||||||
You may need to update pyMongo to 2.0 for use with Sharding.
|
|
||||||
|
|
||||||
0.4 to 0.5
|
|
||||||
===========
|
|
||||||
|
|
||||||
There have been the following backwards incompatibilities from 0.4 to 0.5. The
|
|
||||||
main areas of changed are: choices in fields, map_reduce and collection names.
|
|
||||||
|
|
||||||
Choice options:
|
|
||||||
---------------
|
|
||||||
|
|
||||||
Are now expected to be an iterable of tuples, with the first element in each
|
|
||||||
tuple being the actual value to be stored. The second element is the
|
|
||||||
human-readable name for the option.
|
|
||||||
|
|
||||||
|
|
||||||
PyMongo / MongoDB
|
|
||||||
-----------------
|
|
||||||
|
|
||||||
map reduce now requires pymongo 1.11+- The pymongo merge_output and reduce_output
|
|
||||||
parameters, have been depreciated.
|
|
||||||
|
|
||||||
More methods now use map_reduce as db.eval is not supported for sharding as such
|
|
||||||
the following have been changed:
|
|
||||||
|
|
||||||
* :meth:`~mongoengine.queryset.QuerySet.sum`
|
|
||||||
* :meth:`~mongoengine.queryset.QuerySet.average`
|
|
||||||
* :meth:`~mongoengine.queryset.QuerySet.item_frequencies`
|
|
||||||
|
|
||||||
|
|
||||||
Default collection naming
|
|
||||||
-------------------------
|
|
||||||
|
|
||||||
Previously it was just lowercase, its now much more pythonic and readable as its
|
|
||||||
lowercase and underscores, previously ::
|
|
||||||
|
|
||||||
class MyAceDocument(Document):
|
|
||||||
pass
|
|
||||||
|
|
||||||
MyAceDocument._meta['collection'] == myacedocument
|
|
||||||
|
|
||||||
In 0.5 this will change to ::
|
|
||||||
|
|
||||||
class MyAceDocument(Document):
|
|
||||||
pass
|
|
||||||
|
|
||||||
MyAceDocument._get_collection_name() == my_ace_document
|
|
||||||
|
|
||||||
To upgrade use a Mixin class to set meta like so ::
|
|
||||||
|
|
||||||
class BaseMixin(object):
|
|
||||||
meta = {
|
|
||||||
'collection': lambda c: c.__name__.lower()
|
|
||||||
}
|
|
||||||
|
|
||||||
class MyAceDocument(Document, BaseMixin):
|
|
||||||
pass
|
|
||||||
|
|
||||||
MyAceDocument._get_collection_name() == "myacedocument"
|
|
||||||
|
|
||||||
Alternatively, you can rename your collections eg ::
|
|
||||||
|
|
||||||
from mongoengine.connection import _get_db
|
|
||||||
from mongoengine.base import _document_registry
|
|
||||||
|
|
||||||
def rename_collections():
|
|
||||||
db = _get_db()
|
|
||||||
|
|
||||||
failure = False
|
|
||||||
|
|
||||||
collection_names = [d._get_collection_name() for d in _document_registry.values()]
|
|
||||||
|
|
||||||
for new_style_name in collection_names:
|
|
||||||
if not new_style_name: # embedded documents don't have collections
|
|
||||||
continue
|
|
||||||
old_style_name = new_style_name.replace('_', '')
|
|
||||||
|
|
||||||
if old_style_name == new_style_name:
|
|
||||||
continue # Nothing to do
|
|
||||||
|
|
||||||
existing = db.collection_names()
|
|
||||||
if old_style_name in existing:
|
|
||||||
if new_style_name in existing:
|
|
||||||
failure = True
|
|
||||||
print "FAILED to rename: %s to %s (already exists)" % (
|
|
||||||
old_style_name, new_style_name)
|
|
||||||
else:
|
|
||||||
db[old_style_name].rename(new_style_name)
|
|
||||||
print "Renamed: %s to %s" % (old_style_name, new_style_name)
|
|
||||||
|
|
||||||
if failure:
|
|
||||||
print "Upgrading collection names failed"
|
|
||||||
else:
|
|
||||||
print "Upgraded collection names"
|
|
||||||
|
|
@@ -6,14 +6,13 @@ import connection
|
|||||||
from connection import *
|
from connection import *
|
||||||
import queryset
|
import queryset
|
||||||
from queryset import *
|
from queryset import *
|
||||||
import signals
|
|
||||||
from signals import *
|
|
||||||
|
|
||||||
__all__ = (document.__all__ + fields.__all__ + connection.__all__ +
|
__all__ = (document.__all__ + fields.__all__ + connection.__all__ +
|
||||||
queryset.__all__ + signals.__all__)
|
queryset.__all__)
|
||||||
|
|
||||||
VERSION = (0, 6, 3)
|
__author__ = 'Harry Marr'
|
||||||
|
|
||||||
|
VERSION = (0, 4, 1)
|
||||||
|
|
||||||
def get_version():
|
def get_version():
|
||||||
version = '%s.%s' % (VERSION[0], VERSION[1])
|
version = '%s.%s' % (VERSION[0], VERSION[1])
|
||||||
@@ -22,3 +21,4 @@ def get_version():
|
|||||||
return version
|
return version
|
||||||
|
|
||||||
__version__ = get_version()
|
__version__ = get_version()
|
||||||
|
|
||||||
|
1118
mongoengine/base.py
1118
mongoengine/base.py
File diff suppressed because it is too large
Load Diff
@@ -1,159 +1,71 @@
|
|||||||
import pymongo
|
from pymongo import Connection
|
||||||
from pymongo import Connection, ReplicaSetConnection, uri_parser
|
import multiprocessing
|
||||||
|
|
||||||
|
__all__ = ['ConnectionError', 'connect']
|
||||||
|
|
||||||
|
|
||||||
__all__ = ['ConnectionError', 'connect', 'register_connection',
|
_connection_defaults = {
|
||||||
'DEFAULT_CONNECTION_NAME']
|
'host': 'localhost',
|
||||||
|
'port': 27017,
|
||||||
|
}
|
||||||
|
_connection = {}
|
||||||
|
_connection_settings = _connection_defaults.copy()
|
||||||
|
|
||||||
|
_db_name = None
|
||||||
DEFAULT_CONNECTION_NAME = 'default'
|
_db_username = None
|
||||||
|
_db_password = None
|
||||||
|
_db = {}
|
||||||
|
|
||||||
|
|
||||||
class ConnectionError(Exception):
|
class ConnectionError(Exception):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
_connection_settings = {}
|
def _get_connection(reconnect=False):
|
||||||
_connections = {}
|
global _connection
|
||||||
_dbs = {}
|
identity = get_identity()
|
||||||
|
|
||||||
|
|
||||||
def register_connection(alias, name, host='localhost', port=27017,
|
|
||||||
is_slave=False, read_preference=False, slaves=None,
|
|
||||||
username=None, password=None, **kwargs):
|
|
||||||
"""Add a connection.
|
|
||||||
|
|
||||||
:param alias: the name that will be used to refer to this connection
|
|
||||||
throughout MongoEngine
|
|
||||||
:param name: the name of the specific database to use
|
|
||||||
:param host: the host name of the :program:`mongod` instance to connect to
|
|
||||||
:param port: the port that the :program:`mongod` instance is running on
|
|
||||||
:param is_slave: whether the connection can act as a slave ** Depreciated pymongo 2.0.1+
|
|
||||||
:param read_preference: The read preference for the collection ** Added pymongo 2.1
|
|
||||||
:param slaves: a list of aliases of slave connections; each of these must
|
|
||||||
be a registered connection that has :attr:`is_slave` set to ``True``
|
|
||||||
:param username: username to authenticate with
|
|
||||||
:param password: password to authenticate with
|
|
||||||
:param kwargs: allow ad-hoc parameters to be passed into the pymongo driver
|
|
||||||
|
|
||||||
"""
|
|
||||||
global _connection_settings
|
|
||||||
|
|
||||||
# Handle uri style connections
|
|
||||||
if "://" in host:
|
|
||||||
uri_dict = uri_parser.parse_uri(host)
|
|
||||||
if uri_dict.get('database') is None:
|
|
||||||
raise ConnectionError("If using URI style connection include "\
|
|
||||||
"database name in string")
|
|
||||||
_connection_settings[alias] = {
|
|
||||||
'host': host,
|
|
||||||
'name': uri_dict.get('database'),
|
|
||||||
'username': uri_dict.get('username'),
|
|
||||||
'password': uri_dict.get('password')
|
|
||||||
}
|
|
||||||
_connection_settings[alias].update(kwargs)
|
|
||||||
return
|
|
||||||
|
|
||||||
_connection_settings[alias] = {
|
|
||||||
'name': name,
|
|
||||||
'host': host,
|
|
||||||
'port': port,
|
|
||||||
'is_slave': is_slave,
|
|
||||||
'slaves': slaves or [],
|
|
||||||
'username': username,
|
|
||||||
'password': password,
|
|
||||||
'read_preference': read_preference
|
|
||||||
}
|
|
||||||
_connection_settings[alias].update(kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
def disconnect(alias=DEFAULT_CONNECTION_NAME):
|
|
||||||
global _connections
|
|
||||||
global _dbs
|
|
||||||
|
|
||||||
if alias in _connections:
|
|
||||||
get_connection(alias=alias).disconnect()
|
|
||||||
del _connections[alias]
|
|
||||||
if alias in _dbs:
|
|
||||||
del _dbs[alias]
|
|
||||||
|
|
||||||
|
|
||||||
def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
|
|
||||||
global _connections
|
|
||||||
# Connect to the database if not already connected
|
# Connect to the database if not already connected
|
||||||
if reconnect:
|
if _connection.get(identity) is None or reconnect:
|
||||||
disconnect(alias)
|
|
||||||
|
|
||||||
if alias not in _connections:
|
|
||||||
if alias not in _connection_settings:
|
|
||||||
msg = 'Connection with alias "%s" has not been defined' % alias
|
|
||||||
if alias == DEFAULT_CONNECTION_NAME:
|
|
||||||
msg = 'You have not defined a default connection'
|
|
||||||
raise ConnectionError(msg)
|
|
||||||
conn_settings = _connection_settings[alias].copy()
|
|
||||||
|
|
||||||
if hasattr(pymongo, 'version_tuple'): # Support for 2.1+
|
|
||||||
conn_settings.pop('name', None)
|
|
||||||
conn_settings.pop('slaves', None)
|
|
||||||
conn_settings.pop('is_slave', None)
|
|
||||||
conn_settings.pop('username', None)
|
|
||||||
conn_settings.pop('password', None)
|
|
||||||
else:
|
|
||||||
# Get all the slave connections
|
|
||||||
if 'slaves' in conn_settings:
|
|
||||||
slaves = []
|
|
||||||
for slave_alias in conn_settings['slaves']:
|
|
||||||
slaves.append(get_connection(slave_alias))
|
|
||||||
conn_settings['slaves'] = slaves
|
|
||||||
conn_settings.pop('read_preference', None)
|
|
||||||
|
|
||||||
connection_class = Connection
|
|
||||||
if 'replicaSet' in conn_settings:
|
|
||||||
conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
|
|
||||||
# Discard port since it can't be used on ReplicaSetConnection
|
|
||||||
conn_settings.pop('port', None)
|
|
||||||
connection_class = ReplicaSetConnection
|
|
||||||
try:
|
try:
|
||||||
_connections[alias] = connection_class(**conn_settings)
|
_connection[identity] = Connection(**_connection_settings)
|
||||||
except Exception, e:
|
except:
|
||||||
raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e))
|
raise ConnectionError('Cannot connect to the database')
|
||||||
return _connections[alias]
|
return _connection[identity]
|
||||||
|
|
||||||
|
def _get_db(reconnect=False):
|
||||||
|
global _db, _connection
|
||||||
|
identity = get_identity()
|
||||||
|
# Connect if not already connected
|
||||||
|
if _connection.get(identity) is None or reconnect:
|
||||||
|
_connection[identity] = _get_connection(reconnect=reconnect)
|
||||||
|
|
||||||
def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
|
if _db.get(identity) is None or reconnect:
|
||||||
global _dbs
|
# _db_name will be None if the user hasn't called connect()
|
||||||
if reconnect:
|
if _db_name is None:
|
||||||
disconnect(alias)
|
raise ConnectionError('Not connected to the database')
|
||||||
|
|
||||||
if alias not in _dbs:
|
# Get DB from current connection and authenticate if necessary
|
||||||
conn = get_connection(alias)
|
_db[identity] = _connection[identity][_db_name]
|
||||||
conn_settings = _connection_settings[alias]
|
if _db_username and _db_password:
|
||||||
_dbs[alias] = conn[conn_settings['name']]
|
_db[identity].authenticate(_db_username, _db_password)
|
||||||
# Authenticate if necessary
|
|
||||||
if conn_settings['username'] and conn_settings['password']:
|
|
||||||
_dbs[alias].authenticate(conn_settings['username'],
|
|
||||||
conn_settings['password'])
|
|
||||||
return _dbs[alias]
|
|
||||||
|
|
||||||
|
return _db[identity]
|
||||||
|
|
||||||
def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs):
|
def get_identity():
|
||||||
"""Connect to the database specified by the 'db' argument.
|
identity = multiprocessing.current_process()._identity
|
||||||
|
identity = 0 if not identity else identity[0]
|
||||||
|
return identity
|
||||||
|
|
||||||
Connection settings may be provided here as well if the database is not
|
def connect(db, username=None, password=None, **kwargs):
|
||||||
running on the default port on localhost. If authentication is needed,
|
"""Connect to the database specified by the 'db' argument. Connection
|
||||||
provide username and password arguments as well.
|
settings may be provided here as well if the database is not running on
|
||||||
|
the default port on localhost. If authentication is needed, provide
|
||||||
Multiple databases are supported by using aliases. Provide a separate
|
username and password arguments as well.
|
||||||
`alias` to connect to a different instance of :program:`mongod`.
|
|
||||||
|
|
||||||
.. versionchanged:: 0.6 - added multiple database support.
|
|
||||||
"""
|
"""
|
||||||
global _connections
|
global _connection_settings, _db_name, _db_username, _db_password, _db
|
||||||
if alias not in _connections:
|
_connection_settings = dict(_connection_defaults, **kwargs)
|
||||||
register_connection(alias, db, **kwargs)
|
_db_name = db
|
||||||
|
_db_username = username
|
||||||
|
_db_password = password
|
||||||
|
return _get_db(reconnect=True)
|
||||||
|
|
||||||
return get_connection(alias)
|
|
||||||
|
|
||||||
# Support old naming convention
|
|
||||||
_get_connection = get_connection
|
|
||||||
_get_db = get_db
|
|
||||||
|
@@ -1,188 +0,0 @@
|
|||||||
from bson import DBRef, SON
|
|
||||||
|
|
||||||
from base import (BaseDict, BaseList, TopLevelDocumentMetaclass, get_document)
|
|
||||||
from fields import (ReferenceField, ListField, DictField, MapField)
|
|
||||||
from connection import get_db
|
|
||||||
from queryset import QuerySet
|
|
||||||
from document import Document
|
|
||||||
|
|
||||||
|
|
||||||
class DeReference(object):
|
|
||||||
|
|
||||||
def __call__(self, items, max_depth=1, instance=None, name=None):
|
|
||||||
"""
|
|
||||||
Cheaply dereferences the items to a set depth.
|
|
||||||
Also handles the convertion of complex data types.
|
|
||||||
|
|
||||||
:param items: The iterable (dict, list, queryset) to be dereferenced.
|
|
||||||
:param max_depth: The maximum depth to recurse to
|
|
||||||
:param instance: The owning instance used for tracking changes by
|
|
||||||
:class:`~mongoengine.base.ComplexBaseField`
|
|
||||||
:param name: The name of the field, used for tracking changes by
|
|
||||||
:class:`~mongoengine.base.ComplexBaseField`
|
|
||||||
:param get: A boolean determining if being called by __get__
|
|
||||||
"""
|
|
||||||
if items is None or isinstance(items, basestring):
|
|
||||||
return items
|
|
||||||
|
|
||||||
# cheapest way to convert a queryset to a list
|
|
||||||
# list(queryset) uses a count() query to determine length
|
|
||||||
if isinstance(items, QuerySet):
|
|
||||||
items = [i for i in items]
|
|
||||||
|
|
||||||
self.max_depth = max_depth
|
|
||||||
|
|
||||||
doc_type = None
|
|
||||||
if instance and instance._fields:
|
|
||||||
doc_type = instance._fields[name].field
|
|
||||||
|
|
||||||
if isinstance(doc_type, ReferenceField):
|
|
||||||
doc_type = doc_type.document_type
|
|
||||||
if all([i.__class__ == doc_type for i in items]):
|
|
||||||
return items
|
|
||||||
|
|
||||||
self.reference_map = self._find_references(items)
|
|
||||||
self.object_map = self._fetch_objects(doc_type=doc_type)
|
|
||||||
return self._attach_objects(items, 0, instance, name)
|
|
||||||
|
|
||||||
def _find_references(self, items, depth=0):
|
|
||||||
"""
|
|
||||||
Recursively finds all db references to be dereferenced
|
|
||||||
|
|
||||||
:param items: The iterable (dict, list, queryset)
|
|
||||||
:param depth: The current depth of recursion
|
|
||||||
"""
|
|
||||||
reference_map = {}
|
|
||||||
if not items or depth >= self.max_depth:
|
|
||||||
return reference_map
|
|
||||||
|
|
||||||
# Determine the iterator to use
|
|
||||||
if not hasattr(items, 'items'):
|
|
||||||
iterator = enumerate(items)
|
|
||||||
else:
|
|
||||||
iterator = items.iteritems()
|
|
||||||
|
|
||||||
# Recursively find dbreferences
|
|
||||||
depth += 1
|
|
||||||
for k, item in iterator:
|
|
||||||
if hasattr(item, '_fields'):
|
|
||||||
for field_name, field in item._fields.iteritems():
|
|
||||||
v = item._data.get(field_name, None)
|
|
||||||
if isinstance(v, (DBRef)):
|
|
||||||
reference_map.setdefault(field.document_type, []).append(v.id)
|
|
||||||
elif isinstance(v, (dict, SON)) and '_ref' in v:
|
|
||||||
reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id)
|
|
||||||
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
|
|
||||||
field_cls = getattr(getattr(field, 'field', None), 'document_type', None)
|
|
||||||
references = self._find_references(v, depth)
|
|
||||||
for key, refs in references.iteritems():
|
|
||||||
if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
|
|
||||||
key = field_cls
|
|
||||||
reference_map.setdefault(key, []).extend(refs)
|
|
||||||
elif isinstance(item, (DBRef)):
|
|
||||||
reference_map.setdefault(item.collection, []).append(item.id)
|
|
||||||
elif isinstance(item, (dict, SON)) and '_ref' in item:
|
|
||||||
reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id)
|
|
||||||
elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
|
|
||||||
references = self._find_references(item, depth - 1)
|
|
||||||
for key, refs in references.iteritems():
|
|
||||||
reference_map.setdefault(key, []).extend(refs)
|
|
||||||
|
|
||||||
return reference_map
|
|
||||||
|
|
||||||
def _fetch_objects(self, doc_type=None):
|
|
||||||
"""Fetch all references and convert to their document objects
|
|
||||||
"""
|
|
||||||
object_map = {}
|
|
||||||
for col, dbrefs in self.reference_map.iteritems():
|
|
||||||
keys = object_map.keys()
|
|
||||||
refs = list(set([dbref for dbref in dbrefs if str(dbref) not in keys]))
|
|
||||||
if hasattr(col, 'objects'): # We have a document class for the refs
|
|
||||||
references = col.objects.in_bulk(refs)
|
|
||||||
for key, doc in references.iteritems():
|
|
||||||
object_map[key] = doc
|
|
||||||
else: # Generic reference: use the refs data to convert to document
|
|
||||||
if doc_type and not isinstance(doc_type, (ListField, DictField, MapField,) ):
|
|
||||||
references = doc_type._get_db()[col].find({'_id': {'$in': refs}})
|
|
||||||
for ref in references:
|
|
||||||
doc = doc_type._from_son(ref)
|
|
||||||
object_map[doc.id] = doc
|
|
||||||
else:
|
|
||||||
references = get_db()[col].find({'_id': {'$in': refs}})
|
|
||||||
for ref in references:
|
|
||||||
if '_cls' in ref:
|
|
||||||
doc = get_document(ref["_cls"])._from_son(ref)
|
|
||||||
else:
|
|
||||||
doc = doc_type._from_son(ref)
|
|
||||||
object_map[doc.id] = doc
|
|
||||||
return object_map
|
|
||||||
|
|
||||||
def _attach_objects(self, items, depth=0, instance=None, name=None):
|
|
||||||
"""
|
|
||||||
Recursively finds all db references to be dereferenced
|
|
||||||
|
|
||||||
:param items: The iterable (dict, list, queryset)
|
|
||||||
:param depth: The current depth of recursion
|
|
||||||
:param instance: The owning instance used for tracking changes by
|
|
||||||
:class:`~mongoengine.base.ComplexBaseField`
|
|
||||||
:param name: The name of the field, used for tracking changes by
|
|
||||||
:class:`~mongoengine.base.ComplexBaseField`
|
|
||||||
"""
|
|
||||||
if not items:
|
|
||||||
if isinstance(items, (BaseDict, BaseList)):
|
|
||||||
return items
|
|
||||||
|
|
||||||
if instance:
|
|
||||||
if isinstance(items, dict):
|
|
||||||
return BaseDict(items, instance, name)
|
|
||||||
else:
|
|
||||||
return BaseList(items, instance, name)
|
|
||||||
|
|
||||||
if isinstance(items, (dict, SON)):
|
|
||||||
if '_ref' in items:
|
|
||||||
return self.object_map.get(items['_ref'].id, items)
|
|
||||||
elif '_types' in items and '_cls' in items:
|
|
||||||
doc = get_document(items['_cls'])._from_son(items)
|
|
||||||
doc._data = self._attach_objects(doc._data, depth, doc, name)
|
|
||||||
return doc
|
|
||||||
|
|
||||||
if not hasattr(items, 'items'):
|
|
||||||
is_list = True
|
|
||||||
iterator = enumerate(items)
|
|
||||||
data = []
|
|
||||||
else:
|
|
||||||
is_list = False
|
|
||||||
iterator = items.iteritems()
|
|
||||||
data = {}
|
|
||||||
|
|
||||||
depth += 1
|
|
||||||
for k, v in iterator:
|
|
||||||
if is_list:
|
|
||||||
data.append(v)
|
|
||||||
else:
|
|
||||||
data[k] = v
|
|
||||||
|
|
||||||
if k in self.object_map:
|
|
||||||
data[k] = self.object_map[k]
|
|
||||||
elif hasattr(v, '_fields'):
|
|
||||||
for field_name, field in v._fields.iteritems():
|
|
||||||
v = data[k]._data.get(field_name, None)
|
|
||||||
if isinstance(v, (DBRef)):
|
|
||||||
data[k]._data[field_name] = self.object_map.get(v.id, v)
|
|
||||||
elif isinstance(v, (dict, SON)) and '_ref' in v:
|
|
||||||
data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v)
|
|
||||||
elif isinstance(v, dict) and depth <= self.max_depth:
|
|
||||||
data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
|
|
||||||
elif isinstance(v, (list, tuple)) and depth <= self.max_depth:
|
|
||||||
data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
|
|
||||||
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
|
|
||||||
data[k] = self._attach_objects(v, depth - 1, instance=instance, name=name)
|
|
||||||
elif hasattr(v, 'id'):
|
|
||||||
data[k] = self.object_map.get(v.id, v)
|
|
||||||
|
|
||||||
if instance and name:
|
|
||||||
if is_list:
|
|
||||||
return BaseList(data, instance, name)
|
|
||||||
return BaseDict(data, instance, name)
|
|
||||||
depth += 1
|
|
||||||
return data
|
|
@@ -1,50 +1,36 @@
|
|||||||
from mongoengine import *
|
from mongoengine import *
|
||||||
|
|
||||||
|
from django.utils.hashcompat import md5_constructor, sha_constructor
|
||||||
from django.utils.encoding import smart_str
|
from django.utils.encoding import smart_str
|
||||||
from django.contrib.auth.models import AnonymousUser
|
from django.contrib.auth.models import AnonymousUser
|
||||||
from django.contrib.auth.hashers import check_password, make_password
|
|
||||||
from django.utils.translation import ugettext_lazy as _
|
|
||||||
|
|
||||||
import datetime
|
import datetime
|
||||||
|
|
||||||
REDIRECT_FIELD_NAME = 'next'
|
REDIRECT_FIELD_NAME = 'next'
|
||||||
|
|
||||||
|
def get_hexdigest(algorithm, salt, raw_password):
|
||||||
|
raw_password, salt = smart_str(raw_password), smart_str(salt)
|
||||||
|
if algorithm == 'md5':
|
||||||
|
return md5_constructor(salt + raw_password).hexdigest()
|
||||||
|
elif algorithm == 'sha1':
|
||||||
|
return sha_constructor(salt + raw_password).hexdigest()
|
||||||
|
raise ValueError('Got unknown password algorithm type in password')
|
||||||
|
|
||||||
|
|
||||||
class User(Document):
|
class User(Document):
|
||||||
"""A User document that aims to mirror most of the API specified by Django
|
"""A User document that aims to mirror most of the API specified by Django
|
||||||
at http://docs.djangoproject.com/en/dev/topics/auth/#users
|
at http://docs.djangoproject.com/en/dev/topics/auth/#users
|
||||||
"""
|
"""
|
||||||
username = StringField(max_length=30, required=True,
|
username = StringField(max_length=30, required=True)
|
||||||
verbose_name=_('username'),
|
first_name = StringField(max_length=30)
|
||||||
help_text=_("Required. 30 characters or fewer. Letters, numbers and @/./+/-/_ characters"))
|
last_name = StringField(max_length=30)
|
||||||
|
email = StringField()
|
||||||
first_name = StringField(max_length=30,
|
password = StringField(max_length=128)
|
||||||
verbose_name=_('first name'))
|
is_staff = BooleanField(default=False)
|
||||||
|
is_active = BooleanField(default=True)
|
||||||
last_name = StringField(max_length=30,
|
is_superuser = BooleanField(default=False)
|
||||||
verbose_name=_('last name'))
|
last_login = DateTimeField(default=datetime.datetime.now)
|
||||||
email = EmailField(verbose_name=_('e-mail address'))
|
date_joined = DateTimeField(default=datetime.datetime.now)
|
||||||
password = StringField(max_length=128,
|
|
||||||
verbose_name=_('password'),
|
|
||||||
help_text=_("Use '[algo]$[iterations]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>."))
|
|
||||||
is_staff = BooleanField(default=False,
|
|
||||||
verbose_name=_('staff status'),
|
|
||||||
help_text=_("Designates whether the user can log into this admin site."))
|
|
||||||
is_active = BooleanField(default=True,
|
|
||||||
verbose_name=_('active'),
|
|
||||||
help_text=_("Designates whether this user should be treated as active. Unselect this instead of deleting accounts."))
|
|
||||||
is_superuser = BooleanField(default=False,
|
|
||||||
verbose_name=_('superuser status'),
|
|
||||||
help_text=_("Designates that this user has all permissions without explicitly assigning them."))
|
|
||||||
last_login = DateTimeField(default=datetime.datetime.now,
|
|
||||||
verbose_name=_('last login'))
|
|
||||||
date_joined = DateTimeField(default=datetime.datetime.now,
|
|
||||||
verbose_name=_('date joined'))
|
|
||||||
|
|
||||||
meta = {
|
|
||||||
'indexes': [
|
|
||||||
{'fields': ['username'], 'unique': True}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
def __unicode__(self):
|
def __unicode__(self):
|
||||||
return self.username
|
return self.username
|
||||||
@@ -66,7 +52,11 @@ class User(Document):
|
|||||||
assigning to :attr:`~mongoengine.django.auth.User.password` as the
|
assigning to :attr:`~mongoengine.django.auth.User.password` as the
|
||||||
password is hashed before storage.
|
password is hashed before storage.
|
||||||
"""
|
"""
|
||||||
self.password = make_password(raw_password)
|
from random import random
|
||||||
|
algo = 'sha1'
|
||||||
|
salt = get_hexdigest(algo, str(random()), str(random()))[:5]
|
||||||
|
hash = get_hexdigest(algo, salt, raw_password)
|
||||||
|
self.password = '%s$%s$%s' % (algo, salt, hash)
|
||||||
self.save()
|
self.save()
|
||||||
return self
|
return self
|
||||||
|
|
||||||
@@ -76,7 +66,8 @@ class User(Document):
|
|||||||
:attr:`~mongoengine.django.auth.User.password` as the password is
|
:attr:`~mongoengine.django.auth.User.password` as the password is
|
||||||
hashed before storage.
|
hashed before storage.
|
||||||
"""
|
"""
|
||||||
return check_password(raw_password, self.password)
|
algo, salt, hash = self.password.split('$')
|
||||||
|
return hash == get_hexdigest(algo, salt, raw_password)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def create_user(cls, username, password, email=None):
|
def create_user(cls, username, password, email=None):
|
||||||
@@ -95,7 +86,7 @@ class User(Document):
|
|||||||
else:
|
else:
|
||||||
email = '@'.join([email_name, domain_part.lower()])
|
email = '@'.join([email_name, domain_part.lower()])
|
||||||
|
|
||||||
user = cls(username=username, email=email, date_joined=now)
|
user = User(username=username, email=email, date_joined=now)
|
||||||
user.set_password(password)
|
user.set_password(password)
|
||||||
user.save()
|
user.save()
|
||||||
return user
|
return user
|
||||||
@@ -108,10 +99,6 @@ class MongoEngineBackend(object):
|
|||||||
"""Authenticate using MongoEngine and mongoengine.django.auth.User.
|
"""Authenticate using MongoEngine and mongoengine.django.auth.User.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
supports_object_permissions = False
|
|
||||||
supports_anonymous_user = False
|
|
||||||
supports_inactive_user = False
|
|
||||||
|
|
||||||
def authenticate(self, username=None, password=None):
|
def authenticate(self, username=None, password=None):
|
||||||
user = User.objects(username=username).first()
|
user = User.objects(username=username).first()
|
||||||
if user:
|
if user:
|
||||||
|
@@ -5,22 +5,16 @@ from django.utils.encoding import force_unicode
|
|||||||
from mongoengine.document import Document
|
from mongoengine.document import Document
|
||||||
from mongoengine import fields
|
from mongoengine import fields
|
||||||
from mongoengine.queryset import OperationError
|
from mongoengine.queryset import OperationError
|
||||||
from mongoengine.connection import DEFAULT_CONNECTION_NAME
|
|
||||||
from django.conf import settings
|
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
|
||||||
MONGOENGINE_SESSION_DB_ALIAS = getattr(
|
|
||||||
settings, 'MONGOENGINE_SESSION_DB_ALIAS',
|
|
||||||
DEFAULT_CONNECTION_NAME)
|
|
||||||
|
|
||||||
class MongoSession(Document):
|
class MongoSession(Document):
|
||||||
session_key = fields.StringField(primary_key=True, max_length=40)
|
session_key = fields.StringField(primary_key=True, max_length=40)
|
||||||
session_data = fields.StringField()
|
session_data = fields.StringField()
|
||||||
expire_date = fields.DateTimeField()
|
expire_date = fields.DateTimeField()
|
||||||
|
|
||||||
meta = {'collection': 'django_session',
|
meta = {'collection': 'django_session', 'allow_inheritance': False}
|
||||||
'db_alias': MONGOENGINE_SESSION_DB_ALIAS,
|
|
||||||
'allow_inheritance': False}
|
|
||||||
|
|
||||||
|
|
||||||
class SessionStore(SessionBase):
|
class SessionStore(SessionBase):
|
||||||
@@ -41,7 +35,7 @@ class SessionStore(SessionBase):
|
|||||||
|
|
||||||
def create(self):
|
def create(self):
|
||||||
while True:
|
while True:
|
||||||
self._session_key = self._get_new_session_key()
|
self.session_key = self._get_new_session_key()
|
||||||
try:
|
try:
|
||||||
self.save(must_create=True)
|
self.save(must_create=True)
|
||||||
except CreateError:
|
except CreateError:
|
||||||
@@ -51,9 +45,7 @@ class SessionStore(SessionBase):
|
|||||||
return
|
return
|
||||||
|
|
||||||
def save(self, must_create=False):
|
def save(self, must_create=False):
|
||||||
if self._session_key is None:
|
s = MongoSession(session_key=self.session_key)
|
||||||
self.create()
|
|
||||||
s = MongoSession(session_key=self._session_key)
|
|
||||||
s.session_data = self.encode(self._get_session(no_load=must_create))
|
s.session_data = self.encode(self._get_session(no_load=must_create))
|
||||||
s.expire_date = self.get_expiry_date()
|
s.expire_date = self.get_expiry_date()
|
||||||
try:
|
try:
|
||||||
|
@@ -1,7 +1,6 @@
|
|||||||
from django.http import Http404
|
from django.http import Http404
|
||||||
from mongoengine.queryset import QuerySet
|
from mongoengine.queryset import QuerySet
|
||||||
from mongoengine.base import BaseDocument
|
from mongoengine.base import BaseDocument
|
||||||
from mongoengine.base import ValidationError
|
|
||||||
|
|
||||||
def _get_queryset(cls):
|
def _get_queryset(cls):
|
||||||
"""Inspired by django.shortcuts.*"""
|
"""Inspired by django.shortcuts.*"""
|
||||||
@@ -26,7 +25,7 @@ def get_document_or_404(cls, *args, **kwargs):
|
|||||||
queryset = _get_queryset(cls)
|
queryset = _get_queryset(cls)
|
||||||
try:
|
try:
|
||||||
return queryset.get(*args, **kwargs)
|
return queryset.get(*args, **kwargs)
|
||||||
except (queryset._document.DoesNotExist, ValidationError):
|
except queryset._document.DoesNotExist:
|
||||||
raise Http404('No %s matches the given query.' % queryset._document._class_name)
|
raise Http404('No %s matches the given query.' % queryset._document._class_name)
|
||||||
|
|
||||||
def get_list_or_404(cls, *args, **kwargs):
|
def get_list_or_404(cls, *args, **kwargs):
|
||||||
|
@@ -1,18 +1,12 @@
|
|||||||
import pymongo
|
|
||||||
from bson.dbref import DBRef
|
|
||||||
|
|
||||||
from mongoengine import signals
|
|
||||||
from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument,
|
from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument,
|
||||||
BaseDict, BaseList)
|
ValidationError)
|
||||||
from queryset import OperationError
|
from queryset import OperationError
|
||||||
from connection import get_db, DEFAULT_CONNECTION_NAME
|
from connection import _get_db
|
||||||
|
|
||||||
__all__ = ['Document', 'EmbeddedDocument', 'DynamicDocument',
|
import pymongo
|
||||||
'DynamicEmbeddedDocument', 'OperationError', 'InvalidCollectionError']
|
|
||||||
|
|
||||||
|
|
||||||
class InvalidCollectionError(Exception):
|
__all__ = ['Document', 'EmbeddedDocument', 'ValidationError', 'OperationError']
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class EmbeddedDocument(BaseDocument):
|
class EmbeddedDocument(BaseDocument):
|
||||||
@@ -24,21 +18,6 @@ class EmbeddedDocument(BaseDocument):
|
|||||||
|
|
||||||
__metaclass__ = DocumentMetaclass
|
__metaclass__ = DocumentMetaclass
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
super(EmbeddedDocument, self).__init__(*args, **kwargs)
|
|
||||||
self._changed_fields = []
|
|
||||||
|
|
||||||
def __delattr__(self, *args, **kwargs):
|
|
||||||
"""Handle deletions of fields"""
|
|
||||||
field_name = args[0]
|
|
||||||
if field_name in self._fields:
|
|
||||||
default = self._fields[field_name].default
|
|
||||||
if callable(default):
|
|
||||||
default = default()
|
|
||||||
setattr(self, field_name, default)
|
|
||||||
else:
|
|
||||||
super(EmbeddedDocument, self).__delattr__(*args, **kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
class Document(BaseDocument):
|
class Document(BaseDocument):
|
||||||
"""The base class used for defining the structure and properties of
|
"""The base class used for defining the structure and properties of
|
||||||
@@ -73,65 +52,11 @@ class Document(BaseDocument):
|
|||||||
dictionary. The value should be a list of field names or tuples of field
|
dictionary. The value should be a list of field names or tuples of field
|
||||||
names. Index direction may be specified by prefixing the field names with
|
names. Index direction may be specified by prefixing the field names with
|
||||||
a **+** or **-** sign.
|
a **+** or **-** sign.
|
||||||
|
|
||||||
By default, _types will be added to the start of every index (that
|
|
||||||
doesn't contain a list) if allow_inheritence is True. This can be
|
|
||||||
disabled by either setting types to False on the specific index or
|
|
||||||
by setting index_types to False on the meta dictionary for the document.
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
__metaclass__ = TopLevelDocumentMetaclass
|
__metaclass__ = TopLevelDocumentMetaclass
|
||||||
|
|
||||||
@apply
|
def save(self, safe=True, force_insert=False, validate=True):
|
||||||
def pk():
|
|
||||||
"""Primary key alias
|
|
||||||
"""
|
|
||||||
def fget(self):
|
|
||||||
return getattr(self, self._meta['id_field'])
|
|
||||||
def fset(self, value):
|
|
||||||
return setattr(self, self._meta['id_field'], value)
|
|
||||||
return property(fget, fset)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def _get_db(cls):
|
|
||||||
"""Some Model using other db_alias"""
|
|
||||||
return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME ))
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def _get_collection(cls):
|
|
||||||
"""Returns the collection for the document."""
|
|
||||||
if not hasattr(cls, '_collection') or cls._collection is None:
|
|
||||||
db = cls._get_db()
|
|
||||||
collection_name = cls._get_collection_name()
|
|
||||||
# Create collection as a capped collection if specified
|
|
||||||
if cls._meta['max_size'] or cls._meta['max_documents']:
|
|
||||||
# Get max document limit and max byte size from meta
|
|
||||||
max_size = cls._meta['max_size'] or 10000000 # 10MB default
|
|
||||||
max_documents = cls._meta['max_documents']
|
|
||||||
|
|
||||||
if collection_name in db.collection_names():
|
|
||||||
cls._collection = db[collection_name]
|
|
||||||
# The collection already exists, check if its capped
|
|
||||||
# options match the specified capped options
|
|
||||||
options = cls._collection.options()
|
|
||||||
if options.get('max') != max_documents or \
|
|
||||||
options.get('size') != max_size:
|
|
||||||
msg = ('Cannot create collection "%s" as a capped '
|
|
||||||
'collection as it already exists') % cls._collection
|
|
||||||
raise InvalidCollectionError(msg)
|
|
||||||
else:
|
|
||||||
# Create the collection as a capped collection
|
|
||||||
opts = {'capped': True, 'size': max_size}
|
|
||||||
if max_documents:
|
|
||||||
opts['max'] = max_documents
|
|
||||||
cls._collection = db.create_collection(
|
|
||||||
collection_name, **opts
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
cls._collection = db[collection_name]
|
|
||||||
return cls._collection
|
|
||||||
|
|
||||||
def save(self, safe=True, force_insert=False, validate=True, write_options=None,
|
|
||||||
cascade=None, cascade_kwargs=None, _refs=None):
|
|
||||||
"""Save the :class:`~mongoengine.Document` to the database. If the
|
"""Save the :class:`~mongoengine.Document` to the database. If the
|
||||||
document already exists, it will be updated, otherwise it will be
|
document already exists, it will be updated, otherwise it will be
|
||||||
created.
|
created.
|
||||||
@@ -142,79 +67,17 @@ class Document(BaseDocument):
|
|||||||
:param safe: check if the operation succeeded before returning
|
:param safe: check if the operation succeeded before returning
|
||||||
:param force_insert: only try to create a new document, don't allow
|
:param force_insert: only try to create a new document, don't allow
|
||||||
updates of existing documents
|
updates of existing documents
|
||||||
:param validate: validates the document; set to ``False`` to skip.
|
:param validate: validates the document; set to ``False`` for skiping
|
||||||
:param write_options: Extra keyword arguments are passed down to
|
|
||||||
:meth:`~pymongo.collection.Collection.save` OR
|
|
||||||
:meth:`~pymongo.collection.Collection.insert`
|
|
||||||
which will be used as options for the resultant ``getLastError`` command.
|
|
||||||
For example, ``save(..., w=2, fsync=True)`` will wait until at least two servers
|
|
||||||
have recorded the write and will force an fsync on each server being written to.
|
|
||||||
:param cascade: Sets the flag for cascading saves. You can set a default by setting
|
|
||||||
"cascade" in the document __meta__
|
|
||||||
:param cascade_kwargs: optional kwargs dictionary to be passed throw to cascading saves
|
|
||||||
:param _refs: A list of processed references used in cascading saves
|
|
||||||
|
|
||||||
.. versionchanged:: 0.5
|
|
||||||
In existing documents it only saves changed fields using set / unset
|
|
||||||
Saves are cascaded and any :class:`~bson.dbref.DBRef` objects
|
|
||||||
that have changes are saved as well.
|
|
||||||
.. versionchanged:: 0.6
|
|
||||||
Cascade saves are optional = defaults to True, if you want fine grain
|
|
||||||
control then you can turn off using document meta['cascade'] = False
|
|
||||||
Also you can pass different kwargs to the cascade save using cascade_kwargs
|
|
||||||
which overwrites the existing kwargs with custom values
|
|
||||||
|
|
||||||
"""
|
"""
|
||||||
signals.pre_save.send(self.__class__, document=self)
|
|
||||||
|
|
||||||
if validate:
|
if validate:
|
||||||
self.validate()
|
self.validate()
|
||||||
|
|
||||||
if not write_options:
|
|
||||||
write_options = {}
|
|
||||||
|
|
||||||
doc = self.to_mongo()
|
doc = self.to_mongo()
|
||||||
|
|
||||||
created = force_insert or '_id' not in doc
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
collection = self.__class__.objects._collection
|
collection = self.__class__.objects._collection
|
||||||
if created:
|
|
||||||
if force_insert:
|
if force_insert:
|
||||||
object_id = collection.insert(doc, safe=safe, **write_options)
|
object_id = collection.insert(doc, safe=safe)
|
||||||
else:
|
else:
|
||||||
object_id = collection.save(doc, safe=safe, **write_options)
|
object_id = collection.save(doc, safe=safe)
|
||||||
else:
|
|
||||||
object_id = doc['_id']
|
|
||||||
updates, removals = self._delta()
|
|
||||||
|
|
||||||
# Need to add shard key to query, or you get an error
|
|
||||||
select_dict = {'_id': object_id}
|
|
||||||
shard_key = self.__class__._meta.get('shard_key', tuple())
|
|
||||||
for k in shard_key:
|
|
||||||
actual_key = self._db_field_map.get(k, k)
|
|
||||||
select_dict[actual_key] = doc[actual_key]
|
|
||||||
|
|
||||||
upsert = self._created
|
|
||||||
if updates:
|
|
||||||
collection.update(select_dict, {"$set": updates}, upsert=upsert, safe=safe, **write_options)
|
|
||||||
if removals:
|
|
||||||
collection.update(select_dict, {"$unset": removals}, upsert=upsert, safe=safe, **write_options)
|
|
||||||
|
|
||||||
cascade = self._meta.get('cascade', True) if cascade is None else cascade
|
|
||||||
if cascade:
|
|
||||||
kwargs = {
|
|
||||||
"safe": safe,
|
|
||||||
"force_insert": force_insert,
|
|
||||||
"validate": validate,
|
|
||||||
"write_options": write_options,
|
|
||||||
"cascade": cascade
|
|
||||||
}
|
|
||||||
if cascade_kwargs: # Allow granular control over cascades
|
|
||||||
kwargs.update(cascade_kwargs)
|
|
||||||
kwargs['_refs'] = _refs
|
|
||||||
self.cascade_save(**kwargs)
|
|
||||||
|
|
||||||
except pymongo.errors.OperationFailure, err:
|
except pymongo.errors.OperationFailure, err:
|
||||||
message = 'Could not save document (%s)'
|
message = 'Could not save document (%s)'
|
||||||
if u'duplicate key' in unicode(err):
|
if u'duplicate key' in unicode(err):
|
||||||
@@ -223,168 +86,37 @@ class Document(BaseDocument):
|
|||||||
id_field = self._meta['id_field']
|
id_field = self._meta['id_field']
|
||||||
self[id_field] = self._fields[id_field].to_python(object_id)
|
self[id_field] = self._fields[id_field].to_python(object_id)
|
||||||
|
|
||||||
self._changed_fields = []
|
|
||||||
self._created = False
|
|
||||||
signals.post_save.send(self.__class__, document=self, created=created)
|
|
||||||
|
|
||||||
def cascade_save(self, *args, **kwargs):
|
|
||||||
"""Recursively saves any references / generic references on an object"""
|
|
||||||
from fields import ReferenceField, GenericReferenceField
|
|
||||||
_refs = kwargs.get('_refs', []) or []
|
|
||||||
for name, cls in self._fields.items():
|
|
||||||
if not isinstance(cls, (ReferenceField, GenericReferenceField)):
|
|
||||||
continue
|
|
||||||
ref = getattr(self, name)
|
|
||||||
if not ref:
|
|
||||||
continue
|
|
||||||
ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data))
|
|
||||||
if ref and ref_id not in _refs:
|
|
||||||
_refs.append(ref_id)
|
|
||||||
kwargs["_refs"] = _refs
|
|
||||||
ref.save(**kwargs)
|
|
||||||
ref._changed_fields = []
|
|
||||||
|
|
||||||
def update(self, **kwargs):
|
|
||||||
"""Performs an update on the :class:`~mongoengine.Document`
|
|
||||||
A convenience wrapper to :meth:`~mongoengine.QuerySet.update`.
|
|
||||||
|
|
||||||
Raises :class:`OperationError` if called on an object that has not yet
|
|
||||||
been saved.
|
|
||||||
"""
|
|
||||||
if not self.pk:
|
|
||||||
raise OperationError('attempt to update a document not yet saved')
|
|
||||||
|
|
||||||
# Need to add shard key to query, or you get an error
|
|
||||||
select_dict = {'pk': self.pk}
|
|
||||||
shard_key = self.__class__._meta.get('shard_key', tuple())
|
|
||||||
for k in shard_key:
|
|
||||||
select_dict[k] = getattr(self, k)
|
|
||||||
return self.__class__.objects(**select_dict).update_one(**kwargs)
|
|
||||||
|
|
||||||
def delete(self, safe=False):
|
def delete(self, safe=False):
|
||||||
"""Delete the :class:`~mongoengine.Document` from the database. This
|
"""Delete the :class:`~mongoengine.Document` from the database. This
|
||||||
will only take effect if the document has been previously saved.
|
will only take effect if the document has been previously saved.
|
||||||
|
|
||||||
:param safe: check if the operation succeeded before returning
|
:param safe: check if the operation succeeded before returning
|
||||||
"""
|
"""
|
||||||
signals.pre_delete.send(self.__class__, document=self)
|
id_field = self._meta['id_field']
|
||||||
|
object_id = self._fields[id_field].to_mongo(self[id_field])
|
||||||
try:
|
try:
|
||||||
self.__class__.objects(pk=self.pk).delete(safe=safe)
|
self.__class__.objects(**{id_field: object_id}).delete(safe=safe)
|
||||||
except pymongo.errors.OperationFailure, err:
|
except pymongo.errors.OperationFailure, err:
|
||||||
message = u'Could not delete document (%s)' % err.message
|
message = u'Could not delete document (%s)' % err.message
|
||||||
raise OperationError(message)
|
raise OperationError(message)
|
||||||
|
|
||||||
signals.post_delete.send(self.__class__, document=self)
|
def reload(self):
|
||||||
|
|
||||||
def select_related(self, max_depth=1):
|
|
||||||
"""Handles dereferencing of :class:`~bson.dbref.DBRef` objects to
|
|
||||||
a maximum depth in order to cut down the number queries to mongodb.
|
|
||||||
|
|
||||||
.. versionadded:: 0.5
|
|
||||||
"""
|
|
||||||
from dereference import DeReference
|
|
||||||
self._data = DeReference()(self._data, max_depth)
|
|
||||||
return self
|
|
||||||
|
|
||||||
def reload(self, max_depth=1):
|
|
||||||
"""Reloads all attributes from the database.
|
"""Reloads all attributes from the database.
|
||||||
|
|
||||||
.. versionadded:: 0.1.2
|
.. versionadded:: 0.1.2
|
||||||
.. versionchanged:: 0.6 Now chainable
|
|
||||||
"""
|
"""
|
||||||
id_field = self._meta['id_field']
|
id_field = self._meta['id_field']
|
||||||
obj = self.__class__.objects(
|
obj = self.__class__.objects(**{id_field: self[id_field]}).first()
|
||||||
**{id_field: self[id_field]}
|
|
||||||
).first().select_related(max_depth=max_depth)
|
|
||||||
for field in self._fields:
|
for field in self._fields:
|
||||||
setattr(self, field, self._reload(field, obj[field]))
|
setattr(self, field, obj[field])
|
||||||
if self._dynamic:
|
|
||||||
for name in self._dynamic_fields.keys():
|
|
||||||
setattr(self, name, self._reload(name, obj._data[name]))
|
|
||||||
self._changed_fields = obj._changed_fields
|
|
||||||
return obj
|
|
||||||
|
|
||||||
def _reload(self, key, value):
|
|
||||||
"""Used by :meth:`~mongoengine.Document.reload` to ensure the
|
|
||||||
correct instance is linked to self.
|
|
||||||
"""
|
|
||||||
if isinstance(value, BaseDict):
|
|
||||||
value = [(k, self._reload(k, v)) for k, v in value.items()]
|
|
||||||
value = BaseDict(value, self, key)
|
|
||||||
elif isinstance(value, BaseList):
|
|
||||||
value = [self._reload(key, v) for v in value]
|
|
||||||
value = BaseList(value, self, key)
|
|
||||||
elif isinstance(value, (EmbeddedDocument, DynamicEmbeddedDocument)):
|
|
||||||
value._changed_fields = []
|
|
||||||
return value
|
|
||||||
|
|
||||||
def to_dbref(self):
|
|
||||||
"""Returns an instance of :class:`~bson.dbref.DBRef` useful in
|
|
||||||
`__raw__` queries."""
|
|
||||||
if not self.pk:
|
|
||||||
msg = "Only saved documents can have a valid dbref"
|
|
||||||
raise OperationError(msg)
|
|
||||||
return DBRef(self.__class__._get_collection_name(), self.pk)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def register_delete_rule(cls, document_cls, field_name, rule):
|
|
||||||
"""This method registers the delete rules to apply when removing this
|
|
||||||
object.
|
|
||||||
"""
|
|
||||||
cls._meta['delete_rules'][(document_cls, field_name)] = rule
|
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def drop_collection(cls):
|
def drop_collection(cls):
|
||||||
"""Drops the entire collection associated with this
|
"""Drops the entire collection associated with this
|
||||||
:class:`~mongoengine.Document` type from the database.
|
:class:`~mongoengine.Document` type from the database.
|
||||||
"""
|
"""
|
||||||
from mongoengine.queryset import QuerySet
|
db = _get_db()
|
||||||
db = cls._get_db()
|
db.drop_collection(cls._meta['collection'])
|
||||||
db.drop_collection(cls._get_collection_name())
|
|
||||||
QuerySet._reset_already_indexed(cls)
|
|
||||||
|
|
||||||
|
|
||||||
class DynamicDocument(Document):
|
|
||||||
"""A Dynamic Document class allowing flexible, expandable and uncontrolled
|
|
||||||
schemas. As a :class:`~mongoengine.Document` subclass, acts in the same
|
|
||||||
way as an ordinary document but has expando style properties. Any data
|
|
||||||
passed or set against the :class:`~mongoengine.DynamicDocument` that is
|
|
||||||
not a field is automatically converted into a
|
|
||||||
:class:`~mongoengine.BaseDynamicField` and data can be attributed to that
|
|
||||||
field.
|
|
||||||
|
|
||||||
..note::
|
|
||||||
|
|
||||||
There is one caveat on Dynamic Documents: fields cannot start with `_`
|
|
||||||
"""
|
|
||||||
__metaclass__ = TopLevelDocumentMetaclass
|
|
||||||
_dynamic = True
|
|
||||||
|
|
||||||
def __delattr__(self, *args, **kwargs):
|
|
||||||
"""Deletes the attribute by setting to None and allowing _delta to unset
|
|
||||||
it"""
|
|
||||||
field_name = args[0]
|
|
||||||
if field_name in self._dynamic_fields:
|
|
||||||
setattr(self, field_name, None)
|
|
||||||
else:
|
|
||||||
super(DynamicDocument, self).__delattr__(*args, **kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
class DynamicEmbeddedDocument(EmbeddedDocument):
|
|
||||||
"""A Dynamic Embedded Document class allowing flexible, expandable and
|
|
||||||
uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more
|
|
||||||
information about dynamic documents.
|
|
||||||
"""
|
|
||||||
|
|
||||||
__metaclass__ = DocumentMetaclass
|
|
||||||
_dynamic = True
|
|
||||||
|
|
||||||
def __delattr__(self, *args, **kwargs):
|
|
||||||
"""Deletes the attribute by setting to None and allowing _delta to unset
|
|
||||||
it"""
|
|
||||||
field_name = args[0]
|
|
||||||
setattr(self, field_name, None)
|
|
||||||
|
|
||||||
|
|
||||||
class MapReduceDocument(object):
|
class MapReduceDocument(object):
|
||||||
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,46 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
__all__ = ['pre_init', 'post_init', 'pre_save', 'post_save',
|
|
||||||
'pre_delete', 'post_delete']
|
|
||||||
|
|
||||||
signals_available = False
|
|
||||||
try:
|
|
||||||
from blinker import Namespace
|
|
||||||
signals_available = True
|
|
||||||
except ImportError:
|
|
||||||
class Namespace(object):
|
|
||||||
def signal(self, name, doc=None):
|
|
||||||
return _FakeSignal(name, doc)
|
|
||||||
|
|
||||||
class _FakeSignal(object):
|
|
||||||
"""If blinker is unavailable, create a fake class with the same
|
|
||||||
interface that allows sending of signals but will fail with an
|
|
||||||
error on anything else. Instead of doing anything on send, it
|
|
||||||
will just ignore the arguments and do nothing instead.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, name, doc=None):
|
|
||||||
self.name = name
|
|
||||||
self.__doc__ = doc
|
|
||||||
|
|
||||||
def _fail(self, *args, **kwargs):
|
|
||||||
raise RuntimeError('signalling support is unavailable '
|
|
||||||
'because the blinker library is '
|
|
||||||
'not installed.')
|
|
||||||
send = lambda *a, **kw: None
|
|
||||||
connect = disconnect = has_receivers_for = receivers_for = \
|
|
||||||
temporarily_connected_to = _fail
|
|
||||||
del _fail
|
|
||||||
|
|
||||||
# the namespace for code signals. If you are not mongoengine code, do
|
|
||||||
# not put signals in here. Create your own namespace instead.
|
|
||||||
_signals = Namespace()
|
|
||||||
|
|
||||||
pre_init = _signals.signal('pre_init')
|
|
||||||
post_init = _signals.signal('post_init')
|
|
||||||
pre_save = _signals.signal('pre_save')
|
|
||||||
post_save = _signals.signal('post_save')
|
|
||||||
pre_delete = _signals.signal('pre_delete')
|
|
||||||
post_delete = _signals.signal('post_delete')
|
|
||||||
pre_bulk_insert = _signals.signal('pre_bulk_insert')
|
|
||||||
post_bulk_insert = _signals.signal('post_bulk_insert')
|
|
@@ -1,59 +0,0 @@
|
|||||||
from mongoengine.connection import get_db
|
|
||||||
|
|
||||||
|
|
||||||
class query_counter(object):
|
|
||||||
""" Query_counter contextmanager to get the number of queries. """
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
""" Construct the query_counter. """
|
|
||||||
self.counter = 0
|
|
||||||
self.db = get_db()
|
|
||||||
|
|
||||||
def __enter__(self):
|
|
||||||
""" On every with block we need to drop the profile collection. """
|
|
||||||
self.db.set_profiling_level(0)
|
|
||||||
self.db.system.profile.drop()
|
|
||||||
self.db.set_profiling_level(2)
|
|
||||||
return self
|
|
||||||
|
|
||||||
def __exit__(self, t, value, traceback):
|
|
||||||
""" Reset the profiling level. """
|
|
||||||
self.db.set_profiling_level(0)
|
|
||||||
|
|
||||||
def __eq__(self, value):
|
|
||||||
""" == Compare querycounter. """
|
|
||||||
return value == self._get_count()
|
|
||||||
|
|
||||||
def __ne__(self, value):
|
|
||||||
""" != Compare querycounter. """
|
|
||||||
return not self.__eq__(value)
|
|
||||||
|
|
||||||
def __lt__(self, value):
|
|
||||||
""" < Compare querycounter. """
|
|
||||||
return self._get_count() < value
|
|
||||||
|
|
||||||
def __le__(self, value):
|
|
||||||
""" <= Compare querycounter. """
|
|
||||||
return self._get_count() <= value
|
|
||||||
|
|
||||||
def __gt__(self, value):
|
|
||||||
""" > Compare querycounter. """
|
|
||||||
return self._get_count() > value
|
|
||||||
|
|
||||||
def __ge__(self, value):
|
|
||||||
""" >= Compare querycounter. """
|
|
||||||
return self._get_count() >= value
|
|
||||||
|
|
||||||
def __int__(self):
|
|
||||||
""" int representation. """
|
|
||||||
return self._get_count()
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
""" repr query_counter as the number of queries. """
|
|
||||||
return u"%s" % self._get_count()
|
|
||||||
|
|
||||||
def _get_count(self):
|
|
||||||
""" Get the number of queries. """
|
|
||||||
count = self.db.system.profile.find().count() - self.counter
|
|
||||||
self.counter += 1
|
|
||||||
return count
|
|
@@ -1,62 +0,0 @@
|
|||||||
# sitelib for noarch packages, sitearch for others (remove the unneeded one)
|
|
||||||
%{!?python_sitelib: %global python_sitelib %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())")}
|
|
||||||
%{!?python_sitearch: %global python_sitearch %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib(1))")}
|
|
||||||
|
|
||||||
%define srcname mongoengine
|
|
||||||
|
|
||||||
Name: python-%{srcname}
|
|
||||||
Version: 0.6.3
|
|
||||||
Release: 1%{?dist}
|
|
||||||
Summary: A Python Document-Object Mapper for working with MongoDB
|
|
||||||
|
|
||||||
Group: Development/Libraries
|
|
||||||
License: MIT
|
|
||||||
URL: https://github.com/MongoEngine/mongoengine
|
|
||||||
Source0: %{srcname}-%{version}.tar.bz2
|
|
||||||
|
|
||||||
BuildRequires: python-devel
|
|
||||||
BuildRequires: python-setuptools
|
|
||||||
|
|
||||||
Requires: mongodb
|
|
||||||
Requires: pymongo
|
|
||||||
Requires: python-blinker
|
|
||||||
Requires: python-imaging
|
|
||||||
|
|
||||||
|
|
||||||
%description
|
|
||||||
MongoEngine is an ORM-like layer on top of PyMongo.
|
|
||||||
|
|
||||||
%prep
|
|
||||||
%setup -q -n %{srcname}-%{version}
|
|
||||||
|
|
||||||
|
|
||||||
%build
|
|
||||||
# Remove CFLAGS=... for noarch packages (unneeded)
|
|
||||||
CFLAGS="$RPM_OPT_FLAGS" %{__python} setup.py build
|
|
||||||
|
|
||||||
|
|
||||||
%install
|
|
||||||
rm -rf $RPM_BUILD_ROOT
|
|
||||||
%{__python} setup.py install -O1 --skip-build --root $RPM_BUILD_ROOT
|
|
||||||
|
|
||||||
%clean
|
|
||||||
rm -rf $RPM_BUILD_ROOT
|
|
||||||
|
|
||||||
%files
|
|
||||||
%defattr(-,root,root,-)
|
|
||||||
%doc docs AUTHORS LICENSE README.rst
|
|
||||||
# For noarch packages: sitelib
|
|
||||||
%{python_sitelib}/*
|
|
||||||
# For arch-specific packages: sitearch
|
|
||||||
# %{python_sitearch}/*
|
|
||||||
|
|
||||||
%changelog
|
|
||||||
* Mon Mar 05 2012 Ross Lawley <ross.lawley@gmail.com> 0.6
|
|
||||||
- 0.6 released
|
|
||||||
* Thu Oct 27 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.3-1
|
|
||||||
- Update to latest dev version
|
|
||||||
- Add PIL dependency for ImageField
|
|
||||||
* Wed Oct 12 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.2-1
|
|
||||||
- Update version
|
|
||||||
* Fri Sep 23 2011 Pau Aliagas <linuxnow@gmail.com> 0.5.0-1
|
|
||||||
- Initial version
|
|
@@ -1 +0,0 @@
|
|||||||
pymongo
|
|
5
setup.py
5
setup.py
@@ -38,9 +38,7 @@ setup(name='mongoengine',
|
|||||||
packages=find_packages(),
|
packages=find_packages(),
|
||||||
author='Harry Marr',
|
author='Harry Marr',
|
||||||
author_email='harry.marr@{nospam}gmail.com',
|
author_email='harry.marr@{nospam}gmail.com',
|
||||||
maintainer="Ross Lawley",
|
url='http://hmarr.com/mongoengine/',
|
||||||
maintainer_email="ross.lawley@{nospam}gmail.com",
|
|
||||||
url='http://mongoengine.org/',
|
|
||||||
license='MIT',
|
license='MIT',
|
||||||
include_package_data=True,
|
include_package_data=True,
|
||||||
description=DESCRIPTION,
|
description=DESCRIPTION,
|
||||||
@@ -49,5 +47,4 @@ setup(name='mongoengine',
|
|||||||
classifiers=CLASSIFIERS,
|
classifiers=CLASSIFIERS,
|
||||||
install_requires=['pymongo'],
|
install_requires=['pymongo'],
|
||||||
test_suite='tests',
|
test_suite='tests',
|
||||||
tests_require=['blinker', 'django>=1.3', 'PIL']
|
|
||||||
)
|
)
|
||||||
|
@@ -1,70 +0,0 @@
|
|||||||
import unittest
|
|
||||||
import pymongo
|
|
||||||
|
|
||||||
import mongoengine.connection
|
|
||||||
|
|
||||||
from mongoengine import *
|
|
||||||
from mongoengine.connection import get_db, get_connection, ConnectionError
|
|
||||||
|
|
||||||
|
|
||||||
class ConnectionTest(unittest.TestCase):
|
|
||||||
|
|
||||||
def tearDown(self):
|
|
||||||
mongoengine.connection._connection_settings = {}
|
|
||||||
mongoengine.connection._connections = {}
|
|
||||||
mongoengine.connection._dbs = {}
|
|
||||||
|
|
||||||
def test_connect(self):
|
|
||||||
"""Ensure that the connect() method works properly.
|
|
||||||
"""
|
|
||||||
connect('mongoenginetest')
|
|
||||||
|
|
||||||
conn = get_connection()
|
|
||||||
self.assertTrue(isinstance(conn, pymongo.connection.Connection))
|
|
||||||
|
|
||||||
db = get_db()
|
|
||||||
self.assertTrue(isinstance(db, pymongo.database.Database))
|
|
||||||
self.assertEqual(db.name, 'mongoenginetest')
|
|
||||||
|
|
||||||
connect('mongoenginetest2', alias='testdb')
|
|
||||||
conn = get_connection('testdb')
|
|
||||||
self.assertTrue(isinstance(conn, pymongo.connection.Connection))
|
|
||||||
|
|
||||||
def test_connect_uri(self):
|
|
||||||
"""Ensure that the connect() method works properly with uri's
|
|
||||||
"""
|
|
||||||
c = connect(db='mongoenginetest', alias='admin')
|
|
||||||
c.admin.system.users.remove({})
|
|
||||||
c.mongoenginetest.system.users.remove({})
|
|
||||||
|
|
||||||
c.admin.add_user("admin", "password")
|
|
||||||
c.admin.authenticate("admin", "password")
|
|
||||||
c.mongoenginetest.add_user("username", "password")
|
|
||||||
|
|
||||||
self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost')
|
|
||||||
|
|
||||||
connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest')
|
|
||||||
|
|
||||||
conn = get_connection()
|
|
||||||
self.assertTrue(isinstance(conn, pymongo.connection.Connection))
|
|
||||||
|
|
||||||
db = get_db()
|
|
||||||
self.assertTrue(isinstance(db, pymongo.database.Database))
|
|
||||||
self.assertEqual(db.name, 'mongoenginetest')
|
|
||||||
|
|
||||||
def test_register_connection(self):
|
|
||||||
"""Ensure that connections with different aliases may be registered.
|
|
||||||
"""
|
|
||||||
register_connection('testdb', 'mongoenginetest2')
|
|
||||||
|
|
||||||
self.assertRaises(ConnectionError, get_connection)
|
|
||||||
conn = get_connection('testdb')
|
|
||||||
self.assertTrue(isinstance(conn, pymongo.connection.Connection))
|
|
||||||
|
|
||||||
db = get_db('testdb')
|
|
||||||
self.assertTrue(isinstance(db, pymongo.database.Database))
|
|
||||||
self.assertEqual(db.name, 'mongoenginetest2')
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
unittest.main()
|
|
@@ -1,812 +0,0 @@
|
|||||||
import unittest
|
|
||||||
|
|
||||||
from mongoengine import *
|
|
||||||
from mongoengine.connection import get_db
|
|
||||||
from mongoengine.tests import query_counter
|
|
||||||
|
|
||||||
|
|
||||||
class FieldTest(unittest.TestCase):
|
|
||||||
|
|
||||||
def setUp(self):
|
|
||||||
connect(db='mongoenginetest')
|
|
||||||
self.db = get_db()
|
|
||||||
|
|
||||||
def test_list_item_dereference(self):
|
|
||||||
"""Ensure that DBRef items in ListFields are dereferenced.
|
|
||||||
"""
|
|
||||||
class User(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
class Group(Document):
|
|
||||||
members = ListField(ReferenceField(User))
|
|
||||||
|
|
||||||
User.drop_collection()
|
|
||||||
Group.drop_collection()
|
|
||||||
|
|
||||||
for i in xrange(1, 51):
|
|
||||||
user = User(name='user %s' % i)
|
|
||||||
user.save()
|
|
||||||
|
|
||||||
group = Group(members=User.objects)
|
|
||||||
group.save()
|
|
||||||
|
|
||||||
group = Group(members=User.objects)
|
|
||||||
group.save()
|
|
||||||
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
|
|
||||||
group_obj = Group.objects.first()
|
|
||||||
self.assertEqual(q, 1)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 2)
|
|
||||||
|
|
||||||
# Document select_related
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
|
|
||||||
group_obj = Group.objects.first().select_related()
|
|
||||||
self.assertEqual(q, 2)
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 2)
|
|
||||||
|
|
||||||
# Queryset select_related
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
group_objs = Group.objects.select_related()
|
|
||||||
self.assertEqual(q, 2)
|
|
||||||
for group_obj in group_objs:
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 2)
|
|
||||||
|
|
||||||
User.drop_collection()
|
|
||||||
Group.drop_collection()
|
|
||||||
|
|
||||||
def test_recursive_reference(self):
|
|
||||||
"""Ensure that ReferenceFields can reference their own documents.
|
|
||||||
"""
|
|
||||||
class Employee(Document):
|
|
||||||
name = StringField()
|
|
||||||
boss = ReferenceField('self')
|
|
||||||
friends = ListField(ReferenceField('self'))
|
|
||||||
|
|
||||||
Employee.drop_collection()
|
|
||||||
|
|
||||||
bill = Employee(name='Bill Lumbergh')
|
|
||||||
bill.save()
|
|
||||||
|
|
||||||
michael = Employee(name='Michael Bolton')
|
|
||||||
michael.save()
|
|
||||||
|
|
||||||
samir = Employee(name='Samir Nagheenanajar')
|
|
||||||
samir.save()
|
|
||||||
|
|
||||||
friends = [michael, samir]
|
|
||||||
peter = Employee(name='Peter Gibbons', boss=bill, friends=friends)
|
|
||||||
peter.save()
|
|
||||||
|
|
||||||
Employee(name='Funky Gibbon', boss=bill, friends=friends).save()
|
|
||||||
Employee(name='Funky Gibbon', boss=bill, friends=friends).save()
|
|
||||||
Employee(name='Funky Gibbon', boss=bill, friends=friends).save()
|
|
||||||
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
|
|
||||||
peter = Employee.objects.with_id(peter.id)
|
|
||||||
self.assertEqual(q, 1)
|
|
||||||
|
|
||||||
peter.boss
|
|
||||||
self.assertEqual(q, 2)
|
|
||||||
|
|
||||||
peter.friends
|
|
||||||
self.assertEqual(q, 3)
|
|
||||||
|
|
||||||
# Document select_related
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
|
|
||||||
peter = Employee.objects.with_id(peter.id).select_related()
|
|
||||||
self.assertEqual(q, 2)
|
|
||||||
|
|
||||||
self.assertEquals(peter.boss, bill)
|
|
||||||
self.assertEqual(q, 2)
|
|
||||||
|
|
||||||
self.assertEquals(peter.friends, friends)
|
|
||||||
self.assertEqual(q, 2)
|
|
||||||
|
|
||||||
# Queryset select_related
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
|
|
||||||
employees = Employee.objects(boss=bill).select_related()
|
|
||||||
self.assertEqual(q, 2)
|
|
||||||
|
|
||||||
for employee in employees:
|
|
||||||
self.assertEquals(employee.boss, bill)
|
|
||||||
self.assertEqual(q, 2)
|
|
||||||
|
|
||||||
self.assertEquals(employee.friends, friends)
|
|
||||||
self.assertEqual(q, 2)
|
|
||||||
|
|
||||||
def test_circular_reference(self):
|
|
||||||
"""Ensure you can handle circular references
|
|
||||||
"""
|
|
||||||
class Person(Document):
|
|
||||||
name = StringField()
|
|
||||||
relations = ListField(EmbeddedDocumentField('Relation'))
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return "<Person: %s>" % self.name
|
|
||||||
|
|
||||||
class Relation(EmbeddedDocument):
|
|
||||||
name = StringField()
|
|
||||||
person = ReferenceField('Person')
|
|
||||||
|
|
||||||
Person.drop_collection()
|
|
||||||
mother = Person(name="Mother")
|
|
||||||
daughter = Person(name="Daughter")
|
|
||||||
|
|
||||||
mother.save()
|
|
||||||
daughter.save()
|
|
||||||
|
|
||||||
daughter_rel = Relation(name="Daughter", person=daughter)
|
|
||||||
mother.relations.append(daughter_rel)
|
|
||||||
mother.save()
|
|
||||||
|
|
||||||
mother_rel = Relation(name="Daughter", person=mother)
|
|
||||||
self_rel = Relation(name="Self", person=daughter)
|
|
||||||
daughter.relations.append(mother_rel)
|
|
||||||
daughter.relations.append(self_rel)
|
|
||||||
daughter.save()
|
|
||||||
|
|
||||||
self.assertEquals("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects())
|
|
||||||
|
|
||||||
def test_circular_reference_on_self(self):
|
|
||||||
"""Ensure you can handle circular references
|
|
||||||
"""
|
|
||||||
class Person(Document):
|
|
||||||
name = StringField()
|
|
||||||
relations = ListField(ReferenceField('self'))
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return "<Person: %s>" % self.name
|
|
||||||
|
|
||||||
Person.drop_collection()
|
|
||||||
mother = Person(name="Mother")
|
|
||||||
daughter = Person(name="Daughter")
|
|
||||||
|
|
||||||
mother.save()
|
|
||||||
daughter.save()
|
|
||||||
|
|
||||||
mother.relations.append(daughter)
|
|
||||||
mother.save()
|
|
||||||
|
|
||||||
daughter.relations.append(mother)
|
|
||||||
daughter.relations.append(daughter)
|
|
||||||
daughter.save()
|
|
||||||
|
|
||||||
self.assertEquals("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects())
|
|
||||||
|
|
||||||
def test_circular_tree_reference(self):
|
|
||||||
"""Ensure you can handle circular references with more than one level
|
|
||||||
"""
|
|
||||||
class Other(EmbeddedDocument):
|
|
||||||
name = StringField()
|
|
||||||
friends = ListField(ReferenceField('Person'))
|
|
||||||
|
|
||||||
class Person(Document):
|
|
||||||
name = StringField()
|
|
||||||
other = EmbeddedDocumentField(Other, default=lambda: Other())
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return "<Person: %s>" % self.name
|
|
||||||
|
|
||||||
Person.drop_collection()
|
|
||||||
paul = Person(name="Paul")
|
|
||||||
paul.save()
|
|
||||||
maria = Person(name="Maria")
|
|
||||||
maria.save()
|
|
||||||
julia = Person(name='Julia')
|
|
||||||
julia.save()
|
|
||||||
anna = Person(name='Anna')
|
|
||||||
anna.save()
|
|
||||||
|
|
||||||
paul.other.friends = [maria, julia, anna]
|
|
||||||
paul.other.name = "Paul's friends"
|
|
||||||
paul.save()
|
|
||||||
|
|
||||||
maria.other.friends = [paul, julia, anna]
|
|
||||||
maria.other.name = "Maria's friends"
|
|
||||||
maria.save()
|
|
||||||
|
|
||||||
julia.other.friends = [paul, maria, anna]
|
|
||||||
julia.other.name = "Julia's friends"
|
|
||||||
julia.save()
|
|
||||||
|
|
||||||
anna.other.friends = [paul, maria, julia]
|
|
||||||
anna.other.name = "Anna's friends"
|
|
||||||
anna.save()
|
|
||||||
|
|
||||||
self.assertEquals(
|
|
||||||
"[<Person: Paul>, <Person: Maria>, <Person: Julia>, <Person: Anna>]",
|
|
||||||
"%s" % Person.objects()
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_generic_reference(self):
|
|
||||||
|
|
||||||
class UserA(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
class UserB(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
class UserC(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
class Group(Document):
|
|
||||||
members = ListField(GenericReferenceField())
|
|
||||||
|
|
||||||
UserA.drop_collection()
|
|
||||||
UserB.drop_collection()
|
|
||||||
UserC.drop_collection()
|
|
||||||
Group.drop_collection()
|
|
||||||
|
|
||||||
members = []
|
|
||||||
for i in xrange(1, 51):
|
|
||||||
a = UserA(name='User A %s' % i)
|
|
||||||
a.save()
|
|
||||||
|
|
||||||
b = UserB(name='User B %s' % i)
|
|
||||||
b.save()
|
|
||||||
|
|
||||||
c = UserC(name='User C %s' % i)
|
|
||||||
c.save()
|
|
||||||
|
|
||||||
members += [a, b, c]
|
|
||||||
|
|
||||||
group = Group(members=members)
|
|
||||||
group.save()
|
|
||||||
|
|
||||||
group = Group(members=members)
|
|
||||||
group.save()
|
|
||||||
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
|
|
||||||
group_obj = Group.objects.first()
|
|
||||||
self.assertEqual(q, 1)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
for m in group_obj.members:
|
|
||||||
self.assertTrue('User' in m.__class__.__name__)
|
|
||||||
|
|
||||||
# Document select_related
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
|
|
||||||
group_obj = Group.objects.first().select_related()
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
for m in group_obj.members:
|
|
||||||
self.assertTrue('User' in m.__class__.__name__)
|
|
||||||
|
|
||||||
# Queryset select_related
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
|
|
||||||
group_objs = Group.objects.select_related()
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
for group_obj in group_objs:
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
for m in group_obj.members:
|
|
||||||
self.assertTrue('User' in m.__class__.__name__)
|
|
||||||
|
|
||||||
UserA.drop_collection()
|
|
||||||
UserB.drop_collection()
|
|
||||||
UserC.drop_collection()
|
|
||||||
Group.drop_collection()
|
|
||||||
|
|
||||||
def test_list_field_complex(self):
|
|
||||||
|
|
||||||
class UserA(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
class UserB(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
class UserC(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
class Group(Document):
|
|
||||||
members = ListField()
|
|
||||||
|
|
||||||
UserA.drop_collection()
|
|
||||||
UserB.drop_collection()
|
|
||||||
UserC.drop_collection()
|
|
||||||
Group.drop_collection()
|
|
||||||
|
|
||||||
members = []
|
|
||||||
for i in xrange(1, 51):
|
|
||||||
a = UserA(name='User A %s' % i)
|
|
||||||
a.save()
|
|
||||||
|
|
||||||
b = UserB(name='User B %s' % i)
|
|
||||||
b.save()
|
|
||||||
|
|
||||||
c = UserC(name='User C %s' % i)
|
|
||||||
c.save()
|
|
||||||
|
|
||||||
members += [a, b, c]
|
|
||||||
|
|
||||||
group = Group(members=members)
|
|
||||||
group.save()
|
|
||||||
|
|
||||||
group = Group(members=members)
|
|
||||||
group.save()
|
|
||||||
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
|
|
||||||
group_obj = Group.objects.first()
|
|
||||||
self.assertEqual(q, 1)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
for m in group_obj.members:
|
|
||||||
self.assertTrue('User' in m.__class__.__name__)
|
|
||||||
|
|
||||||
# Document select_related
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
|
|
||||||
group_obj = Group.objects.first().select_related()
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
for m in group_obj.members:
|
|
||||||
self.assertTrue('User' in m.__class__.__name__)
|
|
||||||
|
|
||||||
# Queryset select_related
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
|
|
||||||
group_objs = Group.objects.select_related()
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
for group_obj in group_objs:
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
for m in group_obj.members:
|
|
||||||
self.assertTrue('User' in m.__class__.__name__)
|
|
||||||
|
|
||||||
UserA.drop_collection()
|
|
||||||
UserB.drop_collection()
|
|
||||||
UserC.drop_collection()
|
|
||||||
Group.drop_collection()
|
|
||||||
|
|
||||||
def test_map_field_reference(self):
|
|
||||||
|
|
||||||
class User(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
class Group(Document):
|
|
||||||
members = MapField(ReferenceField(User))
|
|
||||||
|
|
||||||
User.drop_collection()
|
|
||||||
Group.drop_collection()
|
|
||||||
|
|
||||||
members = []
|
|
||||||
for i in xrange(1, 51):
|
|
||||||
user = User(name='user %s' % i)
|
|
||||||
user.save()
|
|
||||||
members.append(user)
|
|
||||||
|
|
||||||
group = Group(members=dict([(str(u.id), u) for u in members]))
|
|
||||||
group.save()
|
|
||||||
|
|
||||||
group = Group(members=dict([(str(u.id), u) for u in members]))
|
|
||||||
group.save()
|
|
||||||
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
|
|
||||||
group_obj = Group.objects.first()
|
|
||||||
self.assertEqual(q, 1)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 2)
|
|
||||||
|
|
||||||
for k, m in group_obj.members.iteritems():
|
|
||||||
self.assertTrue(isinstance(m, User))
|
|
||||||
|
|
||||||
# Document select_related
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
|
|
||||||
group_obj = Group.objects.first().select_related()
|
|
||||||
self.assertEqual(q, 2)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 2)
|
|
||||||
|
|
||||||
for k, m in group_obj.members.iteritems():
|
|
||||||
self.assertTrue(isinstance(m, User))
|
|
||||||
|
|
||||||
# Queryset select_related
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
|
|
||||||
group_objs = Group.objects.select_related()
|
|
||||||
self.assertEqual(q, 2)
|
|
||||||
|
|
||||||
for group_obj in group_objs:
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 2)
|
|
||||||
|
|
||||||
for k, m in group_obj.members.iteritems():
|
|
||||||
self.assertTrue(isinstance(m, User))
|
|
||||||
|
|
||||||
User.drop_collection()
|
|
||||||
Group.drop_collection()
|
|
||||||
|
|
||||||
def test_dict_field(self):
|
|
||||||
|
|
||||||
class UserA(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
class UserB(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
class UserC(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
class Group(Document):
|
|
||||||
members = DictField()
|
|
||||||
|
|
||||||
UserA.drop_collection()
|
|
||||||
UserB.drop_collection()
|
|
||||||
UserC.drop_collection()
|
|
||||||
Group.drop_collection()
|
|
||||||
|
|
||||||
members = []
|
|
||||||
for i in xrange(1, 51):
|
|
||||||
a = UserA(name='User A %s' % i)
|
|
||||||
a.save()
|
|
||||||
|
|
||||||
b = UserB(name='User B %s' % i)
|
|
||||||
b.save()
|
|
||||||
|
|
||||||
c = UserC(name='User C %s' % i)
|
|
||||||
c.save()
|
|
||||||
|
|
||||||
members += [a, b, c]
|
|
||||||
|
|
||||||
group = Group(members=dict([(str(u.id), u) for u in members]))
|
|
||||||
group.save()
|
|
||||||
group = Group(members=dict([(str(u.id), u) for u in members]))
|
|
||||||
group.save()
|
|
||||||
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
|
|
||||||
group_obj = Group.objects.first()
|
|
||||||
self.assertEqual(q, 1)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
for k, m in group_obj.members.iteritems():
|
|
||||||
self.assertTrue('User' in m.__class__.__name__)
|
|
||||||
|
|
||||||
# Document select_related
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
|
|
||||||
group_obj = Group.objects.first().select_related()
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
for k, m in group_obj.members.iteritems():
|
|
||||||
self.assertTrue('User' in m.__class__.__name__)
|
|
||||||
|
|
||||||
# Queryset select_related
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
|
|
||||||
group_objs = Group.objects.select_related()
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
for group_obj in group_objs:
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
for k, m in group_obj.members.iteritems():
|
|
||||||
self.assertTrue('User' in m.__class__.__name__)
|
|
||||||
|
|
||||||
Group.objects.delete()
|
|
||||||
Group().save()
|
|
||||||
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
|
|
||||||
group_obj = Group.objects.first()
|
|
||||||
self.assertEqual(q, 1)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 1)
|
|
||||||
self.assertEqual(group_obj.members, {})
|
|
||||||
|
|
||||||
UserA.drop_collection()
|
|
||||||
UserB.drop_collection()
|
|
||||||
UserC.drop_collection()
|
|
||||||
Group.drop_collection()
|
|
||||||
|
|
||||||
def test_dict_field_no_field_inheritance(self):
|
|
||||||
|
|
||||||
class UserA(Document):
|
|
||||||
name = StringField()
|
|
||||||
meta = {'allow_inheritance': False}
|
|
||||||
|
|
||||||
class Group(Document):
|
|
||||||
members = DictField()
|
|
||||||
|
|
||||||
UserA.drop_collection()
|
|
||||||
Group.drop_collection()
|
|
||||||
|
|
||||||
members = []
|
|
||||||
for i in xrange(1, 51):
|
|
||||||
a = UserA(name='User A %s' % i)
|
|
||||||
a.save()
|
|
||||||
|
|
||||||
members += [a]
|
|
||||||
|
|
||||||
group = Group(members=dict([(str(u.id), u) for u in members]))
|
|
||||||
group.save()
|
|
||||||
|
|
||||||
group = Group(members=dict([(str(u.id), u) for u in members]))
|
|
||||||
group.save()
|
|
||||||
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
|
|
||||||
group_obj = Group.objects.first()
|
|
||||||
self.assertEqual(q, 1)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 2)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 2)
|
|
||||||
|
|
||||||
for k, m in group_obj.members.iteritems():
|
|
||||||
self.assertTrue(isinstance(m, UserA))
|
|
||||||
|
|
||||||
# Document select_related
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
|
|
||||||
group_obj = Group.objects.first().select_related()
|
|
||||||
self.assertEqual(q, 2)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 2)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 2)
|
|
||||||
|
|
||||||
for k, m in group_obj.members.iteritems():
|
|
||||||
self.assertTrue(isinstance(m, UserA))
|
|
||||||
|
|
||||||
# Queryset select_related
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
|
|
||||||
group_objs = Group.objects.select_related()
|
|
||||||
self.assertEqual(q, 2)
|
|
||||||
|
|
||||||
for group_obj in group_objs:
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 2)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 2)
|
|
||||||
|
|
||||||
for k, m in group_obj.members.iteritems():
|
|
||||||
self.assertTrue(isinstance(m, UserA))
|
|
||||||
|
|
||||||
UserA.drop_collection()
|
|
||||||
Group.drop_collection()
|
|
||||||
|
|
||||||
def test_generic_reference_map_field(self):
|
|
||||||
|
|
||||||
class UserA(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
class UserB(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
class UserC(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
class Group(Document):
|
|
||||||
members = MapField(GenericReferenceField())
|
|
||||||
|
|
||||||
UserA.drop_collection()
|
|
||||||
UserB.drop_collection()
|
|
||||||
UserC.drop_collection()
|
|
||||||
Group.drop_collection()
|
|
||||||
|
|
||||||
members = []
|
|
||||||
for i in xrange(1, 51):
|
|
||||||
a = UserA(name='User A %s' % i)
|
|
||||||
a.save()
|
|
||||||
|
|
||||||
b = UserB(name='User B %s' % i)
|
|
||||||
b.save()
|
|
||||||
|
|
||||||
c = UserC(name='User C %s' % i)
|
|
||||||
c.save()
|
|
||||||
|
|
||||||
members += [a, b, c]
|
|
||||||
|
|
||||||
group = Group(members=dict([(str(u.id), u) for u in members]))
|
|
||||||
group.save()
|
|
||||||
group = Group(members=dict([(str(u.id), u) for u in members]))
|
|
||||||
group.save()
|
|
||||||
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
|
|
||||||
group_obj = Group.objects.first()
|
|
||||||
self.assertEqual(q, 1)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
for k, m in group_obj.members.iteritems():
|
|
||||||
self.assertTrue('User' in m.__class__.__name__)
|
|
||||||
|
|
||||||
# Document select_related
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
|
|
||||||
group_obj = Group.objects.first().select_related()
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
for k, m in group_obj.members.iteritems():
|
|
||||||
self.assertTrue('User' in m.__class__.__name__)
|
|
||||||
|
|
||||||
# Queryset select_related
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
|
|
||||||
group_objs = Group.objects.select_related()
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
for group_obj in group_objs:
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 4)
|
|
||||||
|
|
||||||
for k, m in group_obj.members.iteritems():
|
|
||||||
self.assertTrue('User' in m.__class__.__name__)
|
|
||||||
|
|
||||||
Group.objects.delete()
|
|
||||||
Group().save()
|
|
||||||
|
|
||||||
with query_counter() as q:
|
|
||||||
self.assertEqual(q, 0)
|
|
||||||
|
|
||||||
group_obj = Group.objects.first()
|
|
||||||
self.assertEqual(q, 1)
|
|
||||||
|
|
||||||
[m for m in group_obj.members]
|
|
||||||
self.assertEqual(q, 1)
|
|
||||||
|
|
||||||
UserA.drop_collection()
|
|
||||||
UserB.drop_collection()
|
|
||||||
UserC.drop_collection()
|
|
||||||
Group.drop_collection()
|
|
||||||
|
|
||||||
def test_multidirectional_lists(self):
|
|
||||||
|
|
||||||
class Asset(Document):
|
|
||||||
name = StringField(max_length=250, required=True)
|
|
||||||
parent = GenericReferenceField(default=None)
|
|
||||||
parents = ListField(GenericReferenceField())
|
|
||||||
children = ListField(GenericReferenceField())
|
|
||||||
|
|
||||||
Asset.drop_collection()
|
|
||||||
|
|
||||||
root = Asset(name='', path="/", title="Site Root")
|
|
||||||
root.save()
|
|
||||||
|
|
||||||
company = Asset(name='company', title='Company', parent=root, parents=[root])
|
|
||||||
company.save()
|
|
||||||
|
|
||||||
root.children = [company]
|
|
||||||
root.save()
|
|
||||||
|
|
||||||
root = root.reload()
|
|
||||||
self.assertEquals(root.children, [company])
|
|
||||||
self.assertEquals(company.parents, [root])
|
|
||||||
|
|
||||||
def test_dict_in_dbref_instance(self):
|
|
||||||
|
|
||||||
class Person(Document):
|
|
||||||
name = StringField(max_length=250, required=True)
|
|
||||||
|
|
||||||
class Room(Document):
|
|
||||||
number = StringField(max_length=250, required=True)
|
|
||||||
staffs_with_position = ListField(DictField())
|
|
||||||
|
|
||||||
Person.drop_collection()
|
|
||||||
Room.drop_collection()
|
|
||||||
|
|
||||||
bob = Person.objects.create(name='Bob')
|
|
||||||
bob.save()
|
|
||||||
sarah = Person.objects.create(name='Sarah')
|
|
||||||
sarah.save()
|
|
||||||
|
|
||||||
room_101 = Room.objects.create(number="101")
|
|
||||||
room_101.staffs_with_position = [
|
|
||||||
{'position_key': 'window', 'staff': sarah},
|
|
||||||
{'position_key': 'door', 'staff': bob.to_dbref()}]
|
|
||||||
room_101.save()
|
|
||||||
|
|
||||||
room = Room.objects.first().select_related()
|
|
||||||
self.assertEquals(room.staffs_with_position[0]['staff'], sarah)
|
|
||||||
self.assertEquals(room.staffs_with_position[1]['staff'], bob)
|
|
@@ -1,90 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
import unittest
|
|
||||||
|
|
||||||
from mongoengine import *
|
|
||||||
from mongoengine.django.shortcuts import get_document_or_404
|
|
||||||
|
|
||||||
from django.http import Http404
|
|
||||||
from django.template import Context, Template
|
|
||||||
from django.conf import settings
|
|
||||||
from django.core.paginator import Paginator
|
|
||||||
|
|
||||||
settings.configure()
|
|
||||||
|
|
||||||
class QuerySetTest(unittest.TestCase):
|
|
||||||
|
|
||||||
def setUp(self):
|
|
||||||
connect(db='mongoenginetest')
|
|
||||||
|
|
||||||
class Person(Document):
|
|
||||||
name = StringField()
|
|
||||||
age = IntField()
|
|
||||||
self.Person = Person
|
|
||||||
|
|
||||||
def test_order_by_in_django_template(self):
|
|
||||||
"""Ensure that QuerySets are properly ordered in Django template.
|
|
||||||
"""
|
|
||||||
self.Person.drop_collection()
|
|
||||||
|
|
||||||
self.Person(name="A", age=20).save()
|
|
||||||
self.Person(name="D", age=10).save()
|
|
||||||
self.Person(name="B", age=40).save()
|
|
||||||
self.Person(name="C", age=30).save()
|
|
||||||
|
|
||||||
t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}")
|
|
||||||
|
|
||||||
d = {"ol": self.Person.objects.order_by('-name')}
|
|
||||||
self.assertEqual(t.render(Context(d)), u'D-10:C-30:B-40:A-20:')
|
|
||||||
d = {"ol": self.Person.objects.order_by('+name')}
|
|
||||||
self.assertEqual(t.render(Context(d)), u'A-20:B-40:C-30:D-10:')
|
|
||||||
d = {"ol": self.Person.objects.order_by('-age')}
|
|
||||||
self.assertEqual(t.render(Context(d)), u'B-40:C-30:A-20:D-10:')
|
|
||||||
d = {"ol": self.Person.objects.order_by('+age')}
|
|
||||||
self.assertEqual(t.render(Context(d)), u'D-10:A-20:C-30:B-40:')
|
|
||||||
|
|
||||||
self.Person.drop_collection()
|
|
||||||
|
|
||||||
def test_q_object_filter_in_template(self):
|
|
||||||
|
|
||||||
self.Person.drop_collection()
|
|
||||||
|
|
||||||
self.Person(name="A", age=20).save()
|
|
||||||
self.Person(name="D", age=10).save()
|
|
||||||
self.Person(name="B", age=40).save()
|
|
||||||
self.Person(name="C", age=30).save()
|
|
||||||
|
|
||||||
t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}")
|
|
||||||
|
|
||||||
d = {"ol": self.Person.objects.filter(Q(age=10) | Q(name="C"))}
|
|
||||||
self.assertEqual(t.render(Context(d)), 'D-10:C-30:')
|
|
||||||
|
|
||||||
# Check double rendering doesn't throw an error
|
|
||||||
self.assertEqual(t.render(Context(d)), 'D-10:C-30:')
|
|
||||||
|
|
||||||
def test_get_document_or_404(self):
|
|
||||||
p = self.Person(name="G404")
|
|
||||||
p.save()
|
|
||||||
|
|
||||||
self.assertRaises(Http404, get_document_or_404, self.Person, pk='1234')
|
|
||||||
self.assertEqual(p, get_document_or_404(self.Person, pk=p.pk))
|
|
||||||
|
|
||||||
def test_pagination(self):
|
|
||||||
"""Ensure that Pagination works as expected
|
|
||||||
"""
|
|
||||||
class Page(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
Page.drop_collection()
|
|
||||||
|
|
||||||
for i in xrange(1, 11):
|
|
||||||
Page(name=str(i)).save()
|
|
||||||
|
|
||||||
paginator = Paginator(Page.objects.all(), 2)
|
|
||||||
|
|
||||||
t = Template("{% for i in page.object_list %}{{ i.name }}:{% endfor %}")
|
|
||||||
for p in paginator.page_range:
|
|
||||||
d = {"page": paginator.page(p)}
|
|
||||||
end = p * 2
|
|
||||||
start = end - 1
|
|
||||||
self.assertEqual(t.render(Context(d)), u'%d:%d:' % (start, end))
|
|
2288
tests/document.py
2288
tests/document.py
File diff suppressed because it is too large
Load Diff
@@ -1,502 +0,0 @@
|
|||||||
import unittest
|
|
||||||
|
|
||||||
from mongoengine import *
|
|
||||||
from mongoengine.connection import get_db
|
|
||||||
|
|
||||||
|
|
||||||
class DynamicDocTest(unittest.TestCase):
|
|
||||||
|
|
||||||
def setUp(self):
|
|
||||||
connect(db='mongoenginetest')
|
|
||||||
self.db = get_db()
|
|
||||||
|
|
||||||
class Person(DynamicDocument):
|
|
||||||
name = StringField()
|
|
||||||
meta = {'allow_inheritance': True}
|
|
||||||
|
|
||||||
Person.drop_collection()
|
|
||||||
|
|
||||||
self.Person = Person
|
|
||||||
|
|
||||||
def test_simple_dynamic_document(self):
|
|
||||||
"""Ensures simple dynamic documents are saved correctly"""
|
|
||||||
|
|
||||||
p = self.Person()
|
|
||||||
p.name = "James"
|
|
||||||
p.age = 34
|
|
||||||
|
|
||||||
self.assertEquals(p.to_mongo(),
|
|
||||||
{"_types": ["Person"], "_cls": "Person",
|
|
||||||
"name": "James", "age": 34}
|
|
||||||
)
|
|
||||||
|
|
||||||
p.save()
|
|
||||||
|
|
||||||
self.assertEquals(self.Person.objects.first().age, 34)
|
|
||||||
|
|
||||||
# Confirm no changes to self.Person
|
|
||||||
self.assertFalse(hasattr(self.Person, 'age'))
|
|
||||||
|
|
||||||
def test_dynamic_document_delta(self):
|
|
||||||
"""Ensures simple dynamic documents can delta correctly"""
|
|
||||||
p = self.Person(name="James", age=34)
|
|
||||||
self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {}))
|
|
||||||
|
|
||||||
p.doc = 123
|
|
||||||
del(p.doc)
|
|
||||||
self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {'doc': 1}))
|
|
||||||
|
|
||||||
def test_change_scope_of_variable(self):
|
|
||||||
"""Test changing the scope of a dynamic field has no adverse effects"""
|
|
||||||
p = self.Person()
|
|
||||||
p.name = "Dean"
|
|
||||||
p.misc = 22
|
|
||||||
p.save()
|
|
||||||
|
|
||||||
p = self.Person.objects.get()
|
|
||||||
p.misc = {'hello': 'world'}
|
|
||||||
p.save()
|
|
||||||
|
|
||||||
p = self.Person.objects.get()
|
|
||||||
self.assertEquals(p.misc, {'hello': 'world'})
|
|
||||||
|
|
||||||
def test_delete_dynamic_field(self):
|
|
||||||
"""Test deleting a dynamic field works"""
|
|
||||||
self.Person.drop_collection()
|
|
||||||
p = self.Person()
|
|
||||||
p.name = "Dean"
|
|
||||||
p.misc = 22
|
|
||||||
p.save()
|
|
||||||
|
|
||||||
p = self.Person.objects.get()
|
|
||||||
p.misc = {'hello': 'world'}
|
|
||||||
p.save()
|
|
||||||
|
|
||||||
p = self.Person.objects.get()
|
|
||||||
self.assertEquals(p.misc, {'hello': 'world'})
|
|
||||||
collection = self.db[self.Person._get_collection_name()]
|
|
||||||
obj = collection.find_one()
|
|
||||||
self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'misc', 'name'])
|
|
||||||
|
|
||||||
del(p.misc)
|
|
||||||
p.save()
|
|
||||||
|
|
||||||
p = self.Person.objects.get()
|
|
||||||
self.assertFalse(hasattr(p, 'misc'))
|
|
||||||
|
|
||||||
obj = collection.find_one()
|
|
||||||
self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'name'])
|
|
||||||
|
|
||||||
def test_dynamic_document_queries(self):
|
|
||||||
"""Ensure we can query dynamic fields"""
|
|
||||||
p = self.Person()
|
|
||||||
p.name = "Dean"
|
|
||||||
p.age = 22
|
|
||||||
p.save()
|
|
||||||
|
|
||||||
self.assertEquals(1, self.Person.objects(age=22).count())
|
|
||||||
p = self.Person.objects(age=22)
|
|
||||||
p = p.get()
|
|
||||||
self.assertEquals(22, p.age)
|
|
||||||
|
|
||||||
def test_complex_dynamic_document_queries(self):
|
|
||||||
class Person(DynamicDocument):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
Person.drop_collection()
|
|
||||||
|
|
||||||
p = Person(name="test")
|
|
||||||
p.age = "ten"
|
|
||||||
p.save()
|
|
||||||
|
|
||||||
p1 = Person(name="test1")
|
|
||||||
p1.age = "less then ten and a half"
|
|
||||||
p1.save()
|
|
||||||
|
|
||||||
p2 = Person(name="test2")
|
|
||||||
p2.age = 10
|
|
||||||
p2.save()
|
|
||||||
|
|
||||||
self.assertEquals(Person.objects(age__icontains='ten').count(), 2)
|
|
||||||
self.assertEquals(Person.objects(age__gte=10).count(), 1)
|
|
||||||
|
|
||||||
def test_complex_data_lookups(self):
|
|
||||||
"""Ensure you can query dynamic document dynamic fields"""
|
|
||||||
p = self.Person()
|
|
||||||
p.misc = {'hello': 'world'}
|
|
||||||
p.save()
|
|
||||||
|
|
||||||
self.assertEquals(1, self.Person.objects(misc__hello='world').count())
|
|
||||||
|
|
||||||
def test_inheritance(self):
|
|
||||||
"""Ensure that dynamic document plays nice with inheritance"""
|
|
||||||
class Employee(self.Person):
|
|
||||||
salary = IntField()
|
|
||||||
|
|
||||||
Employee.drop_collection()
|
|
||||||
|
|
||||||
self.assertTrue('name' in Employee._fields)
|
|
||||||
self.assertTrue('salary' in Employee._fields)
|
|
||||||
self.assertEqual(Employee._get_collection_name(),
|
|
||||||
self.Person._get_collection_name())
|
|
||||||
|
|
||||||
joe_bloggs = Employee()
|
|
||||||
joe_bloggs.name = "Joe Bloggs"
|
|
||||||
joe_bloggs.salary = 10
|
|
||||||
joe_bloggs.age = 20
|
|
||||||
joe_bloggs.save()
|
|
||||||
|
|
||||||
self.assertEquals(1, self.Person.objects(age=20).count())
|
|
||||||
self.assertEquals(1, Employee.objects(age=20).count())
|
|
||||||
|
|
||||||
joe_bloggs = self.Person.objects.first()
|
|
||||||
self.assertTrue(isinstance(joe_bloggs, Employee))
|
|
||||||
|
|
||||||
def test_embedded_dynamic_document(self):
|
|
||||||
"""Test dynamic embedded documents"""
|
|
||||||
class Embedded(DynamicEmbeddedDocument):
|
|
||||||
pass
|
|
||||||
|
|
||||||
class Doc(DynamicDocument):
|
|
||||||
pass
|
|
||||||
|
|
||||||
Doc.drop_collection()
|
|
||||||
doc = Doc()
|
|
||||||
|
|
||||||
embedded_1 = Embedded()
|
|
||||||
embedded_1.string_field = 'hello'
|
|
||||||
embedded_1.int_field = 1
|
|
||||||
embedded_1.dict_field = {'hello': 'world'}
|
|
||||||
embedded_1.list_field = ['1', 2, {'hello': 'world'}]
|
|
||||||
doc.embedded_field = embedded_1
|
|
||||||
|
|
||||||
self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
|
|
||||||
"embedded_field": {
|
|
||||||
"_types": ['Embedded'], "_cls": "Embedded",
|
|
||||||
"string_field": "hello",
|
|
||||||
"int_field": 1,
|
|
||||||
"dict_field": {"hello": "world"},
|
|
||||||
"list_field": ['1', 2, {'hello': 'world'}]
|
|
||||||
}
|
|
||||||
})
|
|
||||||
doc.save()
|
|
||||||
|
|
||||||
doc = Doc.objects.first()
|
|
||||||
self.assertEquals(doc.embedded_field.__class__, Embedded)
|
|
||||||
self.assertEquals(doc.embedded_field.string_field, "hello")
|
|
||||||
self.assertEquals(doc.embedded_field.int_field, 1)
|
|
||||||
self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'})
|
|
||||||
self.assertEquals(doc.embedded_field.list_field, ['1', 2, {'hello': 'world'}])
|
|
||||||
|
|
||||||
def test_complex_embedded_documents(self):
|
|
||||||
"""Test complex dynamic embedded documents setups"""
|
|
||||||
class Embedded(DynamicEmbeddedDocument):
|
|
||||||
pass
|
|
||||||
|
|
||||||
class Doc(DynamicDocument):
|
|
||||||
pass
|
|
||||||
|
|
||||||
Doc.drop_collection()
|
|
||||||
doc = Doc()
|
|
||||||
|
|
||||||
embedded_1 = Embedded()
|
|
||||||
embedded_1.string_field = 'hello'
|
|
||||||
embedded_1.int_field = 1
|
|
||||||
embedded_1.dict_field = {'hello': 'world'}
|
|
||||||
|
|
||||||
embedded_2 = Embedded()
|
|
||||||
embedded_2.string_field = 'hello'
|
|
||||||
embedded_2.int_field = 1
|
|
||||||
embedded_2.dict_field = {'hello': 'world'}
|
|
||||||
embedded_2.list_field = ['1', 2, {'hello': 'world'}]
|
|
||||||
|
|
||||||
embedded_1.list_field = ['1', 2, embedded_2]
|
|
||||||
doc.embedded_field = embedded_1
|
|
||||||
|
|
||||||
self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc",
|
|
||||||
"embedded_field": {
|
|
||||||
"_types": ['Embedded'], "_cls": "Embedded",
|
|
||||||
"string_field": "hello",
|
|
||||||
"int_field": 1,
|
|
||||||
"dict_field": {"hello": "world"},
|
|
||||||
"list_field": ['1', 2,
|
|
||||||
{"_types": ['Embedded'], "_cls": "Embedded",
|
|
||||||
"string_field": "hello",
|
|
||||||
"int_field": 1,
|
|
||||||
"dict_field": {"hello": "world"},
|
|
||||||
"list_field": ['1', 2, {'hello': 'world'}]}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
})
|
|
||||||
doc.save()
|
|
||||||
doc = Doc.objects.first()
|
|
||||||
self.assertEquals(doc.embedded_field.__class__, Embedded)
|
|
||||||
self.assertEquals(doc.embedded_field.string_field, "hello")
|
|
||||||
self.assertEquals(doc.embedded_field.int_field, 1)
|
|
||||||
self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'})
|
|
||||||
self.assertEquals(doc.embedded_field.list_field[0], '1')
|
|
||||||
self.assertEquals(doc.embedded_field.list_field[1], 2)
|
|
||||||
|
|
||||||
embedded_field = doc.embedded_field.list_field[2]
|
|
||||||
|
|
||||||
self.assertEquals(embedded_field.__class__, Embedded)
|
|
||||||
self.assertEquals(embedded_field.string_field, "hello")
|
|
||||||
self.assertEquals(embedded_field.int_field, 1)
|
|
||||||
self.assertEquals(embedded_field.dict_field, {'hello': 'world'})
|
|
||||||
self.assertEquals(embedded_field.list_field, ['1', 2, {'hello': 'world'}])
|
|
||||||
|
|
||||||
def test_delta_for_dynamic_documents(self):
|
|
||||||
p = self.Person()
|
|
||||||
p.name = "Dean"
|
|
||||||
p.age = 22
|
|
||||||
p.save()
|
|
||||||
|
|
||||||
p.age = 24
|
|
||||||
self.assertEquals(p.age, 24)
|
|
||||||
self.assertEquals(p._get_changed_fields(), ['age'])
|
|
||||||
self.assertEquals(p._delta(), ({'age': 24}, {}))
|
|
||||||
|
|
||||||
p = self.Person.objects(age=22).get()
|
|
||||||
p.age = 24
|
|
||||||
self.assertEquals(p.age, 24)
|
|
||||||
self.assertEquals(p._get_changed_fields(), ['age'])
|
|
||||||
self.assertEquals(p._delta(), ({'age': 24}, {}))
|
|
||||||
|
|
||||||
p.save()
|
|
||||||
self.assertEquals(1, self.Person.objects(age=24).count())
|
|
||||||
|
|
||||||
def test_delta(self):
|
|
||||||
|
|
||||||
class Doc(DynamicDocument):
|
|
||||||
pass
|
|
||||||
|
|
||||||
Doc.drop_collection()
|
|
||||||
doc = Doc()
|
|
||||||
doc.save()
|
|
||||||
|
|
||||||
doc = Doc.objects.first()
|
|
||||||
self.assertEquals(doc._get_changed_fields(), [])
|
|
||||||
self.assertEquals(doc._delta(), ({}, {}))
|
|
||||||
|
|
||||||
doc.string_field = 'hello'
|
|
||||||
self.assertEquals(doc._get_changed_fields(), ['string_field'])
|
|
||||||
self.assertEquals(doc._delta(), ({'string_field': 'hello'}, {}))
|
|
||||||
|
|
||||||
doc._changed_fields = []
|
|
||||||
doc.int_field = 1
|
|
||||||
self.assertEquals(doc._get_changed_fields(), ['int_field'])
|
|
||||||
self.assertEquals(doc._delta(), ({'int_field': 1}, {}))
|
|
||||||
|
|
||||||
doc._changed_fields = []
|
|
||||||
dict_value = {'hello': 'world', 'ping': 'pong'}
|
|
||||||
doc.dict_field = dict_value
|
|
||||||
self.assertEquals(doc._get_changed_fields(), ['dict_field'])
|
|
||||||
self.assertEquals(doc._delta(), ({'dict_field': dict_value}, {}))
|
|
||||||
|
|
||||||
doc._changed_fields = []
|
|
||||||
list_value = ['1', 2, {'hello': 'world'}]
|
|
||||||
doc.list_field = list_value
|
|
||||||
self.assertEquals(doc._get_changed_fields(), ['list_field'])
|
|
||||||
self.assertEquals(doc._delta(), ({'list_field': list_value}, {}))
|
|
||||||
|
|
||||||
# Test unsetting
|
|
||||||
doc._changed_fields = []
|
|
||||||
doc.dict_field = {}
|
|
||||||
self.assertEquals(doc._get_changed_fields(), ['dict_field'])
|
|
||||||
self.assertEquals(doc._delta(), ({}, {'dict_field': 1}))
|
|
||||||
|
|
||||||
doc._changed_fields = []
|
|
||||||
doc.list_field = []
|
|
||||||
self.assertEquals(doc._get_changed_fields(), ['list_field'])
|
|
||||||
self.assertEquals(doc._delta(), ({}, {'list_field': 1}))
|
|
||||||
|
|
||||||
def test_delta_recursive(self):
|
|
||||||
"""Testing deltaing works with dynamic documents"""
|
|
||||||
class Embedded(DynamicEmbeddedDocument):
|
|
||||||
pass
|
|
||||||
|
|
||||||
class Doc(DynamicDocument):
|
|
||||||
pass
|
|
||||||
|
|
||||||
Doc.drop_collection()
|
|
||||||
doc = Doc()
|
|
||||||
doc.save()
|
|
||||||
|
|
||||||
doc = Doc.objects.first()
|
|
||||||
self.assertEquals(doc._get_changed_fields(), [])
|
|
||||||
self.assertEquals(doc._delta(), ({}, {}))
|
|
||||||
|
|
||||||
embedded_1 = Embedded()
|
|
||||||
embedded_1.string_field = 'hello'
|
|
||||||
embedded_1.int_field = 1
|
|
||||||
embedded_1.dict_field = {'hello': 'world'}
|
|
||||||
embedded_1.list_field = ['1', 2, {'hello': 'world'}]
|
|
||||||
doc.embedded_field = embedded_1
|
|
||||||
|
|
||||||
self.assertEquals(doc._get_changed_fields(), ['embedded_field'])
|
|
||||||
|
|
||||||
embedded_delta = {
|
|
||||||
'string_field': 'hello',
|
|
||||||
'int_field': 1,
|
|
||||||
'dict_field': {'hello': 'world'},
|
|
||||||
'list_field': ['1', 2, {'hello': 'world'}]
|
|
||||||
}
|
|
||||||
self.assertEquals(doc.embedded_field._delta(), (embedded_delta, {}))
|
|
||||||
embedded_delta.update({
|
|
||||||
'_types': ['Embedded'],
|
|
||||||
'_cls': 'Embedded',
|
|
||||||
})
|
|
||||||
self.assertEquals(doc._delta(), ({'embedded_field': embedded_delta}, {}))
|
|
||||||
|
|
||||||
doc.save()
|
|
||||||
doc.reload()
|
|
||||||
|
|
||||||
doc.embedded_field.dict_field = {}
|
|
||||||
self.assertEquals(doc._get_changed_fields(), ['embedded_field.dict_field'])
|
|
||||||
self.assertEquals(doc.embedded_field._delta(), ({}, {'dict_field': 1}))
|
|
||||||
|
|
||||||
self.assertEquals(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
|
|
||||||
doc.save()
|
|
||||||
doc.reload()
|
|
||||||
|
|
||||||
doc.embedded_field.list_field = []
|
|
||||||
self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
|
|
||||||
self.assertEquals(doc.embedded_field._delta(), ({}, {'list_field': 1}))
|
|
||||||
self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field': 1}))
|
|
||||||
doc.save()
|
|
||||||
doc.reload()
|
|
||||||
|
|
||||||
embedded_2 = Embedded()
|
|
||||||
embedded_2.string_field = 'hello'
|
|
||||||
embedded_2.int_field = 1
|
|
||||||
embedded_2.dict_field = {'hello': 'world'}
|
|
||||||
embedded_2.list_field = ['1', 2, {'hello': 'world'}]
|
|
||||||
|
|
||||||
doc.embedded_field.list_field = ['1', 2, embedded_2]
|
|
||||||
self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
|
|
||||||
self.assertEquals(doc.embedded_field._delta(), ({
|
|
||||||
'list_field': ['1', 2, {
|
|
||||||
'_cls': 'Embedded',
|
|
||||||
'_types': ['Embedded'],
|
|
||||||
'string_field': 'hello',
|
|
||||||
'dict_field': {'hello': 'world'},
|
|
||||||
'int_field': 1,
|
|
||||||
'list_field': ['1', 2, {'hello': 'world'}],
|
|
||||||
}]
|
|
||||||
}, {}))
|
|
||||||
|
|
||||||
self.assertEquals(doc._delta(), ({
|
|
||||||
'embedded_field.list_field': ['1', 2, {
|
|
||||||
'_cls': 'Embedded',
|
|
||||||
'_types': ['Embedded'],
|
|
||||||
'string_field': 'hello',
|
|
||||||
'dict_field': {'hello': 'world'},
|
|
||||||
'int_field': 1,
|
|
||||||
'list_field': ['1', 2, {'hello': 'world'}],
|
|
||||||
}]
|
|
||||||
}, {}))
|
|
||||||
doc.save()
|
|
||||||
doc.reload()
|
|
||||||
|
|
||||||
self.assertEquals(doc.embedded_field.list_field[2]._changed_fields, [])
|
|
||||||
self.assertEquals(doc.embedded_field.list_field[0], '1')
|
|
||||||
self.assertEquals(doc.embedded_field.list_field[1], 2)
|
|
||||||
for k in doc.embedded_field.list_field[2]._fields:
|
|
||||||
self.assertEquals(doc.embedded_field.list_field[2][k], embedded_2[k])
|
|
||||||
|
|
||||||
doc.embedded_field.list_field[2].string_field = 'world'
|
|
||||||
self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field.2.string_field'])
|
|
||||||
self.assertEquals(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {}))
|
|
||||||
self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {}))
|
|
||||||
doc.save()
|
|
||||||
doc.reload()
|
|
||||||
self.assertEquals(doc.embedded_field.list_field[2].string_field, 'world')
|
|
||||||
|
|
||||||
# Test multiple assignments
|
|
||||||
doc.embedded_field.list_field[2].string_field = 'hello world'
|
|
||||||
doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
|
|
||||||
self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
|
|
||||||
self.assertEquals(doc.embedded_field._delta(), ({
|
|
||||||
'list_field': ['1', 2, {
|
|
||||||
'_types': ['Embedded'],
|
|
||||||
'_cls': 'Embedded',
|
|
||||||
'string_field': 'hello world',
|
|
||||||
'int_field': 1,
|
|
||||||
'list_field': ['1', 2, {'hello': 'world'}],
|
|
||||||
'dict_field': {'hello': 'world'}}]}, {}))
|
|
||||||
self.assertEquals(doc._delta(), ({
|
|
||||||
'embedded_field.list_field': ['1', 2, {
|
|
||||||
'_types': ['Embedded'],
|
|
||||||
'_cls': 'Embedded',
|
|
||||||
'string_field': 'hello world',
|
|
||||||
'int_field': 1,
|
|
||||||
'list_field': ['1', 2, {'hello': 'world'}],
|
|
||||||
'dict_field': {'hello': 'world'}}
|
|
||||||
]}, {}))
|
|
||||||
doc.save()
|
|
||||||
doc.reload()
|
|
||||||
self.assertEquals(doc.embedded_field.list_field[2].string_field, 'hello world')
|
|
||||||
|
|
||||||
# Test list native methods
|
|
||||||
doc.embedded_field.list_field[2].list_field.pop(0)
|
|
||||||
self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {}))
|
|
||||||
doc.save()
|
|
||||||
doc.reload()
|
|
||||||
|
|
||||||
doc.embedded_field.list_field[2].list_field.append(1)
|
|
||||||
self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {}))
|
|
||||||
doc.save()
|
|
||||||
doc.reload()
|
|
||||||
self.assertEquals(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1])
|
|
||||||
|
|
||||||
doc.embedded_field.list_field[2].list_field.sort()
|
|
||||||
doc.save()
|
|
||||||
doc.reload()
|
|
||||||
self.assertEquals(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}])
|
|
||||||
|
|
||||||
del(doc.embedded_field.list_field[2].list_field[2]['hello'])
|
|
||||||
self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {}))
|
|
||||||
doc.save()
|
|
||||||
doc.reload()
|
|
||||||
|
|
||||||
del(doc.embedded_field.list_field[2].list_field)
|
|
||||||
self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1}))
|
|
||||||
|
|
||||||
doc.save()
|
|
||||||
doc.reload()
|
|
||||||
|
|
||||||
doc.dict_field = {'embedded': embedded_1}
|
|
||||||
doc.save()
|
|
||||||
doc.reload()
|
|
||||||
|
|
||||||
doc.dict_field['embedded'].string_field = 'Hello World'
|
|
||||||
self.assertEquals(doc._get_changed_fields(), ['dict_field.embedded.string_field'])
|
|
||||||
self.assertEquals(doc._delta(), ({'dict_field.embedded.string_field': 'Hello World'}, {}))
|
|
||||||
|
|
||||||
def test_indexes(self):
|
|
||||||
"""Ensure that indexes are used when meta[indexes] is specified.
|
|
||||||
"""
|
|
||||||
class BlogPost(DynamicDocument):
|
|
||||||
meta = {
|
|
||||||
'indexes': [
|
|
||||||
'-date',
|
|
||||||
('category', '-date')
|
|
||||||
],
|
|
||||||
}
|
|
||||||
|
|
||||||
BlogPost.drop_collection()
|
|
||||||
|
|
||||||
info = BlogPost.objects._collection.index_information()
|
|
||||||
# _id, '-date', ('cat', 'date')
|
|
||||||
# NB: there is no index on _types by itself, since
|
|
||||||
# the indices on -date and tags will both contain
|
|
||||||
# _types as first element in the key
|
|
||||||
self.assertEqual(len(info), 3)
|
|
||||||
|
|
||||||
# Indexes are lazy so use list() to perform query
|
|
||||||
list(BlogPost.objects)
|
|
||||||
info = BlogPost.objects._collection.index_information()
|
|
||||||
info = [value['key'] for key, value in info.iteritems()]
|
|
||||||
self.assertTrue([('_types', 1), ('category', 1), ('date', -1)]
|
|
||||||
in info)
|
|
||||||
self.assertTrue([('_types', 1), ('date', -1)] in info)
|
|
1168
tests/fields.py
1168
tests/fields.py
File diff suppressed because it is too large
Load Diff
@@ -1,23 +0,0 @@
|
|||||||
from datetime import datetime
|
|
||||||
|
|
||||||
from mongoengine import *
|
|
||||||
|
|
||||||
|
|
||||||
class PickleEmbedded(EmbeddedDocument):
|
|
||||||
date = DateTimeField(default=datetime.now)
|
|
||||||
|
|
||||||
|
|
||||||
class PickleTest(Document):
|
|
||||||
number = IntField()
|
|
||||||
string = StringField(choices=(('One', '1'), ('Two', '2')))
|
|
||||||
embedded = EmbeddedDocumentField(PickleEmbedded)
|
|
||||||
lists = ListField(StringField())
|
|
||||||
photo = FileField()
|
|
||||||
|
|
||||||
|
|
||||||
class Mixin(object):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
|
|
||||||
class Base(Document):
|
|
||||||
meta = {'allow_inheritance': True}
|
|
Binary file not shown.
Before Width: | Height: | Size: 8.1 KiB |
1778
tests/queryset.py
1778
tests/queryset.py
File diff suppressed because it is too large
Load Diff
230
tests/signals.py
230
tests/signals.py
@@ -1,230 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
import unittest
|
|
||||||
|
|
||||||
from mongoengine import *
|
|
||||||
from mongoengine import signals
|
|
||||||
|
|
||||||
signal_output = []
|
|
||||||
|
|
||||||
|
|
||||||
class SignalTests(unittest.TestCase):
|
|
||||||
"""
|
|
||||||
Testing signals before/after saving and deleting.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def get_signal_output(self, fn, *args, **kwargs):
|
|
||||||
# Flush any existing signal output
|
|
||||||
global signal_output
|
|
||||||
signal_output = []
|
|
||||||
fn(*args, **kwargs)
|
|
||||||
return signal_output
|
|
||||||
|
|
||||||
def setUp(self):
|
|
||||||
connect(db='mongoenginetest')
|
|
||||||
class Author(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
def __unicode__(self):
|
|
||||||
return self.name
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def pre_init(cls, sender, document, *args, **kwargs):
|
|
||||||
signal_output.append('pre_init signal, %s' % cls.__name__)
|
|
||||||
signal_output.append(str(kwargs['values']))
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def post_init(cls, sender, document, **kwargs):
|
|
||||||
signal_output.append('post_init signal, %s' % document)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def pre_save(cls, sender, document, **kwargs):
|
|
||||||
signal_output.append('pre_save signal, %s' % document)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def post_save(cls, sender, document, **kwargs):
|
|
||||||
signal_output.append('post_save signal, %s' % document)
|
|
||||||
if 'created' in kwargs:
|
|
||||||
if kwargs['created']:
|
|
||||||
signal_output.append('Is created')
|
|
||||||
else:
|
|
||||||
signal_output.append('Is updated')
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def pre_delete(cls, sender, document, **kwargs):
|
|
||||||
signal_output.append('pre_delete signal, %s' % document)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def post_delete(cls, sender, document, **kwargs):
|
|
||||||
signal_output.append('post_delete signal, %s' % document)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def pre_bulk_insert(cls, sender, documents, **kwargs):
|
|
||||||
signal_output.append('pre_bulk_insert signal, %s' % documents)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def post_bulk_insert(cls, sender, documents, **kwargs):
|
|
||||||
signal_output.append('post_bulk_insert signal, %s' % documents)
|
|
||||||
if kwargs.get('loaded', False):
|
|
||||||
signal_output.append('Is loaded')
|
|
||||||
else:
|
|
||||||
signal_output.append('Not loaded')
|
|
||||||
self.Author = Author
|
|
||||||
|
|
||||||
|
|
||||||
class Another(Document):
|
|
||||||
name = StringField()
|
|
||||||
|
|
||||||
def __unicode__(self):
|
|
||||||
return self.name
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def pre_init(cls, sender, document, **kwargs):
|
|
||||||
signal_output.append('pre_init Another signal, %s' % cls.__name__)
|
|
||||||
signal_output.append(str(kwargs['values']))
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def post_init(cls, sender, document, **kwargs):
|
|
||||||
signal_output.append('post_init Another signal, %s' % document)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def pre_save(cls, sender, document, **kwargs):
|
|
||||||
signal_output.append('pre_save Another signal, %s' % document)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def post_save(cls, sender, document, **kwargs):
|
|
||||||
signal_output.append('post_save Another signal, %s' % document)
|
|
||||||
if 'created' in kwargs:
|
|
||||||
if kwargs['created']:
|
|
||||||
signal_output.append('Is created')
|
|
||||||
else:
|
|
||||||
signal_output.append('Is updated')
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def pre_delete(cls, sender, document, **kwargs):
|
|
||||||
signal_output.append('pre_delete Another signal, %s' % document)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def post_delete(cls, sender, document, **kwargs):
|
|
||||||
signal_output.append('post_delete Another signal, %s' % document)
|
|
||||||
|
|
||||||
self.Another = Another
|
|
||||||
# Save up the number of connected signals so that we can check at the end
|
|
||||||
# that all the signals we register get properly unregistered
|
|
||||||
self.pre_signals = (
|
|
||||||
len(signals.pre_init.receivers),
|
|
||||||
len(signals.post_init.receivers),
|
|
||||||
len(signals.pre_save.receivers),
|
|
||||||
len(signals.post_save.receivers),
|
|
||||||
len(signals.pre_delete.receivers),
|
|
||||||
len(signals.post_delete.receivers),
|
|
||||||
len(signals.pre_bulk_insert.receivers),
|
|
||||||
len(signals.post_bulk_insert.receivers),
|
|
||||||
)
|
|
||||||
|
|
||||||
signals.pre_init.connect(Author.pre_init, sender=Author)
|
|
||||||
signals.post_init.connect(Author.post_init, sender=Author)
|
|
||||||
signals.pre_save.connect(Author.pre_save, sender=Author)
|
|
||||||
signals.post_save.connect(Author.post_save, sender=Author)
|
|
||||||
signals.pre_delete.connect(Author.pre_delete, sender=Author)
|
|
||||||
signals.post_delete.connect(Author.post_delete, sender=Author)
|
|
||||||
signals.pre_bulk_insert.connect(Author.pre_bulk_insert, sender=Author)
|
|
||||||
signals.post_bulk_insert.connect(Author.post_bulk_insert, sender=Author)
|
|
||||||
|
|
||||||
signals.pre_init.connect(Another.pre_init, sender=Another)
|
|
||||||
signals.post_init.connect(Another.post_init, sender=Another)
|
|
||||||
signals.pre_save.connect(Another.pre_save, sender=Another)
|
|
||||||
signals.post_save.connect(Another.post_save, sender=Another)
|
|
||||||
signals.pre_delete.connect(Another.pre_delete, sender=Another)
|
|
||||||
signals.post_delete.connect(Another.post_delete, sender=Another)
|
|
||||||
|
|
||||||
def tearDown(self):
    """Disconnect every handler hooked up in setUp, then verify that the
    global receiver counts are back to the values captured before setUp
    connected anything (i.e. nothing leaked)."""
    author_handlers = [
        (signals.pre_init, self.Author.pre_init),
        (signals.post_init, self.Author.post_init),
        (signals.post_delete, self.Author.post_delete),
        (signals.pre_delete, self.Author.pre_delete),
        (signals.post_save, self.Author.post_save),
        (signals.pre_save, self.Author.pre_save),
        (signals.pre_bulk_insert, self.Author.pre_bulk_insert),
        (signals.post_bulk_insert, self.Author.post_bulk_insert),
    ]
    # Another never registered bulk-insert handlers, so none to remove.
    another_handlers = [
        (signals.pre_init, self.Another.pre_init),
        (signals.post_init, self.Another.post_init),
        (signals.post_delete, self.Another.post_delete),
        (signals.pre_delete, self.Another.pre_delete),
        (signals.post_save, self.Another.post_save),
        (signals.pre_save, self.Another.pre_save),
    ]
    for signal, handler in author_handlers + another_handlers:
        signal.disconnect(handler)

    # Check that all our signals got disconnected properly.
    post_signals = (
        len(signals.pre_init.receivers),
        len(signals.post_init.receivers),
        len(signals.pre_save.receivers),
        len(signals.post_save.receivers),
        len(signals.pre_delete.receivers),
        len(signals.post_delete.receivers),
        len(signals.pre_bulk_insert.receivers),
        len(signals.post_bulk_insert.receivers),
    )
    self.assertEqual(self.pre_signals, post_signals)
def test_model_signals(self):
    """Model saves should throw some signals.

    Exercises the full signal lifecycle on `Author`: init, save (create
    and update), delete, and bulk insert (with and without reloading the
    inserted documents).  `get_signal_output` collects whatever the
    connected handlers appended to the shared output list.
    """

    def create_author():
        # Instantiation alone should fire pre_init/post_init; the
        # instance itself is deliberately unused.
        a1 = self.Author(name='Bill Shakespeare')

    def bulk_create_author_with_load():
        a1 = self.Author(name='Bill Shakespeare')
        self.Author.objects.insert([a1], load_bulk=True)

    def bulk_create_author_without_load():
        a1 = self.Author(name='Bill Shakespeare')
        self.Author.objects.insert([a1], load_bulk=False)

    self.assertEqual(self.get_signal_output(create_author), [
        "pre_init signal, Author",
        "{'name': 'Bill Shakespeare'}",
        "post_init signal, Bill Shakespeare",
    ])

    a1 = self.Author(name='Bill Shakespeare')
    self.assertEqual(self.get_signal_output(a1.save), [
        "pre_save signal, Bill Shakespeare",
        "post_save signal, Bill Shakespeare",
        "Is created"
    ])

    a1.reload()
    a1.name = 'William Shakespeare'
    self.assertEqual(self.get_signal_output(a1.save), [
        "pre_save signal, William Shakespeare",
        "post_save signal, William Shakespeare",
        "Is updated"
    ])

    self.assertEqual(self.get_signal_output(a1.delete), [
        'pre_delete signal, William Shakespeare',
        'post_delete signal, William Shakespeare',
    ])

    signal_output = self.get_signal_output(bulk_create_author_with_load)

    # The output of this signal is not entirely deterministic. The reloaded
    # object will have an object ID. Hence, we only check part of the output
    # NOTE: assertEquals is a deprecated alias of assertEqual; use the
    # canonical name, consistent with the rest of this test.
    self.assertEqual(signal_output[3],
        "pre_bulk_insert signal, [<Author: Bill Shakespeare>]")
    self.assertEqual(signal_output[-2:],
        ["post_bulk_insert signal, [<Author: Bill Shakespeare>]",
         "Is loaded",])

    self.assertEqual(self.get_signal_output(bulk_create_author_without_load), [
        "pre_init signal, Author",
        "{'name': 'Bill Shakespeare'}",
        "post_init signal, Bill Shakespeare",
        "pre_bulk_insert signal, [<Author: Bill Shakespeare>]",
        "post_bulk_insert signal, [<Author: Bill Shakespeare>]",
        "Not loaded",
    ])

    # Clean up the documents created by this test.
    self.Author.objects.delete()
|
|
Reference in New Issue
Block a user