Commit 10e0b1daec by Wilson Júnior, 2013-05-16 12:50:47 -03:00
49 changed files with 2087 additions and 612 deletions

View File

@@ -11,7 +11,6 @@ env:
   - PYMONGO=dev DJANGO=1.4.2
   - PYMONGO=2.5 DJANGO=1.5.1
   - PYMONGO=2.5 DJANGO=1.4.2
-  - PYMONGO=2.4.2 DJANGO=1.4.2
 install:
   - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi
   - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi

View File

@@ -157,3 +157,6 @@ that much better:
 * Kenneth Falck
 * Lukasz Balcerzak
 * Nicolas Cortot
+* Alex (https://github.com/kelsta)
+* Jin Zhang

View File

@@ -20,7 +20,7 @@ post to the `user group <http://groups.google.com/group/mongoengine-users>`
 Supported Interpreters
 ----------------------
-PyMongo supports CPython 2.5 and newer. Language
+MongoEngine supports CPython 2.6 and newer. Language
 features not supported by all interpreters can not be used.
 Please also ensure that your code is properly converted by
 `2to3 <http://docs.python.org/library/2to3.html>`_ for Python 3 support.
@@ -46,7 +46,7 @@ General Guidelines
 - Write tests and make sure they pass (make sure you have a mongod
   running on the default port, then execute ``python setup.py test``
   from the cmd line to run the test suite).
-- Add yourself to AUTHORS.rst :)
+- Add yourself to AUTHORS :)
 Documentation
 -------------

View File

@@ -26,7 +26,7 @@ setup.py install``.
 Dependencies
 ============
-- pymongo 2.1.1+
+- pymongo 2.5+
 - sphinx (optional - for documentation generation)
 Examples

View File

@@ -86,17 +86,43 @@ def main():
 ----------------------------------------------------------------------------------------------------
 Creating 10000 dictionaries - MongoEngine, force=True
 8.36906409264
+
+0.8.X
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - Pymongo
+3.69964408875
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - Pymongo write_concern={"w": 0}
+3.5526599884
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine
+7.00959801674
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries without continual assign - MongoEngine
+5.60943293571
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade=True
+6.715102911
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True
+5.50644683838
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False
+4.69851183891
+----------------------------------------------------------------------------------------------------
+Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False
+4.68946313858
+----------------------------------------------------------------------------------------------------
 """
 setup = """
-from pymongo import Connection
-connection = Connection()
+from pymongo import MongoClient
+connection = MongoClient()
 connection.drop_database('timeit_test')
 """
 stmt = """
-from pymongo import Connection
-connection = Connection()
+from pymongo import MongoClient
+connection = MongoClient()
 db = connection.timeit_test
 noddy = db.noddy
@@ -106,7 +132,7 @@ for i in xrange(10000):
     for j in range(20):
         example['fields']["key"+str(j)] = "value "+str(j)
-    noddy.insert(example)
+    noddy.save(example)
 myNoddys = noddy.find()
 [n for n in myNoddys] # iterate
@@ -117,9 +143,32 @@ myNoddys = noddy.find()
 t = timeit.Timer(stmt=stmt, setup=setup)
 print t.timeit(1)
+
+stmt = """
+from pymongo import MongoClient
+connection = MongoClient()
+db = connection.timeit_test
+noddy = db.noddy
+for i in xrange(10000):
+    example = {'fields': {}}
+    for j in range(20):
+        example['fields']["key"+str(j)] = "value "+str(j)
+    noddy.save(example, write_concern={"w": 0})
+myNoddys = noddy.find()
+[n for n in myNoddys] # iterate
+"""
+
+print "-" * 100
+print """Creating 10000 dictionaries - Pymongo write_concern={"w": 0}"""
+t = timeit.Timer(stmt=stmt, setup=setup)
+print t.timeit(1)
+
 setup = """
-from pymongo import Connection
-connection = Connection()
+from pymongo import MongoClient
+connection = MongoClient()
 connection.drop_database('timeit_test')
 connection.disconnect()
@@ -149,33 +198,18 @@ myNoddys = Noddy.objects()
 stmt = """
 for i in xrange(10000):
     noddy = Noddy()
+    fields = {}
     for j in range(20):
-        noddy.fields["key"+str(j)] = "value "+str(j)
-    noddy.save(safe=False, validate=False)
+        fields["key"+str(j)] = "value "+str(j)
+    noddy.fields = fields
+    noddy.save()
 myNoddys = Noddy.objects()
 [n for n in myNoddys] # iterate
 """
 print "-" * 100
-print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False"""
-t = timeit.Timer(stmt=stmt, setup=setup)
-print t.timeit(1)
-
-stmt = """
-for i in xrange(10000):
-    noddy = Noddy()
-    for j in range(20):
-        noddy.fields["key"+str(j)] = "value "+str(j)
-    noddy.save(safe=False, validate=False, cascade=False)
-myNoddys = Noddy.objects()
-[n for n in myNoddys] # iterate
-"""
-print "-" * 100
-print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False"""
+print """Creating 10000 dictionaries without continual assign - MongoEngine"""
 t = timeit.Timer(stmt=stmt, setup=setup)
 print t.timeit(1)
@@ -184,16 +218,65 @@ for i in xrange(10000):
     noddy = Noddy()
     for j in range(20):
         noddy.fields["key"+str(j)] = "value "+str(j)
-    noddy.save(force_insert=True, safe=False, validate=False, cascade=False)
+    noddy.save(write_concern={"w": 0}, cascade=True)
 myNoddys = Noddy.objects()
 [n for n in myNoddys] # iterate
 """
 print "-" * 100
-print """Creating 10000 dictionaries - MongoEngine, force=True"""
+print """Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True"""
 t = timeit.Timer(stmt=stmt, setup=setup)
 print t.timeit(1)
+
+stmt = """
+for i in xrange(10000):
+    noddy = Noddy()
+    for j in range(20):
+        noddy.fields["key"+str(j)] = "value "+str(j)
+    noddy.save(write_concern={"w": 0}, validate=False, cascade=True)
+myNoddys = Noddy.objects()
+[n for n in myNoddys] # iterate
+"""
+
+print "-" * 100
+print """Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True"""
+t = timeit.Timer(stmt=stmt, setup=setup)
+print t.timeit(1)
+
+stmt = """
+for i in xrange(10000):
+    noddy = Noddy()
+    for j in range(20):
+        noddy.fields["key"+str(j)] = "value "+str(j)
+    noddy.save(validate=False, write_concern={"w": 0})
+myNoddys = Noddy.objects()
+[n for n in myNoddys] # iterate
+"""
+
+print "-" * 100
+print """Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False"""
+t = timeit.Timer(stmt=stmt, setup=setup)
+print t.timeit(1)
+
+stmt = """
+for i in xrange(10000):
+    noddy = Noddy()
+    for j in range(20):
+        noddy.fields["key"+str(j)] = "value "+str(j)
+    noddy.save(force_insert=True, write_concern={"w": 0}, validate=False)
+myNoddys = Noddy.objects()
+[n for n in myNoddys] # iterate
+"""
+
+print "-" * 100
+print """Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False"""
+t = timeit.Timer(stmt=stmt, setup=setup)
+print t.timeit(1)
 
 if __name__ == "__main__":
     main()

View File

@@ -76,10 +76,13 @@ Fields
 .. autoclass:: mongoengine.fields.BinaryField
 .. autoclass:: mongoengine.fields.FileField
 .. autoclass:: mongoengine.fields.ImageField
-.. autoclass:: mongoengine.fields.GeoPointField
 .. autoclass:: mongoengine.fields.SequenceField
 .. autoclass:: mongoengine.fields.ObjectIdField
 .. autoclass:: mongoengine.fields.UUIDField
+.. autoclass:: mongoengine.fields.GeoPointField
+.. autoclass:: mongoengine.fields.PointField
+.. autoclass:: mongoengine.fields.LineStringField
+.. autoclass:: mongoengine.fields.PolygonField
 .. autoclass:: mongoengine.fields.GridFSError
 .. autoclass:: mongoengine.fields.GridFSProxy
 .. autoclass:: mongoengine.fields.ImageGridFsProxy

View File

@@ -2,8 +2,23 @@
 Changelog
 =========
-Changes in 0.8.X
+Changes in 0.8.0
 ================
+- Added no_sub_classes context manager and queryset helper (#312)
+- Querysets now utilise a local cache
+- Changed __len__ behaviour in the queryset (#247, #311)
+- Fixed querying string versions of ObjectIds issue with ReferenceField (#307)
+- Added $setOnInsert support for upserts (#308)
+- Upserts now possible with just query parameters (#309)
+- Upserting is the only way to ensure docs are saved correctly (#306)
+- Fixed register_delete_rule inheritance issue
+- Fix cloning of sliced querysets (#303)
+- Fixed update_one write concern (#302)
+- Updated minimum requirement for pymongo to 2.5
+- Add support for new geojson fields, indexes and queries (#299)
+- If values can't be compared mark as changed (#287)
+- Ensure as_pymongo() and to_json honour only() and exclude() (#293)
+- Document serialization uses field order to ensure a strict order is set (#296)
 - DecimalField now stores as float not string (#289)
 - UUIDField now stores as a binary by default (#292)
 - Added Custom User Model for Django 1.5 (#285)
@@ -13,7 +28,6 @@ Changes in 0.8.X
 - Added SequenceField.set_next_value(value) helper (#159)
 - Updated .only() behaviour - now like exclude it is chainable (#202)
 - Added with_limit_and_skip support to count() (#235)
-- Removed __len__ from queryset (#247)
 - Objects queryset manager now inherited (#256)
 - Updated connection to use MongoClient (#262, #274)
 - Fixed db_alias and inherited Documents (#143)

View File

@@ -132,7 +132,11 @@ html_theme_path = ['_themes']
 html_use_smartypants = True
 
 # Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
+html_sidebars = {
+    'index': ['globaltoc.html', 'searchbox.html'],
+    '**': ['localtoc.html', 'relations.html', 'searchbox.html']
+}
 
 # Additional templates that should be rendered to pages, maps page names to
 # template names.

View File

@@ -1,8 +1,8 @@
-=============================
-Using MongoEngine with Django
-=============================
+==============
+Django Support
+==============
 
-.. note:: Updated to support Django 1.4
+.. note:: Updated to support Django 1.5
 
 Connecting
 ==========
@@ -98,7 +98,7 @@ Django provides session cookie, which expires after ```SESSION_COOKIE_AGE``` sec
 Storage
 =======
-With MongoEngine's support for GridFS via the :class:`~mongoengine.FileField`,
+With MongoEngine's support for GridFS via the :class:`~mongoengine.fields.FileField`,
 it is useful to have a Django file storage backend that wraps this. The new
 storage module is called :class:`~mongoengine.django.storage.GridFSStorage`.
 Using it is very similar to using the default FileSystemStorage.::
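As a rough illustration only (the ``GridFSStorage`` class is real, but the file name and
content below are made up, and the exact calls follow Django's generic storage API rather
than the original example)::

    from django.core.files.base import ContentFile
    from mongoengine.django.storage import GridFSStorage

    fs = GridFSStorage()
    # Save some content under a filename, then read it back out of GridFS
    filename = fs.save('hello.txt', ContentFile('Hello, World!'))
    print fs.open(filename).read()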

View File

@@ -24,6 +24,9 @@ objects** as class attributes to the document class::
     title = StringField(max_length=200, required=True)
     date_modified = DateTimeField(default=datetime.datetime.now)
 
+As BSON (the binary format for storing data in mongodb) is order dependent,
+documents are serialized based on their field order.
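For illustration only, a sketch of what that ordering means in practice (``to_mongo()`` is
the method this release changes to return an ordered ``SON``; the ``Page`` class stands in
for the example document above)::

    page = Page(title='Using MongoEngine')
    son = page.to_mongo()
    # Keys follow the declared field order, e.g. title before date_modified
    print son.keys()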
 Dynamic document schemas
 ========================
 One of the benefits of MongoDb is dynamic schemas for a collection, whilst data
@@ -51,6 +54,7 @@ be saved ::
 There is one caveat on Dynamic Documents: fields cannot start with `_`
 
+Dynamic fields are stored in alphabetical order *after* any declared fields.
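A quick sketch of that ordering rule (the ``Profile`` document and its field names are
invented for illustration)::

    class Profile(DynamicDocument):
        name = StringField()

    profile = Profile(name='Bob')
    profile.zebra = 1
    profile.apple = 2
    profile.save()
    # Stored with the declared field first, then the dynamic fields
    # alphabetically: name, apple, zebra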
 Fields
 ======
@@ -62,31 +66,31 @@ not provided. Default values may optionally be a callable, which will be called
 to retrieve the value (such as in the above example). The field types available
 are as follows:
 
-* :class:`~mongoengine.BinaryField`
-* :class:`~mongoengine.BooleanField`
-* :class:`~mongoengine.ComplexDateTimeField`
-* :class:`~mongoengine.DateTimeField`
-* :class:`~mongoengine.DecimalField`
-* :class:`~mongoengine.DictField`
-* :class:`~mongoengine.DynamicField`
-* :class:`~mongoengine.EmailField`
-* :class:`~mongoengine.EmbeddedDocumentField`
-* :class:`~mongoengine.FileField`
-* :class:`~mongoengine.FloatField`
-* :class:`~mongoengine.GenericEmbeddedDocumentField`
-* :class:`~mongoengine.GenericReferenceField`
-* :class:`~mongoengine.GeoPointField`
-* :class:`~mongoengine.ImageField`
-* :class:`~mongoengine.IntField`
-* :class:`~mongoengine.ListField`
-* :class:`~mongoengine.MapField`
-* :class:`~mongoengine.ObjectIdField`
-* :class:`~mongoengine.ReferenceField`
-* :class:`~mongoengine.SequenceField`
-* :class:`~mongoengine.SortedListField`
-* :class:`~mongoengine.StringField`
-* :class:`~mongoengine.URLField`
-* :class:`~mongoengine.UUIDField`
+* :class:`~mongoengine.fields.BinaryField`
+* :class:`~mongoengine.fields.BooleanField`
+* :class:`~mongoengine.fields.ComplexDateTimeField`
+* :class:`~mongoengine.fields.DateTimeField`
+* :class:`~mongoengine.fields.DecimalField`
+* :class:`~mongoengine.fields.DictField`
+* :class:`~mongoengine.fields.DynamicField`
+* :class:`~mongoengine.fields.EmailField`
+* :class:`~mongoengine.fields.EmbeddedDocumentField`
+* :class:`~mongoengine.fields.FileField`
+* :class:`~mongoengine.fields.FloatField`
+* :class:`~mongoengine.fields.GenericEmbeddedDocumentField`
+* :class:`~mongoengine.fields.GenericReferenceField`
+* :class:`~mongoengine.fields.GeoPointField`
+* :class:`~mongoengine.fields.ImageField`
+* :class:`~mongoengine.fields.IntField`
+* :class:`~mongoengine.fields.ListField`
+* :class:`~mongoengine.fields.MapField`
+* :class:`~mongoengine.fields.ObjectIdField`
+* :class:`~mongoengine.fields.ReferenceField`
+* :class:`~mongoengine.fields.SequenceField`
+* :class:`~mongoengine.fields.SortedListField`
+* :class:`~mongoengine.fields.StringField`
+* :class:`~mongoengine.fields.URLField`
+* :class:`~mongoengine.fields.UUIDField`
 
 Field arguments
 ---------------
@@ -110,7 +114,7 @@ arguments can be set on all fields:
 The definion of default parameters follow `the general rules on Python
 <http://docs.python.org/reference/compound_stmts.html#function-definitions>`__,
 which means that some care should be taken when dealing with default mutable objects
-(like in :class:`~mongoengine.ListField` or :class:`~mongoengine.DictField`)::
+(like in :class:`~mongoengine.fields.ListField` or :class:`~mongoengine.fields.DictField`)::
 
     class ExampleFirst(Document):
        # Default an empty list
@@ -172,8 +176,8 @@ arguments can be set on all fields:
 List fields
 -----------
 MongoDB allows the storage of lists of items. To add a list of items to a
-:class:`~mongoengine.Document`, use the :class:`~mongoengine.ListField` field
-type. :class:`~mongoengine.ListField` takes another field object as its first
+:class:`~mongoengine.Document`, use the :class:`~mongoengine.fields.ListField` field
+type. :class:`~mongoengine.fields.ListField` takes another field object as its first
 argument, which specifies which type elements may be stored within the list::
 
     class Page(Document):
@@ -191,7 +195,7 @@ inherit from :class:`~mongoengine.EmbeddedDocument` rather than
         content = StringField()
 
 To embed the document within another document, use the
-:class:`~mongoengine.EmbeddedDocumentField` field type, providing the embedded
+:class:`~mongoengine.fields.EmbeddedDocumentField` field type, providing the embedded
 document class as the first argument::
 
     class Page(Document):
@@ -206,7 +210,7 @@ Dictionary Fields
 Often, an embedded document may be used instead of a dictionary -- generally
 this is recommended as dictionaries don't support validation or custom field
 types. However, sometimes you will not know the structure of what you want to
-store; in this situation a :class:`~mongoengine.DictField` is appropriate::
+store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate::
 
     class SurveyResponse(Document):
         date = DateTimeField()
@@ -224,7 +228,7 @@ other objects, so are the most flexible field type available.
 Reference fields
 ----------------
 References may be stored to other documents in the database using the
-:class:`~mongoengine.ReferenceField`. Pass in another document class as the
+:class:`~mongoengine.fields.ReferenceField`. Pass in another document class as the
 first argument to the constructor, then simply assign document objects to the
 field::
@@ -245,9 +249,9 @@ field::
 The :class:`User` object is automatically turned into a reference behind the
 scenes, and dereferenced when the :class:`Page` object is retrieved.
 
-To add a :class:`~mongoengine.ReferenceField` that references the document
+To add a :class:`~mongoengine.fields.ReferenceField` that references the document
 being defined, use the string ``'self'`` in place of the document class as the
-argument to :class:`~mongoengine.ReferenceField`'s constructor. To reference a
+argument to :class:`~mongoengine.fields.ReferenceField`'s constructor. To reference a
 document that has not yet been defined, use the name of the undefined document
 as the constructor's argument::
@@ -325,7 +329,7 @@ Its value can take any of the following constants:
 :const:`mongoengine.PULL`
     Removes the reference to the object (using MongoDB's "pull" operation)
     from any object's fields of
-    :class:`~mongoengine.ListField` (:class:`~mongoengine.ReferenceField`).
+    :class:`~mongoengine.fields.ListField` (:class:`~mongoengine.fields.ReferenceField`).
 
 .. warning::
@@ -352,7 +356,7 @@ Its value can take any of the following constants:
 Generic reference fields
 ''''''''''''''''''''''''
 A second kind of reference field also exists,
-:class:`~mongoengine.GenericReferenceField`. This allows you to reference any
+:class:`~mongoengine.fields.GenericReferenceField`. This allows you to reference any
 kind of :class:`~mongoengine.Document`, and hence doesn't take a
 :class:`~mongoengine.Document` subclass as a constructor argument::
@@ -376,15 +380,15 @@ kind of :class:`~mongoengine.Document`, and hence doesn't take a
 .. note::
 
-    Using :class:`~mongoengine.GenericReferenceField`\ s is slightly less
-    efficient than the standard :class:`~mongoengine.ReferenceField`\ s, so if
+    Using :class:`~mongoengine.fields.GenericReferenceField`\ s is slightly less
+    efficient than the standard :class:`~mongoengine.fields.ReferenceField`\ s, so if
     you will only be referencing one document type, prefer the standard
-    :class:`~mongoengine.ReferenceField`.
+    :class:`~mongoengine.fields.ReferenceField`.
 
 Uniqueness constraints
 ----------------------
 MongoEngine allows you to specify that a field should be unique across a
-collection by providing ``unique=True`` to a :class:`~mongoengine.Field`\ 's
+collection by providing ``unique=True`` to a :class:`~mongoengine.fields.Field`\ 's
 constructor. If you try to save a document that has the same value for a unique
 field as a document that is already in the database, a
 :class:`~mongoengine.OperationError` will be raised. You may also specify
@@ -475,6 +479,10 @@ If a dictionary is passed then the following options are available:
 :attr:`unique` (Default: False)
     Whether the index should be unique.
 
+:attr:`expireAfterSeconds` (Optional)
+    Allows you to automatically expire data from a collection by setting the
+    time in seconds to expire a field.
+
 .. note::
 
     Inheritance adds extra fields indices see: :ref:`document-inheritance`.
@@ -485,18 +493,47 @@ Compound Indexes and Indexing sub documents
 Compound indexes can be created by adding the Embedded field or dictionary
 field name to the index definition.
 
-Sometimes its more efficient to index parts of Embeedded / dictionary fields,
+Sometimes it's more efficient to index parts of Embedded / dictionary fields,
 in this case use 'dot' notation to identify the value to index eg: `rank.title`
 
 Geospatial indexes
 ------------------
+
+The best geo index for mongodb is the new "2dsphere", which has an improved
+spherical model and provides better performance and more options when querying.
+The following fields will explicitly add a "2dsphere" index:
+
+    - :class:`~mongoengine.fields.PointField`
+    - :class:`~mongoengine.fields.LineStringField`
+    - :class:`~mongoengine.fields.PolygonField`
+
+As "2dsphere" indexes can be part of a compound index, you may not want the
+automatic index but would prefer a compound index. In this example we turn off
+auto indexing and explicitly declare a compound index on ``location`` and ``datetime``::
+
+    class Log(Document):
+        location = PointField(auto_index=False)
+        datetime = DateTimeField()
+
+        meta = {
+            'indexes': [[("location", "2dsphere"), ("datetime", 1)]]
+        }
+
+Pre MongoDB 2.4 Geo
+'''''''''''''''''''
+
+.. note:: For MongoDB < 2.4 this is still current, however the new 2dsphere
+    index is a big improvement over the previous 2D model - so upgrading is
+    advised.
+
 Geospatial indexes will be automatically created for all
-:class:`~mongoengine.GeoPointField`\ s
+:class:`~mongoengine.fields.GeoPointField`\ s
 
 It is also possible to explicitly define geospatial indexes. This is
 useful if you need to define a geospatial index on a subfield of a
-:class:`~mongoengine.DictField` or a custom field that contains a
+:class:`~mongoengine.fields.DictField` or a custom field that contains a
 point. To create a geospatial index you must prefix the field with the
 ***** sign. ::
@@ -508,6 +545,22 @@ point. To create a geospatial index you must prefix the field with the
         ],
     }
 
+Time To Live indexes
+--------------------
+
+A special index type that allows you to automatically expire data from a
+collection after a given period. See the official
+`ttl <http://docs.mongodb.org/manual/tutorial/expire-data/#expire-data-from-collections-by-setting-ttl>`_
+documentation for more information. A common use case might be session data::
+
+    class Session(Document):
+        created = DateTimeField(default=datetime.now)
+        meta = {
+            'indexes': [
+                {'fields': ['created'], 'expireAfterSeconds': 3600}
+            ]
+        }
+
 Ordering
 ========
 A default ordering can be specified for your

View File

@@ -30,11 +30,14 @@ already exist, then any changes will be updated atomically. For example::
 .. note::
-    Changes to documents are tracked and on the whole perform `set` operations.
+    Changes to documents are tracked and on the whole perform ``set`` operations.
 
-    * ``list_field.pop(0)`` - *sets* the resulting list
+    * ``list_field.push(0)`` - *sets* the resulting list
     * ``del(list_field)`` - *unsets* whole list
 
+    With lists it is preferable to use ``Doc.update(push__list_field=0)`` as
+    this stops the whole list being updated, avoiding any race conditions.
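As a sketch of that recommendation (the ``Post`` document and field names here are
illustrative)::

    class Post(Document):
        tags = ListField(StringField())

    post = Post(tags=['mongodb']).save()

    # Pushes a single value atomically instead of rewriting the whole list
    Post.objects(id=post.id).update_one(push__tags='mongoengine')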
 .. seealso::
     :ref:`guide-atomic-updates`
 
@@ -68,11 +71,12 @@ document values for example::
 Cascading Saves
 ---------------
-If your document contains :class:`~mongoengine.ReferenceField` or
-:class:`~mongoengine.GenericReferenceField` objects, then by default the
-:meth:`~mongoengine.Document.save` method will automatically save any changes to
-those objects as well. If this is not desired passing :attr:`cascade` as False
-to the save method turns this feature off.
+If your document contains :class:`~mongoengine.fields.ReferenceField` or
+:class:`~mongoengine.fields.GenericReferenceField` objects, then by default the
+:meth:`~mongoengine.Document.save` method will not save any changes to
+those objects. If you want any references to be saved as well (note that each
+save is a separate query), then passing :attr:`cascade` as True
+to the save method will cascade any saves.
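A small sketch of the behaviour described above (the document classes are invented for
illustration)::

    class Author(Document):
        name = StringField()

    class Book(Document):
        author = ReferenceField(Author)

    author = Author(name='Ross').save()
    book = Book(author=author).save()

    author.name = 'Ross Lawley'
    book.save()              # the changed Author is *not* saved
    book.save(cascade=True)  # also saves the referenced Author (one extra query)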
 Deleting documents
 ------------------

View File

@@ -7,7 +7,7 @@ GridFS
 Writing
 -------
 
-GridFS support comes in the form of the :class:`~mongoengine.FileField` field
+GridFS support comes in the form of the :class:`~mongoengine.fields.FileField` field
 object. This field acts as a file-like object and provides a couple of
 different ways of inserting and retrieving data. Arbitrary metadata such as
 content type can also be stored alongside the files. In the following example,
@@ -27,7 +27,7 @@ a document is created to store details about animals, including a photo::
 Retrieval
 ---------
 
-So using the :class:`~mongoengine.FileField` is just like using any other
+So using the :class:`~mongoengine.fields.FileField` is just like using any other
 field. The file can also be retrieved just as easily::
 
     marmot = Animal.objects(genus='Marmota').first()
@@ -37,7 +37,7 @@ field. The file can also be retrieved just as easily::
 Streaming
 ---------
 
-Streaming data into a :class:`~mongoengine.FileField` is achieved in a
+Streaming data into a :class:`~mongoengine.fields.FileField` is achieved in a
 slightly different manner. First, a new file must be created by calling the
 :func:`new_file` method. Data can then be written using :func:`write`::
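An illustrative sketch of that streaming flow, reusing the ``Animal`` document from the
earlier examples (the data written here is made up)::

    marmot = Animal(genus='Marmota', family='Sciuridae')

    marmot.photo.new_file()
    marmot.photo.write('some_image_data')
    marmot.photo.write('some_more_image_data')
    marmot.photo.close()

    marmot.save()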

View File

@@ -65,6 +65,9 @@ Available operators are as follows:
 * ``size`` -- the size of the array is
 * ``exists`` -- value for field exists
 
+String queries
+--------------
+
 The following operators are available as shortcuts to querying with regular
 expressions:
 
@@ -78,8 +81,71 @@ expressions:
 * ``iendswith`` -- string field ends with value (case insensitive)
 * ``match`` -- performs an $elemMatch so you can match an entire document within an array
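For instance, the case-insensitive shortcuts above compose like any other query keyword
(the ``Page`` document with a ``title`` field is assumed from earlier examples)::

    # Titles containing "mongo", ignoring case
    Page.objects(title__icontains='mongo')

    # Titles starting with "Using"
    Page.objects(title__startswith='Using')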
-There are a few special operators for performing geographical queries, that
-may used with :class:`~mongoengine.GeoPointField`\ s:
+Geo queries
+-----------
+
+There are a few special operators for performing geographical queries. The following
+were added in 0.8 for: :class:`~mongoengine.fields.PointField`,
+:class:`~mongoengine.fields.LineStringField` and
+:class:`~mongoengine.fields.PolygonField`:
+
+* ``geo_within`` -- Check if a geometry is within a polygon. For ease of use
+  it accepts either a geojson geometry or just the polygon coordinates eg::
+
+      loc.objects(point__geo_within=[[[40, 5], [40, 6], [41, 6], [40, 5]]])
+      loc.objects(point__geo_within={"type": "Polygon",
+                                     "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]})
+
+* ``geo_within_box`` -- simplified geo_within searching with a box eg::
+
+      loc.objects(point__geo_within_box=[(-125.0, 35.0), (-100.0, 40.0)])
+      loc.objects(point__geo_within_box=[<bottom left coordinates>, <upper right coordinates>])
+
+* ``geo_within_polygon`` -- simplified geo_within searching within a simple polygon eg::
+
+      loc.objects(point__geo_within_polygon=[[40, 5], [40, 6], [41, 6], [40, 5]])
+      loc.objects(point__geo_within_polygon=[ [ <x1> , <y1> ] ,
+                                              [ <x2> , <y2> ] ,
+                                              [ <x3> , <y3> ] ])
+
+* ``geo_within_center`` -- simplified geo_within the flat circle radius of a point eg::
+
+      loc.objects(point__geo_within_center=[(-125.0, 35.0), 1])
+      loc.objects(point__geo_within_center=[ [ <x>, <y> ] , <radius> ])
+
+* ``geo_within_sphere`` -- simplified geo_within the spherical circle radius of a point eg::
+
+      loc.objects(point__geo_within_sphere=[(-125.0, 35.0), 1])
+      loc.objects(point__geo_within_sphere=[ [ <x>, <y> ] , <radius> ])
+
+* ``geo_intersects`` -- selects all locations that intersect with a geometry eg::
+
+      # Inferred from provided points lists:
+      loc.objects(poly__geo_intersects=[40, 6])
+      loc.objects(poly__geo_intersects=[[40, 5], [40, 6]])
+      loc.objects(poly__geo_intersects=[[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]])
+
+      # With geoJson style objects
+      loc.objects(poly__geo_intersects={"type": "Point", "coordinates": [40, 6]})
+      loc.objects(poly__geo_intersects={"type": "LineString",
+                                        "coordinates": [[40, 5], [40, 6]]})
+      loc.objects(poly__geo_intersects={"type": "Polygon",
+                                        "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]})
+
+* ``near`` -- Find all the locations near a given point::
+
+      loc.objects(point__near=[40, 5])
+      loc.objects(point__near={"type": "Point", "coordinates": [40, 5]})
+
+  You can also set the maximum distance in meters as well::
+
+      loc.objects(point__near=[40, 5], point__max_distance=1000)
+
+The older 2D indexes are still supported with the
+:class:`~mongoengine.fields.GeoPointField`:
+
 * ``within_distance`` -- provide a list containing a point and a maximum
   distance (e.g. [(41.342, -87.653), 5])
@@ -91,7 +157,9 @@ may used with :class:`~mongoengine.GeoPointField`\ s:
   [(35.0, -125.0), (40.0, -100.0)])
 * ``within_polygon`` -- filter documents to those within a given polygon (e.g.
   [(41.91,-87.69), (41.92,-87.68), (41.91,-87.65), (41.89,-87.65)]).
 
   .. note:: Requires Mongo Server 2.0
 
 * ``max_distance`` -- can be added to your location queries to set a maximum
   distance.
@@ -100,7 +168,7 @@ Querying lists
 --------------
 On most fields, this syntax will look up documents where the field specified
 matches the given value exactly, but when the field refers to a
-:class:`~mongoengine.ListField`, a single item may be provided, in which case
+:class:`~mongoengine.fields.ListField`, a single item may be provided, in which case
 lists that contain that item will be matched::
 
     class Page(Document):
@@ -319,7 +387,7 @@ Retrieving a subset of fields
 Sometimes a subset of fields on a :class:`~mongoengine.Document` is required,
 and for efficiency only these should be retrieved from the database. This issue
 is especially important for MongoDB, as fields may often be extremely large
-(e.g. a :class:`~mongoengine.ListField` of
+(e.g. a :class:`~mongoengine.fields.ListField` of
 :class:`~mongoengine.EmbeddedDocument`\ s, which represent the comments on a
 blog post. To select only a subset of fields, use
 :meth:`~mongoengine.queryset.QuerySet.only`, specifying the fields you want to
@@ -351,14 +419,14 @@ If you later need the missing fields, just call
 Getting related data
 --------------------
 
-When iterating the results of :class:`~mongoengine.ListField` or
-:class:`~mongoengine.DictField` we automatically dereference any
+When iterating the results of :class:`~mongoengine.fields.ListField` or
+:class:`~mongoengine.fields.DictField` we automatically dereference any
 :class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the
 number the queries to mongo.
 
 There are times when that efficiency is not enough, documents that have
-:class:`~mongoengine.ReferenceField` objects or
-:class:`~mongoengine.GenericReferenceField` objects at the top level are
+:class:`~mongoengine.fields.ReferenceField` objects or
+:class:`~mongoengine.fields.GenericReferenceField` objects at the top level are
 expensive as the number of queries to MongoDB can quickly rise.
 
 To limit the number of queries use
@@ -541,7 +609,7 @@ Javascript code. When accessing a field on a collection object, use
 square-bracket notation, and prefix the MongoEngine field name with a tilde.
 The field name that follows the tilde will be translated to the name used in
 the database. Note that when referring to fields on embedded documents,
-the name of the :class:`~mongoengine.EmbeddedDocumentField`, followed by a dot,
+the name of the :class:`~mongoengine.fields.EmbeddedDocumentField`, followed by a dot,
 should be used before the name of the field on the embedded document. The
 following example shows how the substitutions are made::

View File

@@ -55,15 +55,25 @@ See the :doc:`changelog` for a full list of changes to MongoEngine and
 .. note:: Always read and test the `upgrade <upgrade>`_ documentation before
     putting updates live in production **;)**
 
+Offline Reading
+---------------
+
+Download the docs in `pdf <https://media.readthedocs.org/pdf/mongoengine-odm/latest/mongoengine-odm.pdf>`_
+or `epub <https://media.readthedocs.org/epub/mongoengine-odm/latest/mongoengine-odm.epub>`_
+formats for offline reading.
+
 .. toctree::
+    :maxdepth: 1
+    :numbered:
     :hidden:
 
     tutorial
     guide/index
     apireference
-    django
     changelog
     upgrade
+    django
 
 Indices and tables
 ------------------

View File

@@ -115,7 +115,7 @@ by setting :attr:`allow_inheritance` to True in the :attr:`meta`::
         link_url = StringField()
 
 We are storing a reference to the author of the posts using a
-:class:`~mongoengine.ReferenceField` object. These are similar to foreign key
+:class:`~mongoengine.fields.ReferenceField` object. These are similar to foreign key
 fields in traditional ORMs, and are automatically translated into references
 when they are saved, and dereferenced when they are loaded.
@@ -137,7 +137,7 @@ size of our database. So let's take a look that the code our modified
         author = ReferenceField(User)
         tags = ListField(StringField(max_length=30))
 
-The :class:`~mongoengine.ListField` object that is used to define a Post's tags
+The :class:`~mongoengine.fields.ListField` object that is used to define a Post's tags
 takes a field object as its first argument --- this means that you can have
 lists of any type of field (including lists).
@@ -174,7 +174,7 @@ We can then store a list of comment documents in our post document::
 Handling deletions of references
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-The :class:`~mongoengine.ReferenceField` object takes a keyword
+The :class:`~mongoengine.fields.ReferenceField` object takes a keyword
 `reverse_delete_rule` for handling deletion rules if the reference is deleted.
 To delete all the posts if a user is deleted set the rule::
@@ -184,7 +184,7 @@ To delete all the posts if a user is deleted set the rule::
         tags = ListField(StringField(max_length=30))
         comments = ListField(EmbeddedDocumentField(Comment))
 
-See :class:`~mongoengine.ReferenceField` for more information.
+See :class:`~mongoengine.fields.ReferenceField` for more information.
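For context, a hedged sketch of setting such a rule on the tutorial's ``Post`` document
(the surrounding fields mirror the snippet above)::

    class Post(Document):
        title = StringField(max_length=120, required=True)
        author = ReferenceField(User, reverse_delete_rule=CASCADE)
        tags = ListField(StringField(max_length=30))
        comments = ListField(EmbeddedDocumentField(Comment))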
 .. note::
     MapFields and DictFields currently don't support automatic handling of

View File

@@ -15,10 +15,10 @@ possible for the whole of the release.
 live. There maybe multiple manual steps in migrating and these are best honed
 on a staging / test system.
 
-Python
-=======
-Support for python 2.5 has been dropped.
+Python and PyMongo
+==================
+MongoEngine requires python 2.6 (or above) and pymongo 2.5 (or above).
 
 Data Model
 ==========
@@ -120,6 +120,9 @@ eg::
     p._mark_as_dirty('friends')
     p.save()
 
+`An example test migration for ReferenceFields is available on github
+<https://github.com/MongoEngine/mongoengine/blob/master/tests/migration/refrencefield_dbref_to_object_id.py>`_.
+
 UUIDField
 ---------
@@ -145,6 +148,9 @@ eg::
     a._mark_as_dirty('uuid')
     a.save()
 
+`An example test migration for UUIDFields is available on github
+<https://github.com/MongoEngine/mongoengine/blob/master/tests/migration/uuidfield_to_binary.py>`_.
+
 DecimalField
 ------------
@@ -172,7 +178,10 @@ eg::
     p.save()
 
 .. note:: DecimalField's have also been improved with the addition of precision
-    and rounding. See :class:`~mongoengine.DecimalField` for more information.
+    and rounding. See :class:`~mongoengine.fields.DecimalField` for more information.
+
+`An example test migration for DecimalFields is available on github
+<https://github.com/MongoEngine/mongoengine/blob/master/tests/migration/decimalfield_as_float.py>`_.
 
 Cascading Saves
 ---------------
@@ -187,6 +196,19 @@ you will have to explicitly tell it to cascade on save::
     # Or on save:
     my_document.save(cascade=True)
 
+Storage
+-------
+
+Document and Embedded Documents are now serialized based on declared field order.
+Previously, the data was passed to mongodb as a dictionary, which meant that
+order wasn't guaranteed - so things like ``$addToSet`` operations on
+:class:`~mongoengine.EmbeddedDocument` could potentially fail in unexpected
+ways.
+
+If this impacts you, you may want to rewrite the objects using the
+``doc._mark_as_dirty('field')`` pattern described above. If you are using a
+compound primary key then you will need to ensure the order is fixed and match
+your EmbeddedDocument to that order.
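A hedged sketch of that rewrite pattern, following the earlier migration snippets (the
``Person``/``name`` names are placeholders for your own document and field)::

    for p in Person.objects:
        p._mark_as_dirty('name')
        p.save()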
 Querysets
 =========
@@ -213,12 +235,15 @@ update your code like so: ::
     mammals = Animal.objects(type="mammal").filter(order="Carnivora") # The final queryset is assgined to mammals
     [m for m in mammals] # This will return all carnivores
 
-No more len
------------
+Len iterates the queryset
+-------------------------
 
-If you ever did len(queryset) it previously did a count() under the covers, this
-caused some unusual issues - so now it has been removed in favour of the
-explicit `queryset.count()` to update::
+If you ever did `len(queryset)` it previously did a `count()` under the covers,
+this caused some unusual issues. As `len(queryset)` is most often used by
+`list(queryset)` we now cache the queryset results and use that for the length.
+
+This isn't as performant as a `count()` and if you aren't iterating the
+queryset you should upgrade to use count::
 
     # Old code
     len(Animal.objects(type="mammal"))
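The explicit alternative the guide recommends would look something like this (sketch)::

    # New code
    Animal.objects(type="mammal").count()

    # Or, if you will iterate the results anyway, the cached queryset
    # makes a later len() cheap:
    mammals = list(Animal.objects(type="mammal"))
    len(mammals)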

View File

@@ -15,7 +15,7 @@ import django
 __all__ = (list(document.__all__) + fields.__all__ + connection.__all__ +
            list(queryset.__all__) + signals.__all__ + list(errors.__all__))
 
-VERSION = (0, 8, 0, '+')
+VERSION = (0, 8, 0, 'RC4')
 
 def get_version():

View File

@@ -3,3 +3,6 @@ from mongoengine.base.datastructures import *
 from mongoengine.base.document import *
 from mongoengine.base.fields import *
 from mongoengine.base.metaclasses import *
+
+# Help with backwards compatibility
+from mongoengine.errors import *

View File

@@ -6,6 +6,7 @@ from functools import partial
 import pymongo
 from bson import json_util
 from bson.dbref import DBRef
+from bson.son import SON
 
 from mongoengine import signals
 from mongoengine.common import _import_class
@@ -228,11 +229,16 @@ class BaseDocument(object):
             pass
 
     def to_mongo(self):
-        """Return data dictionary ready for use with MongoDB.
+        """Return as SON data ready for use with MongoDB.
         """
-        data = {}
-        for field_name, field in self._fields.iteritems():
+        data = SON()
+        data["_id"] = None
+        data['_cls'] = self._class_name
+
+        for field_name in self:
             value = self._data.get(field_name, None)
+            field = self._fields.get(field_name)
 
             if value is not None:
                 value = field.to_mongo(value)
@@ -244,19 +250,27 @@ class BaseDocument(object):
             if value is not None:
                 data[field.db_field] = value
 
-        # Only add _cls if allow_inheritance is True
-        if (hasattr(self, '_meta') and
-            self._meta.get('allow_inheritance', ALLOW_INHERITANCE) == True):
-            data['_cls'] = self._class_name
+        # If "_id" has not been set, then try and set it
+        if data["_id"] is None:
+            data["_id"] = self._data.get("id", None)
 
-        if '_id' in data and data['_id'] is None:
-            del data['_id']
+        if data['_id'] is None:
+            data.pop('_id')
+
+        # Only add _cls if allow_inheritance is True
+        if (not hasattr(self, '_meta') or
+                not self._meta.get('allow_inheritance', ALLOW_INHERITANCE)):
+            data.pop('_cls')
 
         if not self._dynamic:
             return data
 
-        for name, field in self._dynamic_fields.items():
+        # Sort dynamic fields by key
+        dynamic_fields = sorted(self._dynamic_fields.iteritems(),
+                                key=operator.itemgetter(0))
+        for name, field in dynamic_fields:
             data[name] = field.to_mongo(self._data.get(name, None))
 
         return data
 
     def validate(self, clean=True):
@@ -648,6 +662,7 @@ class BaseDocument(object):
         if include_cls and direction is not pymongo.GEO2D:
             index_list.insert(0, ('_cls', 1))
 
+        if index_list:
             spec['fields'] = index_list
         if spec.get('sparse', False) and len(spec['fields']) > 1:
             raise ValueError(
@@ -704,26 +719,31 @@ class BaseDocument(object):
         return unique_indexes
 
     @classmethod
-    def _geo_indices(cls, inspected=None):
+    def _geo_indices(cls, inspected=None, parent_field=None):
         inspected = inspected or []
         geo_indices = []
         inspected.append(cls)
 
-        EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
-        GeoPointField = _import_class("GeoPointField")
+        geo_field_type_names = ["EmbeddedDocumentField", "GeoPointField",
+                                "PointField", "LineStringField", "PolygonField"]
+        geo_field_types = tuple([_import_class(field) for field in geo_field_type_names])
 
         for field in cls._fields.values():
-            if not isinstance(field, (EmbeddedDocumentField, GeoPointField)):
+            if not isinstance(field, geo_field_types):
                 continue
             if hasattr(field, 'document_type'):
                 field_cls = field.document_type
                 if field_cls in inspected:
                     continue
                 if hasattr(field_cls, '_geo_indices'):
-                    geo_indices += field_cls._geo_indices(inspected)
+                    geo_indices += field_cls._geo_indices(inspected, parent_field=field.db_field)
             elif field._geo_index:
+                field_name = field.db_field
+                if parent_field:
+                    field_name = "%s.%s" % (parent_field, field_name)
+
                 geo_indices.append({'fields':
-                                   [(field.db_field, pymongo.GEO2D)]})
+                                    [(field_name, field._geo_index)]})
         return geo_indices
 
     @classmethod

View File

@ -2,7 +2,8 @@ import operator
import warnings import warnings
import weakref import weakref
from bson import DBRef, ObjectId from bson import DBRef, ObjectId, SON
import pymongo
from mongoengine.common import _import_class from mongoengine.common import _import_class
from mongoengine.errors import ValidationError from mongoengine.errors import ValidationError
@ -10,7 +11,7 @@ from mongoengine.errors import ValidationError
from mongoengine.base.common import ALLOW_INHERITANCE from mongoengine.base.common import ALLOW_INHERITANCE
from mongoengine.base.datastructures import BaseDict, BaseList from mongoengine.base.datastructures import BaseDict, BaseList
__all__ = ("BaseField", "ComplexBaseField", "ObjectIdField") __all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField")
class BaseField(object): class BaseField(object):
@ -81,13 +82,16 @@ class BaseField(object):
def __set__(self, instance, value): def __set__(self, instance, value):
"""Descriptor for assigning a value to a field in a document. """Descriptor for assigning a value to a field in a document.
""" """
changed = False if instance._initialised:
try:
if (self.name not in instance._data or if (self.name not in instance._data or
instance._data[self.name] != value): instance._data[self.name] != value):
changed = True
instance._data[self.name] = value
if changed and instance._initialised:
instance._mark_as_changed(self.name) instance._mark_as_changed(self.name)
except:
# Values cant be compared eg: naive and tz datetimes
# So mark it as changed
instance._mark_as_changed(self.name)
instance._data[self.name] = value
def error(self, message="", errors=None, field_name=None): def error(self, message="", errors=None, field_name=None):
"""Raises a ValidationError. """Raises a ValidationError.
@ -295,7 +299,7 @@ class ComplexBaseField(BaseField):
meta = getattr(v, '_meta', {}) meta = getattr(v, '_meta', {})
allow_inheritance = ( allow_inheritance = (
meta.get('allow_inheritance', ALLOW_INHERITANCE) meta.get('allow_inheritance', ALLOW_INHERITANCE)
== True) is True)
if not allow_inheritance and not self.field: if not allow_inheritance and not self.field:
value_dict[k] = GenericReferenceField().to_mongo(v) value_dict[k] = GenericReferenceField().to_mongo(v)
else: else:
@ -393,3 +397,100 @@ class ObjectIdField(BaseField):
ObjectId(unicode(value)) ObjectId(unicode(value))
except: except:
self.error('Invalid Object ID') self.error('Invalid Object ID')
class GeoJsonBaseField(BaseField):
"""A geo json field storing a geojson style object.
.. versionadded:: 0.8
"""
_geo_index = pymongo.GEOSPHERE
_type = "GeoBase"
def __init__(self, auto_index=True, *args, **kwargs):
"""
:param auto_index: Automatically create a "2dsphere" index. Defaults
to `True`.
"""
self._name = "%sField" % self._type
if not auto_index:
self._geo_index = False
super(GeoJsonBaseField, self).__init__(*args, **kwargs)
def validate(self, value):
"""Validate the GeoJson object based on its type
"""
if isinstance(value, dict):
if set(value.keys()) == set(['type', 'coordinates']):
if value['type'] != self._type:
self.error('%s type must be "%s"' % (self._name, self._type))
return self.validate(value['coordinates'])
else:
self.error('%s can only accept a valid GeoJson dictionary'
' or lists of (x, y)' % self._name)
return
elif not isinstance(value, (list, tuple)):
self.error('%s can only accept lists of [x, y]' % self._name)
return
validate = getattr(self, "_validate_%s" % self._type.lower())
error = validate(value)
if error:
self.error(error)
def _validate_polygon(self, value):
if not isinstance(value, (list, tuple)):
return 'Polygons must contain list of linestrings'
# Quick and dirty validator
try:
value[0][0][0]
except:
return "Invalid Polygon must contain at least one valid linestring"
errors = []
for val in value:
error = self._validate_linestring(val, False)
if not error and val[0] != val[-1]:
error = 'LineStrings must start and end at the same point'
if error and error not in errors:
errors.append(error)
if errors:
return "Invalid Polygon:\n%s" % ", ".join(errors)
def _validate_linestring(self, value, top_level=True):
"""Validates a linestring"""
if not isinstance(value, (list, tuple)):
return 'LineStrings must contain list of coordinate pairs'
# Quick and dirty validator
try:
value[0][0]
except:
return "Invalid LineString must contain at least one valid point"
errors = []
for val in value:
error = self._validate_point(val)
if error and error not in errors:
errors.append(error)
if errors:
if top_level:
return "Invalid LineString:\n%s" % ", ".join(errors)
else:
return "%s" % ", ".join(errors)
def _validate_point(self, value):
"""Validate each set of coords"""
if not isinstance(value, (list, tuple)):
return 'Points must be a list of coordinate pairs'
elif not len(value) == 2:
return "Value (%s) must be a two-dimensional point" % repr(value)
elif (not isinstance(value[0], (float, int)) or
not isinstance(value[1], (float, int))):
return "Both values (%s) in point must be float or int" % repr(value)
def to_mongo(self, value):
if isinstance(value, dict):
return value
return SON([("type", self._type), ("coordinates", value)])
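In other words, ``to_mongo`` either passes a preformed GeoJSON dict straight through or wraps a bare coordinate list in a ``SON`` document tagged with the subclass's ``_type``. A minimal sketch of the two accepted shapes (coordinate values are illustrative only)::

    from bson import SON

    coords = [-87.677137, 41.909889]

    # list / tuple input is wrapped with the subclass's _type
    stored = SON([("type", "Point"), ("coordinates", coords)])

    # an already-formed GeoJSON dict is stored unchanged
    already_geojson = {"type": "Point", "coordinates": coords}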

View File

@ -140,8 +140,31 @@ class DocumentMetaclass(type):
base._subclasses += (_cls,) base._subclasses += (_cls,)
base._types = base._subclasses # TODO depreciate _types base._types = base._subclasses # TODO depreciate _types
# Handle delete rules
Document, EmbeddedDocument, DictField = cls._import_classes() Document, EmbeddedDocument, DictField = cls._import_classes()
if issubclass(new_class, Document):
new_class._collection = None
# Add class to the _document_registry
_document_registry[new_class._class_name] = new_class
# In Python 2, user-defined method objects have special read-only
# attributes 'im_func' and 'im_self' which contain the function obj
# and class instance object respectively. With Python 3 these special
# attributes have been replaced by __func__ and __self__. The Blinker
# module continues to use im_func and im_self, so the code below
# copies __func__ into im_func and __self__ into im_self for
# classmethod objects in Document derived classes.
if PY3:
for key, val in new_class.__dict__.items():
if isinstance(val, classmethod):
f = val.__get__(new_class)
if hasattr(f, '__func__') and not hasattr(f, 'im_func'):
f.__dict__.update({'im_func': getattr(f, '__func__')})
if hasattr(f, '__self__') and not hasattr(f, 'im_self'):
f.__dict__.update({'im_self': getattr(f, '__self__')})
# Handle delete rules
for field in new_class._fields.itervalues(): for field in new_class._fields.itervalues():
f = field f = field
f.owner_document = new_class f.owner_document = new_class
@ -172,28 +195,6 @@ class DocumentMetaclass(type):
"field name" % field.name) "field name" % field.name)
raise InvalidDocumentError(msg) raise InvalidDocumentError(msg)
if issubclass(new_class, Document):
new_class._collection = None
# Add class to the _document_registry
_document_registry[new_class._class_name] = new_class
# In Python 2, User-defined methods objects have special read-only
# attributes 'im_func' and 'im_self' which contain the function obj
# and class instance object respectively. With Python 3 these special
# attributes have been replaced by __func__ and __self__. The Blinker
# module continues to use im_func and im_self, so the code below
# copies __func__ into im_func and __self__ into im_self for
# classmethod objects in Document derived classes.
if PY3:
for key, val in new_class.__dict__.items():
if isinstance(val, classmethod):
f = val.__get__(new_class)
if hasattr(f, '__func__') and not hasattr(f, 'im_func'):
f.__dict__.update({'im_func': getattr(f, '__func__')})
if hasattr(f, '__self__') and not hasattr(f, 'im_self'):
f.__dict__.update({'im_self': getattr(f, '__self__')})
return new_class return new_class
def add_to_class(self, name, value): def add_to_class(self, name, value):

View File

@ -11,6 +11,7 @@ def _import_class(cls_name):
field_classes = ('DictField', 'DynamicField', 'EmbeddedDocumentField', field_classes = ('DictField', 'DynamicField', 'EmbeddedDocumentField',
'FileField', 'GenericReferenceField', 'FileField', 'GenericReferenceField',
'GenericEmbeddedDocumentField', 'GeoPointField', 'GenericEmbeddedDocumentField', 'GeoPointField',
'PointField', 'LineStringField', 'PolygonField',
'ReferenceField', 'StringField', 'ComplexBaseField') 'ReferenceField', 'StringField', 'ComplexBaseField')
queryset_classes = ('OperationError',) queryset_classes = ('OperationError',)
deref_classes = ('DeReference',) deref_classes = ('DeReference',)

View File

@ -137,11 +137,12 @@ def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
if alias not in _dbs: if alias not in _dbs:
conn = get_connection(alias) conn = get_connection(alias)
conn_settings = _connection_settings[alias] conn_settings = _connection_settings[alias]
_dbs[alias] = conn[conn_settings['name']] db = conn[conn_settings['name']]
# Authenticate if necessary # Authenticate if necessary
if conn_settings['username'] and conn_settings['password']: if conn_settings['username'] and conn_settings['password']:
_dbs[alias].authenticate(conn_settings['username'], db.authenticate(conn_settings['username'],
conn_settings['password']) conn_settings['password'])
_dbs[alias] = db
return _dbs[alias] return _dbs[alias]

View File

@ -1,8 +1,10 @@
from mongoengine.common import _import_class from mongoengine.common import _import_class
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
from mongoengine.queryset import OperationError, QuerySet from mongoengine.queryset import QuerySet
__all__ = ("switch_db", "switch_collection", "no_dereference", "query_counter")
__all__ = ("switch_db", "switch_collection", "no_dereference",
"no_sub_classes", "query_counter")
class switch_db(object): class switch_db(object):
@ -130,6 +132,36 @@ class no_dereference(object):
return self.cls return self.cls
class no_sub_classes(object):
""" no_sub_classes context manager.
Only returns instances of this class and no sub (inherited) classes::
with no_sub_classes(Group) as Group:
Group.objects.find()
"""
def __init__(self, cls):
""" Construct the no_sub_classes context manager.
:param cls: the class to turn off querying subclasses for
"""
self.cls = cls
def __enter__(self):
""" change the objects default and _auto_dereference values"""
self.cls._all_subclasses = self.cls._subclasses
self.cls._subclasses = (self.cls,)
return self.cls
def __exit__(self, t, value, traceback):
""" Reset the default and _auto_dereference values"""
self.cls._subclasses = self.cls._all_subclasses
delattr(self.cls, '_all_subclasses')
return self.cls
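A short usage sketch of the context manager above, assuming an inheritance-enabled ``Group`` document with a ``SubGroup`` subclass (both names are illustrative)::

    from mongoengine import Document, StringField, connect
    from mongoengine.context_managers import no_sub_classes

    connect('example_db')  # assumed database name

    class Group(Document):
        name = StringField()
        meta = {'allow_inheritance': True}

    class SubGroup(Group):
        pass

    Group(name="parent").save()
    SubGroup(name="child").save()

    with no_sub_classes(Group) as Group:
        # inside the block queries are restricted to Group itself,
        # so the SubGroup document is not returned
        groups = list(Group.objects)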
class QuerySetNoDeRef(QuerySet): class QuerySetNoDeRef(QuerySet):
"""Special no_dereference QuerySet""" """Special no_dereference QuerySet"""
def __dereference(items, max_depth=1, instance=None, name=None): def __dereference(items, max_depth=1, instance=None, name=None):

View File

@ -1,4 +1,3 @@
from __future__ import with_statement
import warnings import warnings
import pymongo import pymongo
@ -232,7 +231,6 @@ class Document(BaseDocument):
return not updated return not updated
return created return created
upsert = self._created
update_query = {} update_query = {}
if updates: if updates:
@ -241,7 +239,7 @@ class Document(BaseDocument):
update_query["$unset"] = removals update_query["$unset"] = removals
if updates or removals: if updates or removals:
last_error = collection.update(select_dict, update_query, last_error = collection.update(select_dict, update_query,
upsert=upsert, **write_concern) upsert=True, **write_concern)
created = is_new_object(last_error) created = is_new_object(last_error)
cascade = (self._meta.get('cascade', True) cascade = (self._meta.get('cascade', True)
@ -523,7 +521,6 @@ class Document(BaseDocument):
# an extra index on _cls, as mongodb will use the existing # an extra index on _cls, as mongodb will use the existing
# index to service queries against _cls # index to service queries against _cls
cls_indexed = False cls_indexed = False
def includes_cls(fields): def includes_cls(fields):
first_field = None first_field = None
if len(fields): if len(fields):
@ -548,7 +545,7 @@ class Document(BaseDocument):
# If _cls is being used (for polymorphism), it needs an index, # If _cls is being used (for polymorphism), it needs an index,
# only if another index doesn't begin with _cls # only if another index doesn't begin with _cls
if (index_cls and not cls_indexed and if (index_cls and not cls_indexed and
cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) == True): cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True):
collection.ensure_index('_cls', background=background, collection.ensure_index('_cls', background=background,
**index_opts) **index_opts)
@ -559,7 +556,7 @@ class DynamicDocument(Document):
way as an ordinary document but has expando style properties. Any data way as an ordinary document but has expando style properties. Any data
passed or set against the :class:`~mongoengine.DynamicDocument` that is passed or set against the :class:`~mongoengine.DynamicDocument` that is
not a field is automatically converted into a not a field is automatically converted into a
:class:`~mongoengine.DynamicField` and data can be attributed to that :class:`~mongoengine.fields.DynamicField` and data can be attributed to that
field. field.
.. note:: .. note::

View File

@ -8,13 +8,14 @@ import uuid
import warnings import warnings
from operator import itemgetter from operator import itemgetter
import pymongo
import gridfs import gridfs
from bson import Binary, DBRef, SON, ObjectId from bson import Binary, DBRef, SON, ObjectId
from mongoengine.errors import ValidationError from mongoengine.errors import ValidationError
from mongoengine.python_support import (PY3, bin_type, txt_type, from mongoengine.python_support import (PY3, bin_type, txt_type,
str_types, StringIO) str_types, StringIO)
from base import (BaseField, ComplexBaseField, ObjectIdField, from base import (BaseField, ComplexBaseField, ObjectIdField, GeoJsonBaseField,
get_document, BaseDocument) get_document, BaseDocument)
from queryset import DO_NOTHING, QuerySet from queryset import DO_NOTHING, QuerySet
from document import Document, EmbeddedDocument from document import Document, EmbeddedDocument
@ -33,9 +34,8 @@ __all__ = ['StringField', 'URLField', 'EmailField', 'IntField', 'LongField',
'SortedListField', 'DictField', 'MapField', 'ReferenceField', 'SortedListField', 'DictField', 'MapField', 'ReferenceField',
'GenericReferenceField', 'BinaryField', 'GridFSError', 'GenericReferenceField', 'BinaryField', 'GridFSError',
'GridFSProxy', 'FileField', 'ImageGridFsProxy', 'GridFSProxy', 'FileField', 'ImageGridFsProxy',
'ImproperlyConfigured', 'ImageField', 'GeoPointField', 'ImproperlyConfigured', 'ImageField', 'GeoPointField', 'PointField',
'SequenceField', 'UUIDField'] 'LineStringField', 'PolygonField', 'SequenceField', 'UUIDField']
RECURSIVE_REFERENCE_CONSTANT = 'self' RECURSIVE_REFERENCE_CONSTANT = 'self'
@ -108,8 +108,8 @@ class URLField(StringField):
_URL_REGEX = re.compile( _URL_REGEX = re.compile(
r'^(?:http|ftp)s?://' # http:// or https:// r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain... r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
r'localhost|' #localhost... r'localhost|' # localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE) r'(?:/?|[/?]\S+)$', re.IGNORECASE)
@ -128,8 +128,7 @@ class URLField(StringField):
warnings.warn( warnings.warn(
"The URLField verify_exists argument has intractable security " "The URLField verify_exists argument has intractable security "
"and performance issues. Accordingly, it has been deprecated.", "and performance issues. Accordingly, it has been deprecated.",
DeprecationWarning DeprecationWarning)
)
try: try:
request = urllib2.Request(value) request = urllib2.Request(value)
urllib2.urlopen(request) urllib2.urlopen(request)
@ -297,8 +296,9 @@ class DecimalField(BaseField):
if value is None: if value is None:
return value return value
return decimal.Decimal(value).quantize(self.precision, # Convert to string for python 2.6 before casting to Decimal
rounding=self.rounding) value = decimal.Decimal("%s" % value)
return value.quantize(self.precision, rounding=self.rounding)
def to_mongo(self, value): def to_mongo(self, value):
if value is None: if value is None:
@ -468,7 +468,7 @@ class ComplexDateTimeField(StringField):
def __get__(self, instance, owner): def __get__(self, instance, owner):
data = super(ComplexDateTimeField, self).__get__(instance, owner) data = super(ComplexDateTimeField, self).__get__(instance, owner)
if data == None: if data is None:
return datetime.datetime.now() return datetime.datetime.now()
if isinstance(data, datetime.datetime): if isinstance(data, datetime.datetime):
return data return data
@ -781,7 +781,7 @@ class ReferenceField(BaseField):
* NULLIFY - Updates the reference to null. * NULLIFY - Updates the reference to null.
* CASCADE - Deletes the documents associated with the reference. * CASCADE - Deletes the documents associated with the reference.
* DENY - Prevent the deletion of the reference object. * DENY - Prevent the deletion of the reference object.
* PULL - Pull the reference from a :class:`~mongoengine.ListField` * PULL - Pull the reference from a :class:`~mongoengine.fields.ListField`
of references of references
Alternative syntax for registering delete rules (useful when implementing Alternative syntax for registering delete rules (useful when implementing
@ -854,7 +854,7 @@ class ReferenceField(BaseField):
return document.id return document.id
return document return document
elif not self.dbref and isinstance(document, basestring): elif not self.dbref and isinstance(document, basestring):
return document return ObjectId(document)
id_field_name = self.document_type._meta['id_field'] id_field_name = self.document_type._meta['id_field']
id_field = self.document_type._fields[id_field_name] id_field = self.document_type._fields[id_field_name]
@ -1234,8 +1234,6 @@ class ImageGridFsProxy(GridFSProxy):
Insert an image in the database Insert an image in the database
applying field properties (size, thumbnail_size) applying field properties (size, thumbnail_size)
""" """
if not self.instance:
import ipdb; ipdb.set_trace();
field = self.instance._fields[self.key] field = self.instance._fields[self.key]
try: try:
@ -1307,6 +1305,7 @@ class ImageGridFsProxy(GridFSProxy):
height=h, height=h,
format=format, format=format,
**kwargs) **kwargs)
@property @property
def size(self): def size(self):
""" """
@ -1385,28 +1384,6 @@ class ImageField(FileField):
**kwargs) **kwargs)
class GeoPointField(BaseField):
"""A list storing a latitude and longitude.
.. versionadded:: 0.4
"""
_geo_index = True
def validate(self, value):
"""Make sure that a geo-value is of type (x, y)
"""
if not isinstance(value, (list, tuple)):
self.error('GeoPointField can only accept tuples or lists '
'of (x, y)')
if not len(value) == 2:
self.error('Value must be a two-dimensional point')
if (not isinstance(value[0], (float, int)) and
not isinstance(value[1], (float, int))):
self.error('Both values in point must be float or int')
class SequenceField(BaseField): class SequenceField(BaseField):
"""Provides a sequental counter see: """Provides a sequental counter see:
http://www.mongodb.org/display/DOCS/Object+IDs#ObjectIDs-SequenceNumbers http://www.mongodb.org/display/DOCS/Object+IDs#ObjectIDs-SequenceNumbers
@ -1558,3 +1535,83 @@ class UUIDField(BaseField):
value = uuid.UUID(value) value = uuid.UUID(value)
except Exception, exc: except Exception, exc:
self.error('Could not convert to UUID: %s' % exc) self.error('Could not convert to UUID: %s' % exc)
class GeoPointField(BaseField):
"""A list storing a latitude and longitude.
.. versionadded:: 0.4
"""
_geo_index = pymongo.GEO2D
def validate(self, value):
"""Make sure that a geo-value is of type (x, y)
"""
if not isinstance(value, (list, tuple)):
self.error('GeoPointField can only accept tuples or lists '
'of (x, y)')
if not len(value) == 2:
self.error("Value (%s) must be a two-dimensional point" % repr(value))
elif (not isinstance(value[0], (float, int)) or
not isinstance(value[1], (float, int))):
self.error("Both values (%s) in point must be float or int" % repr(value))
class PointField(GeoJsonBaseField):
"""A geo json field storing a latitude and longitude.
The data is represented as:
.. code-block:: js
{ "type" : "Point" ,
"coordinates" : [x, y]}
You can either pass a dict with the full information or a list
to set the value.
Requires mongodb >= 2.4
.. versionadded:: 0.8
"""
_type = "Point"
class LineStringField(GeoJsonBaseField):
"""A geo json field storing a line of latitude and longitude coordinates.
The data is represented as:
.. code-block:: js
{ "type" : "LineString" ,
"coordinates" : [[x1, y1], [x1, y1] ... [xn, yn]]}
You can either pass a dict with the full information or a list of points.
Requires mongodb >= 2.4
.. versionadded:: 0.8
"""
_type = "LineString"
class PolygonField(GeoJsonBaseField):
"""A geo json field storing a polygon of latitude and longitude coordinates.
The data is represented as:
.. code-block:: js
{ "type" : "Polygon" ,
"coordinates" : [[[x1, y1], [x1, y1] ... [xn, yn]],
[[x1, y1], [x1, y1] ... [xn, yn]]}
You can either pass a dict with the full information or a list
of LineStrings. The first LineString is the exterior ring and the rest are
holes.
Requires mongodb >= 2.4
.. versionadded:: 0.8
"""
_type = "Polygon"

View File

@ -26,6 +26,7 @@ __all__ = ('QuerySet', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL')
# The maximum number of items to display in a QuerySet.__repr__ # The maximum number of items to display in a QuerySet.__repr__
REPR_OUTPUT_SIZE = 20 REPR_OUTPUT_SIZE = 20
ITER_CHUNK_SIZE = 100
# Delete rules # Delete rules
DO_NOTHING = 0 DO_NOTHING = 0
@ -63,16 +64,18 @@ class QuerySet(object):
self._none = False self._none = False
self._as_pymongo = False self._as_pymongo = False
self._as_pymongo_coerce = False self._as_pymongo_coerce = False
self._result_cache = []
self._has_more = True
self._len = None
# If inheritance is allowed, only return instances and instances of # If inheritance is allowed, only return instances and instances of
# subclasses of the class being used # subclasses of the class being used
if document._meta.get('allow_inheritance') == True: if document._meta.get('allow_inheritance') is True:
self._initial_query = {"_cls": {"$in": self._document._subclasses}} self._initial_query = {"_cls": {"$in": self._document._subclasses}}
self._loaded_fields = QueryFieldList(always_include=['_cls']) self._loaded_fields = QueryFieldList(always_include=['_cls'])
self._cursor_obj = None self._cursor_obj = None
self._limit = None self._limit = None
self._skip = None self._skip = None
self._slice = None
self._hint = -1 # Using -1 as None is a valid value for hint self._hint = -1 # Using -1 as None is a valid value for hint
def __call__(self, q_obj=None, class_check=True, slave_okay=False, def __call__(self, q_obj=None, class_check=True, slave_okay=False,
@ -110,13 +113,60 @@ class QuerySet(object):
queryset._class_check = class_check queryset._class_check = class_check
return queryset return queryset
def __len__(self):
"""Since __len__ is called quite frequently (for example, as part of
list(qs)), we populate the result cache and cache the length.
"""
if self._len is not None:
return self._len
if self._has_more:
# populate the cache
list(self._iter_results())
self._len = len(self._result_cache)
return self._len
def __iter__(self): def __iter__(self):
"""Support iterator protocol""" """Iteration utilises a results cache which iterates the cursor
queryset = self in batches of ``ITER_CHUNK_SIZE``.
if queryset._iter:
queryset = self.clone() If ``self._has_more`` the cursor hasn't been exhausted so cache then
queryset.rewind() batch. Otherwise iterate the result_cache.
return queryset """
self._iter = True
if self._has_more:
return self._iter_results()
# iterating over the cache.
return iter(self._result_cache)
def _iter_results(self):
"""A generator for iterating over the result cache.
Also populates the cache if there are more possible results to yield.
Raises StopIteration when there are no more results"""
pos = 0
while True:
upper = len(self._result_cache)
while pos < upper:
yield self._result_cache[pos]
pos = pos + 1
if not self._has_more:
raise StopIteration
if len(self._result_cache) <= pos:
self._populate_cache()
def _populate_cache(self):
"""
Populates the result cache with ``ITER_CHUNK_SIZE`` more entries
(until the cursor is exhausted).
"""
if self._has_more:
try:
for i in xrange(ITER_CHUNK_SIZE):
self._result_cache.append(self.next())
except StopIteration:
self._has_more = False
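Together, ``__len__``, ``__iter__`` and ``_populate_cache`` mean a queryset can be iterated or measured repeatedly while only reading the cursor once, in ``ITER_CHUNK_SIZE`` batches. A rough sketch of the observable behaviour (``Person`` is an assumed document class)::

    qs = Person.objects

    first_pass = list(qs)    # exhausts the cursor, filling _result_cache
    second_pass = list(qs)   # served entirely from the cache
    assert len(qs) == len(first_pass)   # __len__ reuses the cached length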
def __getitem__(self, key): def __getitem__(self, key):
"""Support skip and limit using getitem and slicing syntax. """Support skip and limit using getitem and slicing syntax.
@ -127,8 +177,10 @@ class QuerySet(object):
if isinstance(key, slice): if isinstance(key, slice):
try: try:
queryset._cursor_obj = queryset._cursor[key] queryset._cursor_obj = queryset._cursor[key]
queryset._slice = key
queryset._skip, queryset._limit = key.start, key.stop queryset._skip, queryset._limit = key.start, key.stop
queryset._limit
if key.start and key.stop:
queryset._limit = key.stop - key.start
except IndexError, err: except IndexError, err:
# PyMongo raises an error if key.start == key.stop, catch it, # PyMongo raises an error if key.start == key.stop, catch it,
# bin it, kill it. # bin it, kill it.
@ -156,22 +208,15 @@ class QuerySet(object):
def __repr__(self): def __repr__(self):
"""Provides the string representation of the QuerySet """Provides the string representation of the QuerySet
.. versionchanged:: 0.6.13 Now doesnt modify the cursor
""" """
if self._iter: if self._iter:
return '.. queryset mid-iteration ..' return '.. queryset mid-iteration ..'
data = [] self._populate_cache()
for i in xrange(REPR_OUTPUT_SIZE + 1): data = self._result_cache[:REPR_OUTPUT_SIZE + 1]
try:
data.append(self.next())
except StopIteration:
break
if len(data) > REPR_OUTPUT_SIZE: if len(data) > REPR_OUTPUT_SIZE:
data[-1] = "...(remaining elements truncated)..." data[-1] = "...(remaining elements truncated)..."
self.rewind()
return repr(data) return repr(data)
# Core functions # Core functions
@ -351,7 +396,12 @@ class QuerySet(object):
""" """
if self._limit == 0: if self._limit == 0:
return 0 return 0
return self._cursor.count(with_limit_and_skip=with_limit_and_skip) if with_limit_and_skip and self._len is not None:
return self._len
count = self._cursor.count(with_limit_and_skip=with_limit_and_skip)
if with_limit_and_skip:
self._len = count
return count
def delete(self, write_concern=None): def delete(self, write_concern=None):
"""Delete the documents matched by the query. """Delete the documents matched by the query.
@ -426,7 +476,7 @@ class QuerySet(object):
.. versionadded:: 0.2 .. versionadded:: 0.2
""" """
if not update: if not update and not upsert:
raise OperationError("No update parameters, would remove data") raise OperationError("No update parameters, would remove data")
if not write_concern: if not write_concern:
@ -469,7 +519,8 @@ class QuerySet(object):
.. versionadded:: 0.2 .. versionadded:: 0.2
""" """
return self.update(upsert=upsert, multi=False, write_concern=None, **update) return self.update(
upsert=upsert, multi=False, write_concern=write_concern, **update)
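With the fix above, ``update_one`` now forwards the caller's ``write_concern`` instead of discarding it. A hedged usage sketch (document, field and slug values are assumptions)::

    # atomically bump a counter on a single matching document, creating it
    # if it does not exist, and acknowledge the write with w=1
    BlogPost.objects(slug="intro").update_one(
        upsert=True,
        write_concern={"w": 1},
        inc__views=1)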
def with_id(self, object_id): def with_id(self, object_id):
"""Retrieve the object matching the id provided. Uses `object_id` only """Retrieve the object matching the id provided. Uses `object_id` only
@ -518,6 +569,15 @@ class QuerySet(object):
queryset._none = True queryset._none = True
return queryset return queryset
def no_sub_classes(self):
"""
Only return instances of this document and not any inherited documents
"""
if self._document._meta.get('allow_inheritance') is True:
self._initial_query = {"_cls": self._document._class_name}
return self
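This is the queryset-level counterpart of the ``no_sub_classes`` context manager; a one-line sketch against the same assumed ``Group`` hierarchy::

    # matches only documents whose _cls is exactly the Group class
    plain_groups = Group.objects.no_sub_classes()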
def clone(self): def clone(self):
"""Creates a copy of the current """Creates a copy of the current
:class:`~mongoengine.queryset.QuerySet` :class:`~mongoengine.queryset.QuerySet`
@ -536,20 +596,15 @@ class QuerySet(object):
val = getattr(self, prop) val = getattr(self, prop)
setattr(c, prop, copy.copy(val)) setattr(c, prop, copy.copy(val))
if self._slice:
c._slice = self._slice
if self._cursor_obj: if self._cursor_obj:
c._cursor_obj = self._cursor_obj.clone() c._cursor_obj = self._cursor_obj.clone()
if self._slice:
c._cursor[self._slice]
return c return c
def select_related(self, max_depth=1): def select_related(self, max_depth=1):
"""Handles dereferencing of :class:`~bson.dbref.DBRef` objects to """Handles dereferencing of :class:`~bson.dbref.DBRef` objects or
a maximum depth in order to cut down the number of queries to mongodb. :class:`~bson.object_id.ObjectId` to a maximum depth in order to cut down
the number of queries to mongodb.
.. versionadded:: 0.5 .. versionadded:: 0.5
""" """
@ -570,7 +625,6 @@ class QuerySet(object):
else: else:
queryset._cursor.limit(n) queryset._cursor.limit(n)
queryset._limit = n queryset._limit = n
# Return self to allow chaining # Return self to allow chaining
return queryset return queryset
@ -608,6 +662,9 @@ class QuerySet(object):
:param field: the field to select distinct values from :param field: the field to select distinct values from
.. note:: This is a command and won't take ordering or limit into
account.
.. versionadded:: 0.4 .. versionadded:: 0.4
.. versionchanged:: 0.5 - Fixed handling references .. versionchanged:: 0.5 - Fixed handling references
.. versionchanged:: 0.6 - Improved db_field reference handling .. versionchanged:: 0.6 - Improved db_field reference handling
@ -819,8 +876,7 @@ class QuerySet(object):
def to_json(self): def to_json(self):
"""Converts a queryset to JSON""" """Converts a queryset to JSON"""
queryset = self.clone() return json_util.dumps(self.as_pymongo())
return json_util.dumps(queryset._collection_obj.find(queryset._query))
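Serialising through ``as_pymongo`` means ``to_json`` now honours the queryset's own state (filters, ``only``/``exclude`` projections and so on) rather than re-running a bare find. For example (``Person`` is an assumed document class)::

    json_blob = Person.objects(age__gte=18).only('name').to_json()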
def from_json(self, json_data): def from_json(self, json_data):
"""Converts json data to unsaved objects""" """Converts json data to unsaved objects"""
@ -1062,11 +1118,11 @@ class QuerySet(object):
.. note:: .. note::
Can only do direct simple mappings and cannot map across Can only do direct simple mappings and cannot map across
:class:`~mongoengine.ReferenceField` or :class:`~mongoengine.fields.ReferenceField` or
:class:`~mongoengine.GenericReferenceField` for more complex :class:`~mongoengine.fields.GenericReferenceField` for more complex
counting a manual map reduce call is required. counting a manual map reduce call is required.
If the field is a :class:`~mongoengine.ListField`, the items within If the field is a :class:`~mongoengine.fields.ListField`, the items within
each list will be counted individually. each list will be counted individually.
:param field: the field to use :param field: the field to use
@ -1086,20 +1142,18 @@ class QuerySet(object):
def next(self): def next(self):
"""Wrap the result in a :class:`~mongoengine.Document` object. """Wrap the result in a :class:`~mongoengine.Document` object.
""" """
self._iter = True
try:
if self._limit == 0 or self._none: if self._limit == 0 or self._none:
raise StopIteration raise StopIteration
if self._scalar:
return self._get_scalar(self._document._from_son(
self._cursor.next()))
if self._as_pymongo:
return self._get_as_pymongo(self._cursor.next())
return self._document._from_son(self._cursor.next()) raw_doc = self._cursor.next()
except StopIteration, e: if self._as_pymongo:
self.rewind() return self._get_as_pymongo(raw_doc)
raise e
doc = self._document._from_son(raw_doc)
if self._scalar:
return self._get_scalar(doc)
return doc
def rewind(self): def rewind(self):
"""Rewind the cursor to its unevaluated state. """Rewind the cursor to its unevaluated state.
@ -1152,7 +1206,7 @@ class QuerySet(object):
self._cursor_obj.sort(order) self._cursor_obj.sort(order)
if self._limit is not None: if self._limit is not None:
self._cursor_obj.limit(self._limit - (self._skip or 0)) self._cursor_obj.limit(self._limit)
if self._skip is not None: if self._skip is not None:
self._cursor_obj.skip(self._skip) self._cursor_obj.skip(self._skip)
@ -1367,7 +1421,15 @@ class QuerySet(object):
new_data = {} new_data = {}
for key, value in data.iteritems(): for key, value in data.iteritems():
new_path = '%s.%s' % (path, key) if path else key new_path = '%s.%s' % (path, key) if path else key
if all_fields or new_path in self.__as_pymongo_fields:
if all_fields:
include_field = True
elif self._loaded_fields.value == QueryFieldList.ONLY:
include_field = new_path in self.__as_pymongo_fields
else:
include_field = new_path not in self.__as_pymongo_fields
if include_field:
new_data[key] = clean(value, path=new_path) new_data[key] = clean(value, path=new_path)
data = new_data data = new_data
elif isinstance(data, list): elif isinstance(data, list):
@ -1416,7 +1478,6 @@ class QuerySet(object):
return code return code
# Deprecated # Deprecated
def ensure_index(self, **kwargs): def ensure_index(self, **kwargs):
"""Deprecated use :func:`~Document.ensure_index`""" """Deprecated use :func:`~Document.ensure_index`"""
msg = ("Doc.objects()._ensure_index() is deprecated. " msg = ("Doc.objects()._ensure_index() is deprecated. "

View File

@ -1,5 +1,6 @@
from collections import defaultdict from collections import defaultdict
import pymongo
from bson import SON from bson import SON
from mongoengine.common import _import_class from mongoengine.common import _import_class
@ -12,7 +13,9 @@ COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
'all', 'size', 'exists', 'not') 'all', 'size', 'exists', 'not')
GEO_OPERATORS = ('within_distance', 'within_spherical_distance', GEO_OPERATORS = ('within_distance', 'within_spherical_distance',
'within_box', 'within_polygon', 'near', 'near_sphere', 'within_box', 'within_polygon', 'near', 'near_sphere',
'max_distance') 'max_distance', 'geo_within', 'geo_within_box',
'geo_within_polygon', 'geo_within_center',
'geo_within_sphere', 'geo_intersects')
STRING_OPERATORS = ('contains', 'icontains', 'startswith', STRING_OPERATORS = ('contains', 'icontains', 'startswith',
'istartswith', 'endswith', 'iendswith', 'istartswith', 'endswith', 'iendswith',
'exact', 'iexact') 'exact', 'iexact')
@ -21,7 +24,8 @@ MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS +
STRING_OPERATORS + CUSTOM_OPERATORS) STRING_OPERATORS + CUSTOM_OPERATORS)
UPDATE_OPERATORS = ('set', 'unset', 'inc', 'dec', 'pop', 'push', UPDATE_OPERATORS = ('set', 'unset', 'inc', 'dec', 'pop', 'push',
'push_all', 'pull', 'pull_all', 'add_to_set') 'push_all', 'pull', 'pull_all', 'add_to_set',
'set_on_insert')
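The new ``set_on_insert`` operator maps to MongoDB 2.4's ``$setOnInsert`` and only applies when an upsert actually inserts a document. A hedged sketch (document and field names are assumptions)::

    from datetime import datetime

    # on insert both title and created_date are written; if the document
    # already exists only title is updated
    BlogPost.objects(slug="intro").update_one(
        upsert=True,
        set__title="Introduction",
        set_on_insert__created_date=datetime.utcnow())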
def query(_doc_cls=None, _field_operation=False, **query): def query(_doc_cls=None, _field_operation=False, **query):
@ -81,30 +85,14 @@ def query(_doc_cls=None, _field_operation=False, **query):
value = field value = field
else: else:
value = field.prepare_query_value(op, value) value = field.prepare_query_value(op, value)
elif op in ('in', 'nin', 'all', 'near'): elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict):
# 'in', 'nin' and 'all' require a list of values # 'in', 'nin' and 'all' require a list of values
value = [field.prepare_query_value(op, v) for v in value] value = [field.prepare_query_value(op, v) for v in value]
# if op and op not in COMPARISON_OPERATORS: # if op and op not in COMPARISON_OPERATORS:
if op: if op:
if op in GEO_OPERATORS: if op in GEO_OPERATORS:
if op == "within_distance": value = _geo_operator(field, op, value)
value = {'$within': {'$center': value}}
elif op == "within_spherical_distance":
value = {'$within': {'$centerSphere': value}}
elif op == "within_polygon":
value = {'$within': {'$polygon': value}}
elif op == "near":
value = {'$near': value}
elif op == "near_sphere":
value = {'$nearSphere': value}
elif op == 'within_box':
value = {'$within': {'$box': value}}
elif op == "max_distance":
value = {'$maxDistance': value}
else:
raise NotImplementedError("Geo method '%s' has not "
"been implemented" % op)
elif op in CUSTOM_OPERATORS: elif op in CUSTOM_OPERATORS:
if op == 'match': if op == 'match':
value = {"$elemMatch": value} value = {"$elemMatch": value}
@ -176,7 +164,9 @@ def update(_doc_cls=None, **update):
if value > 0: if value > 0:
value = -value value = -value
elif op == 'add_to_set': elif op == 'add_to_set':
op = op.replace('_to_set', 'ToSet') op = 'addToSet'
elif op == 'set_on_insert':
op = "setOnInsert"
match = None match = None
if parts[-1] in COMPARISON_OPERATORS: if parts[-1] in COMPARISON_OPERATORS:
@ -250,3 +240,76 @@ def update(_doc_cls=None, **update):
mongo_update[key].update(value) mongo_update[key].update(value)
return mongo_update return mongo_update
def _geo_operator(field, op, value):
"""Helper to return the query for a given geo query"""
if field._geo_index == pymongo.GEO2D:
if op == "within_distance":
value = {'$within': {'$center': value}}
elif op == "within_spherical_distance":
value = {'$within': {'$centerSphere': value}}
elif op == "within_polygon":
value = {'$within': {'$polygon': value}}
elif op == "near":
value = {'$near': value}
elif op == "near_sphere":
value = {'$nearSphere': value}
elif op == 'within_box':
value = {'$within': {'$box': value}}
elif op == "max_distance":
value = {'$maxDistance': value}
else:
raise NotImplementedError("Geo method '%s' has not "
"been implemented for a GeoPointField" % op)
else:
if op == "geo_within":
value = {"$geoWithin": _infer_geometry(value)}
elif op == "geo_within_box":
value = {"$geoWithin": {"$box": value}}
elif op == "geo_within_polygon":
value = {"$geoWithin": {"$polygon": value}}
elif op == "geo_within_center":
value = {"$geoWithin": {"$center": value}}
elif op == "geo_within_sphere":
value = {"$geoWithin": {"$centerSphere": value}}
elif op == "geo_intersects":
value = {"$geoIntersects": _infer_geometry(value)}
elif op == "near":
value = {'$near': _infer_geometry(value)}
elif op == "max_distance":
value = {'$maxDistance': value}
else:
raise NotImplementedError("Geo method '%s' has not "
"been implemented for a %s " % (op, field._name))
return value
def _infer_geometry(value):
"""Helper method that tries to infer the $geometry shape for a given value"""
if isinstance(value, dict):
if "$geometry" in value:
return value
elif 'coordinates' in value and 'type' in value:
return {"$geometry": value}
raise InvalidQueryError("Invalid $geometry dictionary should have "
"type and coordinates keys")
elif isinstance(value, (list, set)):
try:
value[0][0][0]
return {"$geometry": {"type": "Polygon", "coordinates": value}}
except:
pass
try:
value[0][0]
return {"$geometry": {"type": "LineString", "coordinates": value}}
except:
pass
try:
value[0]
return {"$geometry": {"type": "Point", "coordinates": value}}
except:
pass
raise InvalidQueryError("Invalid $geometry data. Can be either a dictionary "
"or (nested) lists of coordinate(s)")

View File

@ -5,7 +5,7 @@
%define srcname mongoengine %define srcname mongoengine
Name: python-%{srcname} Name: python-%{srcname}
Version: 0.7.10 Version: 0.8.0.RC4
Release: 1%{?dist} Release: 1%{?dist}
Summary: A Python Document-Object Mapper for working with MongoDB Summary: A Python Document-Object Mapper for working with MongoDB

View File

@ -51,13 +51,13 @@ CLASSIFIERS = [
extra_opts = {} extra_opts = {}
if sys.version_info[0] == 3: if sys.version_info[0] == 3:
extra_opts['use_2to3'] = True extra_opts['use_2to3'] = True
extra_opts['tests_require'] = ['nose', 'coverage', 'blinker'] extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2']
extra_opts['packages'] = find_packages(exclude=('tests',)) extra_opts['packages'] = find_packages(exclude=('tests',))
if "test" in sys.argv or "nosetests" in sys.argv: if "test" in sys.argv or "nosetests" in sys.argv:
extra_opts['packages'].append("tests") extra_opts['packages'].append("tests")
extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]} extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]}
else: else:
extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'PIL'] extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'PIL', 'jinja2']
extra_opts['packages'] = find_packages(exclude=('tests',)) extra_opts['packages'] = find_packages(exclude=('tests',))
setup(name='mongoengine', setup(name='mongoengine',
@ -74,7 +74,7 @@ setup(name='mongoengine',
long_description=LONG_DESCRIPTION, long_description=LONG_DESCRIPTION,
platforms=['any'], platforms=['any'],
classifiers=CLASSIFIERS, classifiers=CLASSIFIERS,
install_requires=['pymongo'], install_requires=['pymongo>=2.5'],
test_suite='nose.collector', test_suite='nose.collector',
**extra_opts **extra_opts
) )

View File

@ -1,12 +1,11 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
from __future__ import with_statement
import sys import sys
sys.path[0:0] = [""] sys.path[0:0] = [""]
import unittest import unittest
from mongoengine import * from mongoengine import *
from mongoengine.queryset import NULLIFY from mongoengine.queryset import NULLIFY, PULL
from mongoengine.connection import get_db from mongoengine.connection import get_db
__all__ = ("ClassMethodsTest", ) __all__ = ("ClassMethodsTest", )
@ -86,6 +85,25 @@ class ClassMethodsTest(unittest.TestCase):
self.assertEqual(self.Person._meta['delete_rules'], self.assertEqual(self.Person._meta['delete_rules'],
{(Job, 'employee'): NULLIFY}) {(Job, 'employee'): NULLIFY})
def test_register_delete_rule_inherited(self):
class Vaccine(Document):
name = StringField(required=True)
meta = {"indexes": ["name"]}
class Animal(Document):
family = StringField(required=True)
vaccine_made = ListField(ReferenceField("Vaccine", reverse_delete_rule=PULL))
meta = {"allow_inheritance": True, "indexes": ["family"]}
class Cat(Animal):
name = StringField(required=True)
self.assertEqual(Vaccine._meta['delete_rules'][(Animal, 'vaccine_made')], PULL)
self.assertEqual(Vaccine._meta['delete_rules'][(Cat, 'vaccine_made')], PULL)
def test_collection_naming(self): def test_collection_naming(self):
"""Ensure that a collection with a specified name may be used. """Ensure that a collection with a specified name may be used.
""" """

View File

@ -31,8 +31,9 @@ class DynamicTest(unittest.TestCase):
self.assertEqual(p.to_mongo(), {"_cls": "Person", "name": "James", self.assertEqual(p.to_mongo(), {"_cls": "Person", "name": "James",
"age": 34}) "age": 34})
self.assertEqual(p.to_mongo().keys(), ["_cls", "name", "age"])
p.save() p.save()
self.assertEqual(p.to_mongo().keys(), ["_id", "_cls", "name", "age"])
self.assertEqual(self.Person.objects.first().age, 34) self.assertEqual(self.Person.objects.first().age, 34)

View File

@ -1,5 +1,4 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
from __future__ import with_statement
import unittest import unittest
import sys import sys
sys.path[0:0] = [""] sys.path[0:0] = [""]
@ -231,8 +230,7 @@ class IndexesTest(unittest.TestCase):
location = DictField() location = DictField()
class Place(Document): class Place(Document):
current = DictField( current = DictField(field=EmbeddedDocumentField('EmbeddedLocation'))
field=EmbeddedDocumentField('EmbeddedLocation'))
meta = { meta = {
'allow_inheritance': True, 'allow_inheritance': True,
'indexes': [ 'indexes': [
@ -382,8 +380,7 @@ class IndexesTest(unittest.TestCase):
self.assertEqual(sorted(info.keys()), ['_id_', 'tags.tag_1']) self.assertEqual(sorted(info.keys()), ['_id_', 'tags.tag_1'])
post1 = BlogPost(title="Embedded Indexes tests in place", post1 = BlogPost(title="Embedded Indexes tests in place",
tags=[Tag(name="about"), Tag(name="time")] tags=[Tag(name="about"), Tag(name="time")])
)
post1.save() post1.save()
BlogPost.drop_collection() BlogPost.drop_collection()
@ -400,29 +397,6 @@ class IndexesTest(unittest.TestCase):
info = RecursiveDocument._get_collection().index_information() info = RecursiveDocument._get_collection().index_information()
self.assertEqual(sorted(info.keys()), ['_cls_1', '_id_']) self.assertEqual(sorted(info.keys()), ['_cls_1', '_id_'])
def test_geo_indexes_recursion(self):
class Location(Document):
name = StringField()
location = GeoPointField()
class Parent(Document):
name = StringField()
location = ReferenceField(Location, dbref=False)
Location.drop_collection()
Parent.drop_collection()
list(Parent.objects)
collection = Parent._get_collection()
info = collection.index_information()
self.assertFalse('location_2d' in info)
self.assertEqual(len(Parent._geo_indices()), 0)
self.assertEqual(len(Location._geo_indices()), 1)
def test_covered_index(self): def test_covered_index(self):
"""Ensure that covered indexes can be used """Ensure that covered indexes can be used
""" """

View File

@ -168,6 +168,26 @@ class InheritanceTest(unittest.TestCase):
self.assertEqual(Employee._get_collection_name(), self.assertEqual(Employee._get_collection_name(),
Person._get_collection_name()) Person._get_collection_name())
def test_inheritance_to_mongo_keys(self):
"""Ensure that document may inherit fields from a superclass document.
"""
class Person(Document):
name = StringField()
age = IntField()
meta = {'allow_inheritance': True}
class Employee(Person):
salary = IntField()
self.assertEqual(['age', 'id', 'name', 'salary'],
sorted(Employee._fields.keys()))
self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(),
['_cls', 'name', 'age'])
self.assertEqual(Employee(name="Bob", age=35, salary=0).to_mongo().keys(),
['_cls', 'name', 'age', 'salary'])
self.assertEqual(Employee._get_collection_name(),
Person._get_collection_name())
def test_polymorphic_queries(self): def test_polymorphic_queries(self):
"""Ensure that the correct subclasses are returned from a query """Ensure that the correct subclasses are returned from a query
@ -197,7 +217,6 @@ class InheritanceTest(unittest.TestCase):
classes = [obj.__class__ for obj in Human.objects] classes = [obj.__class__ for obj in Human.objects]
self.assertEqual(classes, [Human]) self.assertEqual(classes, [Human])
def test_allow_inheritance(self): def test_allow_inheritance(self):
"""Ensure that inheritance may be disabled on simple classes and that """Ensure that inheritance may be disabled on simple classes and that
_cls and _subclasses will not be used. _cls and _subclasses will not be used.
@ -213,8 +232,8 @@ class InheritanceTest(unittest.TestCase):
self.assertRaises(ValueError, create_dog_class) self.assertRaises(ValueError, create_dog_class)
# Check that _cls etc aren't present on simple documents # Check that _cls etc aren't present on simple documents
dog = Animal(name='dog') dog = Animal(name='dog').save()
dog.save() self.assertEqual(dog.to_mongo().keys(), ['_id', 'name'])
collection = self.db[Animal._get_collection_name()] collection = self.db[Animal._get_collection_name()]
obj = collection.find_one() obj = collection.find_one()

View File

@ -1,5 +1,4 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
from __future__ import with_statement
import sys import sys
sys.path[0:0] = [""] sys.path[0:0] = [""]
@ -320,8 +319,8 @@ class InstanceTest(unittest.TestCase):
Location.drop_collection() Location.drop_collection()
self.assertEquals(Area, get_document("Area")) self.assertEqual(Area, get_document("Area"))
self.assertEquals(Area, get_document("Location.Area")) self.assertEqual(Area, get_document("Location.Area"))
def test_creation(self): def test_creation(self):
"""Ensure that document may be created using keyword arguments. """Ensure that document may be created using keyword arguments.
@ -428,6 +427,21 @@ class InstanceTest(unittest.TestCase):
self.assertFalse('age' in person) self.assertFalse('age' in person)
self.assertFalse('nationality' in person) self.assertFalse('nationality' in person)
def test_embedded_document_to_mongo(self):
class Person(EmbeddedDocument):
name = StringField()
age = IntField()
meta = {"allow_inheritance": True}
class Employee(Person):
salary = IntField()
self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(),
['_cls', 'name', 'age'])
self.assertEqual(Employee(name="Bob", age=35, salary=0).to_mongo().keys(),
['_cls', 'name', 'age', 'salary'])
def test_embedded_document(self): def test_embedded_document(self):
"""Ensure that embedded documents are set up correctly. """Ensure that embedded documents are set up correctly.
""" """
@ -494,12 +508,12 @@ class InstanceTest(unittest.TestCase):
t = TestDocument(status="published") t = TestDocument(status="published")
t.save(clean=False) t.save(clean=False)
self.assertEquals(t.pub_date, None) self.assertEqual(t.pub_date, None)
t = TestDocument(status="published") t = TestDocument(status="published")
t.save(clean=True) t.save(clean=True)
self.assertEquals(type(t.pub_date), datetime) self.assertEqual(type(t.pub_date), datetime)
def test_document_embedded_clean(self): def test_document_embedded_clean(self):
class TestEmbeddedDocument(EmbeddedDocument): class TestEmbeddedDocument(EmbeddedDocument):
@ -531,7 +545,7 @@ class InstanceTest(unittest.TestCase):
self.assertEqual(e.to_dict(), {'doc': {'__all__': expect_msg}}) self.assertEqual(e.to_dict(), {'doc': {'__all__': expect_msg}})
t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25)).save() t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25)).save()
self.assertEquals(t.doc.z, 35) self.assertEqual(t.doc.z, 35)
# Asserts not raises # Asserts not raises
t = TestDocument(doc=TestEmbeddedDocument(x=15, y=35, z=5)) t = TestDocument(doc=TestEmbeddedDocument(x=15, y=35, z=5))
@ -838,6 +852,14 @@ class InstanceTest(unittest.TestCase):
self.assertEqual(person.name, None) self.assertEqual(person.name, None)
self.assertEqual(person.age, None) self.assertEqual(person.age, None)
def test_inserts_if_you_set_the_pk(self):
p1 = self.Person(name='p1', id=bson.ObjectId()).save()
p2 = self.Person(name='p2')
p2.id = bson.ObjectId()
p2.save()
self.assertEqual(2, self.Person.objects.count())
def test_can_save_if_not_included(self): def test_can_save_if_not_included(self):
class EmbeddedDoc(EmbeddedDocument): class EmbeddedDoc(EmbeddedDocument):
@ -1881,10 +1903,10 @@ class InstanceTest(unittest.TestCase):
A.objects.all() A.objects.all()
self.assertEquals('testdb-2', B._meta.get('db_alias')) self.assertEqual('testdb-2', B._meta.get('db_alias'))
self.assertEquals('mongoenginetest', self.assertEqual('mongoenginetest',
A._get_collection().database.name) A._get_collection().database.name)
self.assertEquals('mongoenginetest2', self.assertEqual('mongoenginetest2',
B._get_collection().database.name) B._get_collection().database.name)
def test_db_alias_propagates(self): def test_db_alias_propagates(self):

View File

@ -1,2 +1,3 @@
from fields import * from fields import *
from file_tests import * from file_tests import *
from geo import *

View File

@ -1,5 +1,4 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
from __future__ import with_statement
import sys import sys
sys.path[0:0] = [""] sys.path[0:0] = [""]
@ -409,6 +408,27 @@ class FieldTest(unittest.TestCase):
log.time = '1pm' log.time = '1pm'
self.assertRaises(ValidationError, log.validate) self.assertRaises(ValidationError, log.validate)
def test_datetime_tz_aware_mark_as_changed(self):
from mongoengine import connection
# Reset the connections
connection._connection_settings = {}
connection._connections = {}
connection._dbs = {}
connect(db='mongoenginetest', tz_aware=True)
class LogEntry(Document):
time = DateTimeField()
LogEntry.drop_collection()
LogEntry(time=datetime.datetime(2013, 1, 1, 0, 0, 0)).save()
log = LogEntry.objects.first()
log.time = datetime.datetime(2013, 1, 1, 0, 0, 0)
self.assertEqual(['time'], log._changed_fields)
def test_datetime(self): def test_datetime(self):
"""Tests showing pymongo datetime fields handling of microseconds. """Tests showing pymongo datetime fields handling of microseconds.
Microseconds are rounded to the nearest millisecond and pre UTC Microseconds are rounded to the nearest millisecond and pre UTC
@ -1841,45 +1861,6 @@ class FieldTest(unittest.TestCase):
Shirt.drop_collection() Shirt.drop_collection()
def test_geo_indexes(self):
"""Ensure that indexes are created automatically for GeoPointFields.
"""
class Event(Document):
title = StringField()
location = GeoPointField()
Event.drop_collection()
event = Event(title="Coltrane Motion @ Double Door",
location=[41.909889, -87.677137])
event.save()
info = Event.objects._collection.index_information()
self.assertTrue(u'location_2d' in info)
self.assertTrue(info[u'location_2d']['key'] == [(u'location', u'2d')])
Event.drop_collection()
def test_geo_embedded_indexes(self):
"""Ensure that indexes are created automatically for GeoPointFields on
embedded documents.
"""
class Venue(EmbeddedDocument):
location = GeoPointField()
name = StringField()
class Event(Document):
title = StringField()
venue = EmbeddedDocumentField(Venue)
Event.drop_collection()
venue = Venue(name="Double Door", location=[41.909889, -87.677137])
event = Event(title="Coltrane Motion", venue=venue)
event.save()
info = Event.objects._collection.index_information()
self.assertTrue(u'location_2d' in info)
self.assertTrue(info[u'location_2d']['key'] == [(u'location', u'2d')])
def test_ensure_unique_default_instances(self): def test_ensure_unique_default_instances(self):
"""Ensure that every field has it's own unique default instance.""" """Ensure that every field has it's own unique default instance."""
class D(Document): class D(Document):

View File

@ -1,5 +1,4 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
from __future__ import with_statement
import sys import sys
sys.path[0:0] = [""] sys.path[0:0] = [""]

274
tests/fields/geo.py Normal file
View File

@ -0,0 +1,274 @@
# -*- coding: utf-8 -*-
import sys
sys.path[0:0] = [""]
import unittest
from mongoengine import *
from mongoengine.connection import get_db
__all__ = ("GeoFieldTest", )
class GeoFieldTest(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
self.db = get_db()
def _test_for_expected_error(self, Cls, loc, expected):
try:
Cls(loc=loc).validate()
self.fail()
except ValidationError, e:
self.assertEqual(expected, e.to_dict()['loc'])
def test_geopoint_validation(self):
class Location(Document):
loc = GeoPointField()
invalid_coords = [{"x": 1, "y": 2}, 5, "a"]
expected = 'GeoPointField can only accept tuples or lists of (x, y)'
for coord in invalid_coords:
self._test_for_expected_error(Location, coord, expected)
invalid_coords = [[], [1], [1, 2, 3]]
for coord in invalid_coords:
expected = "Value (%s) must be a two-dimensional point" % repr(coord)
self._test_for_expected_error(Location, coord, expected)
invalid_coords = [[{}, {}], ("a", "b")]
for coord in invalid_coords:
expected = "Both values (%s) in point must be float or int" % repr(coord)
self._test_for_expected_error(Location, coord, expected)
def test_point_validation(self):
class Location(Document):
loc = PointField()
invalid_coords = {"x": 1, "y": 2}
expected = 'PointField can only accept a valid GeoJson dictionary or lists of (x, y)'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "MadeUp", "coordinates": []}
expected = 'PointField type must be "Point"'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "Point", "coordinates": [1, 2, 3]}
expected = "Value ([1, 2, 3]) must be a two-dimensional point"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [5, "a"]
expected = "PointField can only accept lists of [x, y]"
for coord in invalid_coords:
self._test_for_expected_error(Location, coord, expected)
invalid_coords = [[], [1], [1, 2, 3]]
for coord in invalid_coords:
expected = "Value (%s) must be a two-dimensional point" % repr(coord)
self._test_for_expected_error(Location, coord, expected)
invalid_coords = [[{}, {}], ("a", "b")]
for coord in invalid_coords:
expected = "Both values (%s) in point must be float or int" % repr(coord)
self._test_for_expected_error(Location, coord, expected)
Location(loc=[1, 2]).validate()
def test_linestring_validation(self):
class Location(Document):
loc = LineStringField()
invalid_coords = {"x": 1, "y": 2}
expected = 'LineStringField can only accept a valid GeoJson dictionary or lists of (x, y)'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
expected = 'LineStringField type must be "LineString"'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "LineString", "coordinates": [[1, 2, 3]]}
expected = "Invalid LineString:\nValue ([1, 2, 3]) must be a two-dimensional point"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [5, "a"]
expected = "Invalid LineString must contain at least one valid point"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[1]]
expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0])
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[1, 2, 3]]
expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0])
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[{}, {}]], [("a", "b")]]
for coord in invalid_coords:
expected = "Invalid LineString:\nBoth values (%s) in point must be float or int" % repr(coord[0])
self._test_for_expected_error(Location, coord, expected)
Location(loc=[[1, 2], [3, 4], [5, 6], [1,2]]).validate()
def test_polygon_validation(self):
class Location(Document):
loc = PolygonField()
invalid_coords = {"x": 1, "y": 2}
expected = 'PolygonField can only accept a valid GeoJson dictionary or lists of (x, y)'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
expected = 'PolygonField type must be "Polygon"'
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = {"type": "Polygon", "coordinates": [[[1, 2, 3]]]}
expected = "Invalid Polygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[5, "a"]]]
expected = "Invalid Polygon:\nBoth values ([5, 'a']) in point must be float or int"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[]]]
expected = "Invalid Polygon must contain at least one valid linestring"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[1, 2, 3]]]
expected = "Invalid Polygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[{}, {}]], [("a", "b")]]
expected = "Invalid Polygon:\nBoth values ([{}, {}]) in point must be float or int, Both values (('a', 'b')) in point must be float or int"
self._test_for_expected_error(Location, invalid_coords, expected)
invalid_coords = [[[1, 2], [3, 4]]]
expected = "Invalid Polygon:\nLineStrings must start and end at the same point"
self._test_for_expected_error(Location, invalid_coords, expected)
Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate()
def test_indexes_geopoint(self):
"""Ensure that indexes are created automatically for GeoPointFields.
"""
class Event(Document):
title = StringField()
location = GeoPointField()
geo_indicies = Event._geo_indices()
self.assertEqual(geo_indicies, [{'fields': [('location', '2d')]}])
def test_geopoint_embedded_indexes(self):
"""Ensure that indexes are created automatically for GeoPointFields on
embedded documents.
"""
class Venue(EmbeddedDocument):
location = GeoPointField()
name = StringField()
class Event(Document):
title = StringField()
venue = EmbeddedDocumentField(Venue)
geo_indicies = Event._geo_indices()
self.assertEqual(geo_indicies, [{'fields': [('venue.location', '2d')]}])
def test_indexes_2dsphere(self):
"""Ensure that indexes are created automatically for GeoPointFields.
"""
class Event(Document):
title = StringField()
point = PointField()
line = LineStringField()
polygon = PolygonField()
geo_indicies = Event._geo_indices()
self.assertTrue({'fields': [('line', '2dsphere')]} in geo_indicies)
self.assertTrue({'fields': [('polygon', '2dsphere')]} in geo_indicies)
self.assertTrue({'fields': [('point', '2dsphere')]} in geo_indicies)
def test_indexes_2dsphere_embedded(self):
"""Ensure that indexes are created automatically for GeoPointFields.
"""
class Venue(EmbeddedDocument):
name = StringField()
point = PointField()
line = LineStringField()
polygon = PolygonField()
class Event(Document):
title = StringField()
venue = EmbeddedDocumentField(Venue)
geo_indicies = Event._geo_indices()
self.assertTrue({'fields': [('venue.line', '2dsphere')]} in geo_indicies)
self.assertTrue({'fields': [('venue.polygon', '2dsphere')]} in geo_indicies)
self.assertTrue({'fields': [('venue.point', '2dsphere')]} in geo_indicies)
def test_geo_indexes_recursion(self):
class Location(Document):
name = StringField()
location = GeoPointField()
class Parent(Document):
name = StringField()
location = ReferenceField(Location)
Location.drop_collection()
Parent.drop_collection()
list(Parent.objects)
collection = Parent._get_collection()
info = collection.index_information()
self.assertFalse('location_2d' in info)
self.assertEqual(len(Parent._geo_indices()), 0)
self.assertEqual(len(Location._geo_indices()), 1)
def test_geo_indexes_auto_index(self):
# Test just listing the fields
class Log(Document):
location = PointField(auto_index=False)
datetime = DateTimeField()
meta = {
'indexes': [[("location", "2dsphere"), ("datetime", 1)]]
}
self.assertEqual([], Log._geo_indices())
Log.drop_collection()
Log.ensure_indexes()
info = Log._get_collection().index_information()
self.assertEqual(info["location_2dsphere_datetime_1"]["key"],
[('location', '2dsphere'), ('datetime', 1)])
# Test listing explicitly
class Log(Document):
location = PointField(auto_index=False)
datetime = DateTimeField()
meta = {
'indexes': [
{'fields': [("location", "2dsphere"), ("datetime", 1)]}
]
}
self.assertEqual([], Log._geo_indices())
Log.drop_collection()
Log.ensure_indexes()
info = Log._get_collection().index_information()
self.assertEqual(info["location_2dsphere_datetime_1"]["key"],
[('location', '2dsphere'), ('datetime', 1)])
if __name__ == '__main__':
unittest.main()
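The tests above pin down how geo fields interact with index creation. As a quick illustration (not part of the commit; the Checkin model and its field names are invented for the example), a geo field builds its 2dsphere index automatically unless auto_index=False is passed, in which case a compound index can be declared in meta and created with ensure_indexes():

from mongoengine import connect, Document, DateTimeField, PointField

class Checkin(Document):  # hypothetical example model
    location = PointField(auto_index=False)  # suppress the automatic 2dsphere index
    created = DateTimeField()
    meta = {
        'indexes': [
            {'fields': [('location', '2dsphere'), ('created', 1)]},
        ]
    }

connect('mongoenginetest')
Checkin.ensure_indexes()  # should build a compound location_2dsphere_created_1 index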

View File

@ -1,5 +1,5 @@
from transform import *
from field_list import *
from queryset import *
from visitor import *
from geo import *

418
tests/queryset/geo.py Normal file
View File

@ -0,0 +1,418 @@
import sys
sys.path[0:0] = [""]
import unittest
from datetime import datetime, timedelta
from mongoengine import *
__all__ = ("GeoQueriesTest",)
class GeoQueriesTest(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
def test_geospatial_operators(self):
"""Ensure that geospatial queries are working.
"""
class Event(Document):
title = StringField()
date = DateTimeField()
location = GeoPointField()
def __unicode__(self):
return self.title
Event.drop_collection()
event1 = Event(title="Coltrane Motion @ Double Door",
date=datetime.now() - timedelta(days=1),
location=[-87.677137, 41.909889]).save()
event2 = Event(title="Coltrane Motion @ Bottom of the Hill",
date=datetime.now() - timedelta(days=10),
location=[-122.4194155, 37.7749295]).save()
event3 = Event(title="Coltrane Motion @ Empty Bottle",
date=datetime.now(),
location=[-87.686638, 41.900474]).save()
# find all events "near" pitchfork office, chicago.
# note that "near" will show the san francisco event, too,
# although it sorts to last.
events = Event.objects(location__near=[-87.67892, 41.9120459])
self.assertEqual(events.count(), 3)
self.assertEqual(list(events), [event1, event3, event2])
# find events within 5 degrees of pitchfork office, chicago
point_and_distance = [[-87.67892, 41.9120459], 5]
events = Event.objects(location__within_distance=point_and_distance)
self.assertEqual(events.count(), 2)
events = list(events)
self.assertTrue(event2 not in events)
self.assertTrue(event1 in events)
self.assertTrue(event3 in events)
# ensure ordering is respected by "near"
events = Event.objects(location__near=[-87.67892, 41.9120459])
events = events.order_by("-date")
self.assertEqual(events.count(), 3)
self.assertEqual(list(events), [event3, event1, event2])
# find events within 10 degrees of san francisco
point = [-122.415579, 37.7566023]
events = Event.objects(location__near=point, location__max_distance=10)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event2)
# find events within 10 degrees of san francisco
point_and_distance = [[-122.415579, 37.7566023], 10]
events = Event.objects(location__within_distance=point_and_distance)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event2)
# find events within 1 degree of greenpoint, brooklyn, nyc, ny
point_and_distance = [[-73.9509714, 40.7237134], 1]
events = Event.objects(location__within_distance=point_and_distance)
self.assertEqual(events.count(), 0)
# ensure ordering is respected by "within_distance"
point_and_distance = [[-87.67892, 41.9120459], 10]
events = Event.objects(location__within_distance=point_and_distance)
events = events.order_by("-date")
self.assertEqual(events.count(), 2)
self.assertEqual(events[0], event3)
# check that within_box works
box = [(-125.0, 35.0), (-100.0, 40.0)]
events = Event.objects(location__within_box=box)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].id, event2.id)
polygon = [
(-87.694445, 41.912114),
(-87.69084, 41.919395),
(-87.681742, 41.927186),
(-87.654276, 41.911731),
(-87.656164, 41.898061),
]
events = Event.objects(location__within_polygon=polygon)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].id, event1.id)
polygon2 = [
(-1.742249, 54.033586),
(-1.225891, 52.792797),
(-4.40094, 53.389881)
]
events = Event.objects(location__within_polygon=polygon2)
self.assertEqual(events.count(), 0)
def test_geo_spatial_embedded(self):
class Venue(EmbeddedDocument):
location = GeoPointField()
name = StringField()
class Event(Document):
title = StringField()
venue = EmbeddedDocumentField(Venue)
Event.drop_collection()
venue1 = Venue(name="The Rock", location=[-87.677137, 41.909889])
venue2 = Venue(name="The Bridge", location=[-122.4194155, 37.7749295])
event1 = Event(title="Coltrane Motion @ Double Door",
venue=venue1).save()
event2 = Event(title="Coltrane Motion @ Bottom of the Hill",
venue=venue2).save()
event3 = Event(title="Coltrane Motion @ Empty Bottle",
venue=venue1).save()
# find all events "near" pitchfork office, chicago.
# note that "near" will show the san francisco event, too,
# although it sorts to last.
events = Event.objects(venue__location__near=[-87.67892, 41.9120459])
self.assertEqual(events.count(), 3)
self.assertEqual(list(events), [event1, event3, event2])
def test_spherical_geospatial_operators(self):
"""Ensure that spherical geospatial queries are working
"""
class Point(Document):
location = GeoPointField()
Point.drop_collection()
# These points are one degree apart, which (according to Google Maps)
# is about 110 km apart at this place on the Earth.
north_point = Point(location=[-122, 38]).save() # Near Concord, CA
south_point = Point(location=[-122, 37]).save() # Near Santa Cruz, CA
earth_radius = 6378.009 # in km (needs to be a float for dividing by)
# Finds both points because they are within 60 km of the reference
# point equidistant between them.
points = Point.objects(location__near_sphere=[-122, 37.5])
self.assertEqual(points.count(), 2)
# Same behavior for _within_spherical_distance
points = Point.objects(
location__within_spherical_distance=[[-122, 37.5], 60/earth_radius]
)
self.assertEqual(points.count(), 2)
points = Point.objects(location__near_sphere=[-122, 37.5],
location__max_distance=60 / earth_radius)
self.assertEqual(points.count(), 2)
# Finds both points, but orders the north point first because it's
# closer to the reference point to the north.
points = Point.objects(location__near_sphere=[-122, 38.5])
self.assertEqual(points.count(), 2)
self.assertEqual(points[0].id, north_point.id)
self.assertEqual(points[1].id, south_point.id)
# Finds both points, but orders the south point first because it's
# closer to the reference point to the south.
points = Point.objects(location__near_sphere=[-122, 36.5])
self.assertEqual(points.count(), 2)
self.assertEqual(points[0].id, south_point.id)
self.assertEqual(points[1].id, north_point.id)
# Finds only one point because only the first point is within 60km of
# the reference point to the south.
points = Point.objects(
location__within_spherical_distance=[[-122, 36.5], 60/earth_radius])
self.assertEqual(points.count(), 1)
self.assertEqual(points[0].id, south_point.id)
def test_2dsphere_point(self):
class Event(Document):
title = StringField()
date = DateTimeField()
location = PointField()
def __unicode__(self):
return self.title
Event.drop_collection()
event1 = Event(title="Coltrane Motion @ Double Door",
date=datetime.now() - timedelta(days=1),
location=[-87.677137, 41.909889])
event1.save()
event2 = Event(title="Coltrane Motion @ Bottom of the Hill",
date=datetime.now() - timedelta(days=10),
location=[-122.4194155, 37.7749295]).save()
event3 = Event(title="Coltrane Motion @ Empty Bottle",
date=datetime.now(),
location=[-87.686638, 41.900474]).save()
# find all events "near" pitchfork office, chicago.
# note that "near" will show the san francisco event, too,
# although it sorts to last.
events = Event.objects(location__near=[-87.67892, 41.9120459])
self.assertEqual(events.count(), 3)
self.assertEqual(list(events), [event1, event3, event2])
# find events within 2 degrees of pitchfork office, chicago
point_and_distance = [[-87.67892, 41.9120459], 2]
events = Event.objects(location__geo_within_center=point_and_distance)
self.assertEqual(events.count(), 2)
events = list(events)
self.assertTrue(event2 not in events)
self.assertTrue(event1 in events)
self.assertTrue(event3 in events)
# ensure ordering is respected by "near"
events = Event.objects(location__near=[-87.67892, 41.9120459])
events = events.order_by("-date")
self.assertEqual(events.count(), 3)
self.assertEqual(list(events), [event3, event1, event2])
# find events within 10km of san francisco
point = [-122.415579, 37.7566023]
events = Event.objects(location__near=point, location__max_distance=10000)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event2)
# find events within 1km of greenpoint, brooklyn, nyc, ny
events = Event.objects(location__near=[-73.9509714, 40.7237134], location__max_distance=1000)
self.assertEqual(events.count(), 0)
# ensure ordering is respected by "near"
events = Event.objects(location__near=[-87.67892, 41.9120459],
location__max_distance=10000).order_by("-date")
self.assertEqual(events.count(), 2)
self.assertEqual(events[0], event3)
# check that within_box works
box = [(-125.0, 35.0), (-100.0, 40.0)]
events = Event.objects(location__geo_within_box=box)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].id, event2.id)
polygon = [
(-87.694445, 41.912114),
(-87.69084, 41.919395),
(-87.681742, 41.927186),
(-87.654276, 41.911731),
(-87.656164, 41.898061),
]
events = Event.objects(location__geo_within_polygon=polygon)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].id, event1.id)
polygon2 = [
(-1.742249, 54.033586),
(-1.225891, 52.792797),
(-4.40094, 53.389881)
]
events = Event.objects(location__geo_within_polygon=polygon2)
self.assertEqual(events.count(), 0)
def test_2dsphere_point_embedded(self):
class Venue(EmbeddedDocument):
location = GeoPointField()
name = StringField()
class Event(Document):
title = StringField()
venue = EmbeddedDocumentField(Venue)
Event.drop_collection()
venue1 = Venue(name="The Rock", location=[-87.677137, 41.909889])
venue2 = Venue(name="The Bridge", location=[-122.4194155, 37.7749295])
event1 = Event(title="Coltrane Motion @ Double Door",
venue=venue1).save()
event2 = Event(title="Coltrane Motion @ Bottom of the Hill",
venue=venue2).save()
event3 = Event(title="Coltrane Motion @ Empty Bottle",
venue=venue1).save()
# find all events "near" pitchfork office, chicago.
# note that "near" will show the san francisco event, too,
# although it sorts to last.
events = Event.objects(venue__location__near=[-87.67892, 41.9120459])
self.assertEqual(events.count(), 3)
self.assertEqual(list(events), [event1, event3, event2])
def test_linestring(self):
class Road(Document):
name = StringField()
line = LineStringField()
Road.drop_collection()
Road(name="66", line=[[40, 5], [41, 6]]).save()
# near
point = {"type": "Point", "coordinates": [40, 5]}
roads = Road.objects.filter(line__near=point["coordinates"]).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(line__near=point).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(line__near={"$geometry": point}).count()
self.assertEqual(1, roads)
# Within
polygon = {"type": "Polygon",
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
roads = Road.objects.filter(line__geo_within=polygon["coordinates"]).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(line__geo_within=polygon).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(line__geo_within={"$geometry": polygon}).count()
self.assertEqual(1, roads)
# Intersects
line = {"type": "LineString",
"coordinates": [[40, 5], [40, 6]]}
roads = Road.objects.filter(line__geo_intersects=line["coordinates"]).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(line__geo_intersects=line).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(line__geo_intersects={"$geometry": line}).count()
self.assertEqual(1, roads)
polygon = {"type": "Polygon",
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
roads = Road.objects.filter(line__geo_intersects=polygon["coordinates"]).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(line__geo_intersects=polygon).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(line__geo_intersects={"$geometry": polygon}).count()
self.assertEqual(1, roads)
def test_polygon(self):
class Road(Document):
name = StringField()
poly = PolygonField()
Road.drop_collection()
Road(name="66", poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]]).save()
# near
point = {"type": "Point", "coordinates": [40, 5]}
roads = Road.objects.filter(poly__near=point["coordinates"]).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(poly__near=point).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(poly__near={"$geometry": point}).count()
self.assertEqual(1, roads)
# Within
polygon = {"type": "Polygon",
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
roads = Road.objects.filter(poly__geo_within=polygon["coordinates"]).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(poly__geo_within=polygon).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(poly__geo_within={"$geometry": polygon}).count()
self.assertEqual(1, roads)
# Intersects
line = {"type": "LineString",
"coordinates": [[40, 5], [41, 6]]}
roads = Road.objects.filter(poly__geo_intersects=line["coordinates"]).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(poly__geo_intersects=line).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(poly__geo_intersects={"$geometry": line}).count()
self.assertEqual(1, roads)
polygon = {"type": "Polygon",
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
roads = Road.objects.filter(poly__geo_intersects=polygon["coordinates"]).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(poly__geo_intersects=polygon).count()
self.assertEqual(1, roads)
roads = Road.objects.filter(poly__geo_intersects={"$geometry": polygon}).count()
self.assertEqual(1, roads)
if __name__ == '__main__':
unittest.main()
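For reference, a hedged sketch of the 2dsphere query operators these tests exercise (the Place model is invented for the example; coordinates are [longitude, latitude], and max_distance is interpreted in metres for 2dsphere fields):

from mongoengine import connect, Document, StringField, PointField

class Place(Document):  # hypothetical example model
    name = StringField()
    location = PointField()

connect('mongoenginetest')
Place.drop_collection()
Place(name="Double Door", location=[-87.677137, 41.909889]).save()

# places within roughly 10 km of a reference point near downtown Chicago
nearby = Place.objects(location__near=[-87.67892, 41.9120459],
                       location__max_distance=10000)
assert nearby.count() == 1

# places falling inside a bounding box given as two corner points
boxed = Place.objects(location__geo_within_box=[(-88.0, 41.0), (-87.0, 42.0)])
assert boxed.count() == 1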

View File

@ -1,4 +1,3 @@
from __future__ import with_statement
import sys
sys.path[0:0] = [""]
@ -116,6 +115,15 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(len(people), 1)
self.assertEqual(people[0].name, 'User B')
# Test slice limit and skip cursor reset
qs = self.Person.objects[1:2]
# fetch then delete the cursor
qs._cursor
qs._cursor_obj = None
people = list(qs)
self.assertEqual(len(people), 1)
self.assertEqual(people[0].name, 'User B')
people = list(self.Person.objects[1:1])
self.assertEqual(len(people), 0)
@ -274,7 +282,7 @@ class QuerySetTest(unittest.TestCase):
a_objects = A.objects(s='test1')
query = B.objects(ref__in=a_objects)
query = query.filter(boolfield=True)
self.assertEqual(query.count(), 1)
def test_update_write_concern(self):
"""Test that passing write_concern works"""
@ -287,15 +295,19 @@ class QuerySetTest(unittest.TestCase):
name='Test User', write_concern=write_concern)
author.save(write_concern=write_concern)
result = self.Person.objects.update(
set__name='Ross', write_concern={"w": 1})
self.assertEqual(result, 1)
result = self.Person.objects.update(
set__name='Ross', write_concern={"w": 0})
self.assertEqual(result, None)
result = self.Person.objects.update_one(
set__name='Test User', write_concern={"w": 1})
self.assertEqual(result, 1)
result = self.Person.objects.update_one(
set__name='Test User', write_concern={"w": 0})
self.assertEqual(result, None)
def test_update_update_has_a_value(self):
"""Test to ensure that update is passed a value to update to"""
@ -524,6 +536,24 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(club.members['John']['gender'], "F")
self.assertEqual(club.members['John']['age'], 14)
def test_upsert(self):
self.Person.drop_collection()
self.Person.objects(pk=ObjectId(), name="Bob", age=30).update(upsert=True)
bob = self.Person.objects.first()
self.assertEqual("Bob", bob.name)
self.assertEqual(30, bob.age)
def test_set_on_insert(self):
self.Person.drop_collection()
self.Person.objects(pk=ObjectId()).update(set__name='Bob', set_on_insert__age=30, upsert=True)
bob = self.Person.objects.first()
self.assertEqual("Bob", bob.name)
self.assertEqual(30, bob.age)
def test_get_or_create(self):
"""Ensure that ``get_or_create`` returns one result or creates a new
document.
@ -805,6 +835,7 @@ class QuerySetTest(unittest.TestCase):
self.assertTrue("Doc: 0" in docs_string) self.assertTrue("Doc: 0" in docs_string)
self.assertEqual(docs.count(), 1000) self.assertEqual(docs.count(), 1000)
self.assertTrue('(remaining elements truncated)' in "%s" % docs)
# Limit and skip
docs = docs[1:4]
@ -1291,6 +1322,49 @@ class QuerySetTest(unittest.TestCase):
self.Person.objects()[:1].delete()
self.assertEqual(1, BlogPost.objects.count())
def test_reference_field_find(self):
"""Ensure cascading deletion of referring documents from the database.
"""
class BlogPost(Document):
content = StringField()
author = ReferenceField(self.Person)
BlogPost.drop_collection()
self.Person.drop_collection()
me = self.Person(name='Test User').save()
BlogPost(content="test 123", author=me).save()
self.assertEqual(1, BlogPost.objects(author=me).count())
self.assertEqual(1, BlogPost.objects(author=me.pk).count())
self.assertEqual(1, BlogPost.objects(author="%s" % me.pk).count())
self.assertEqual(1, BlogPost.objects(author__in=[me]).count())
self.assertEqual(1, BlogPost.objects(author__in=[me.pk]).count())
self.assertEqual(1, BlogPost.objects(author__in=["%s" % me.pk]).count())
def test_reference_field_find_dbref(self):
"""Ensure cascading deletion of referring documents from the database.
"""
class BlogPost(Document):
content = StringField()
author = ReferenceField(self.Person, dbref=True)
BlogPost.drop_collection()
self.Person.drop_collection()
me = self.Person(name='Test User').save()
BlogPost(content="test 123", author=me).save()
self.assertEqual(1, BlogPost.objects(author=me).count())
self.assertEqual(1, BlogPost.objects(author=me.pk).count())
self.assertEqual(1, BlogPost.objects(author="%s" % me.pk).count())
self.assertEqual(1, BlogPost.objects(author__in=[me]).count())
self.assertEqual(1, BlogPost.objects(author__in=[me.pk]).count())
self.assertEqual(1, BlogPost.objects(author__in=["%s" % me.pk]).count())
def test_update(self):
"""Ensure that atomic updates work properly.
"""
@ -2380,167 +2454,6 @@ class QuerySetTest(unittest.TestCase):
def tearDown(self):
self.Person.drop_collection()
def test_geospatial_operators(self):
"""Ensure that geospatial queries are working.
"""
class Event(Document):
title = StringField()
date = DateTimeField()
location = GeoPointField()
def __unicode__(self):
return self.title
Event.drop_collection()
event1 = Event(title="Coltrane Motion @ Double Door",
date=datetime.now() - timedelta(days=1),
location=[41.909889, -87.677137])
event2 = Event(title="Coltrane Motion @ Bottom of the Hill",
date=datetime.now() - timedelta(days=10),
location=[37.7749295, -122.4194155])
event3 = Event(title="Coltrane Motion @ Empty Bottle",
date=datetime.now(),
location=[41.900474, -87.686638])
event1.save()
event2.save()
event3.save()
# find all events "near" pitchfork office, chicago.
# note that "near" will show the san francisco event, too,
# although it sorts to last.
events = Event.objects(location__near=[41.9120459, -87.67892])
self.assertEqual(events.count(), 3)
self.assertEqual(list(events), [event1, event3, event2])
# find events within 5 degrees of pitchfork office, chicago
point_and_distance = [[41.9120459, -87.67892], 5]
events = Event.objects(location__within_distance=point_and_distance)
self.assertEqual(events.count(), 2)
events = list(events)
self.assertTrue(event2 not in events)
self.assertTrue(event1 in events)
self.assertTrue(event3 in events)
# ensure ordering is respected by "near"
events = Event.objects(location__near=[41.9120459, -87.67892])
events = events.order_by("-date")
self.assertEqual(events.count(), 3)
self.assertEqual(list(events), [event3, event1, event2])
# find events within 10 degrees of san francisco
point = [37.7566023, -122.415579]
events = Event.objects(location__near=point, location__max_distance=10)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event2)
# find events within 10 degrees of san francisco
point_and_distance = [[37.7566023, -122.415579], 10]
events = Event.objects(location__within_distance=point_and_distance)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0], event2)
# find events within 1 degree of greenpoint, brooklyn, nyc, ny
point_and_distance = [[40.7237134, -73.9509714], 1]
events = Event.objects(location__within_distance=point_and_distance)
self.assertEqual(events.count(), 0)
# ensure ordering is respected by "within_distance"
point_and_distance = [[41.9120459, -87.67892], 10]
events = Event.objects(location__within_distance=point_and_distance)
events = events.order_by("-date")
self.assertEqual(events.count(), 2)
self.assertEqual(events[0], event3)
# check that within_box works
box = [(35.0, -125.0), (40.0, -100.0)]
events = Event.objects(location__within_box=box)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].id, event2.id)
# check that polygon works for users who have a server >= 1.9
server_version = tuple(
get_connection().server_info()['version'].split('.')
)
required_version = tuple("1.9.0".split("."))
if server_version >= required_version:
polygon = [
(41.912114,-87.694445),
(41.919395,-87.69084),
(41.927186,-87.681742),
(41.911731,-87.654276),
(41.898061,-87.656164),
]
events = Event.objects(location__within_polygon=polygon)
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].id, event1.id)
polygon2 = [
(54.033586,-1.742249),
(52.792797,-1.225891),
(53.389881,-4.40094)
]
events = Event.objects(location__within_polygon=polygon2)
self.assertEqual(events.count(), 0)
Event.drop_collection()
def test_spherical_geospatial_operators(self):
"""Ensure that spherical geospatial queries are working
"""
class Point(Document):
location = GeoPointField()
Point.drop_collection()
# These points are one degree apart, which (according to Google Maps)
# is about 110 km apart at this place on the Earth.
north_point = Point(location=[-122, 38]) # Near Concord, CA
south_point = Point(location=[-122, 37]) # Near Santa Cruz, CA
north_point.save()
south_point.save()
earth_radius = 6378.009; # in km (needs to be a float for dividing by)
# Finds both points because they are within 60 km of the reference
# point equidistant between them.
points = Point.objects(location__near_sphere=[-122, 37.5])
self.assertEqual(points.count(), 2)
# Same behavior for _within_spherical_distance
points = Point.objects(
location__within_spherical_distance=[[-122, 37.5], 60/earth_radius]
);
self.assertEqual(points.count(), 2)
points = Point.objects(location__near_sphere=[-122, 37.5],
location__max_distance=60 / earth_radius);
self.assertEqual(points.count(), 2)
# Finds both points, but orders the north point first because it's
# closer to the reference point to the north.
points = Point.objects(location__near_sphere=[-122, 38.5])
self.assertEqual(points.count(), 2)
self.assertEqual(points[0].id, north_point.id)
self.assertEqual(points[1].id, south_point.id)
# Finds both points, but orders the south point first because it's
# closer to the reference point to the south.
points = Point.objects(location__near_sphere=[-122, 36.5])
self.assertEqual(points.count(), 2)
self.assertEqual(points[0].id, south_point.id)
self.assertEqual(points[1].id, north_point.id)
# Finds only one point because only the first point is within 60km of
# the reference point to the south.
points = Point.objects(
location__within_spherical_distance=[[-122, 36.5], 60/earth_radius])
self.assertEqual(points.count(), 1)
self.assertEqual(points[0].id, south_point.id)
Point.drop_collection()
def test_custom_querysets(self):
"""Ensure that custom QuerySet classes may be used.
"""
@ -3276,6 +3189,28 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(results[1]['name'], 'Barack Obama')
self.assertEqual(results[1]['price'], Decimal('2.22'))
def test_as_pymongo_json_limit_fields(self):
class User(Document):
email = EmailField(unique=True, required=True)
password_hash = StringField(db_field='password_hash', required=True)
password_salt = StringField(db_field='password_salt', required=True)
User.drop_collection()
User(email="ross@example.com", password_salt="SomeSalt", password_hash="SomeHash").save()
serialized_user = User.objects.exclude('password_salt', 'password_hash').as_pymongo()[0]
self.assertEqual(set(['_id', 'email']), set(serialized_user.keys()))
serialized_user = User.objects.exclude('id', 'password_salt', 'password_hash').to_json()
self.assertEqual('[{"email": "ross@example.com"}]', serialized_user)
serialized_user = User.objects.exclude('password_salt').only('email').as_pymongo()[0]
self.assertEqual(set(['email']), set(serialized_user.keys()))
serialized_user = User.objects.exclude('password_salt').only('email').to_json()
self.assertEqual('[{"email": "ross@example.com"}]', serialized_user)
def test_no_dereference(self):
class Organization(Document):
@ -3297,6 +3232,51 @@ class QuerySetTest(unittest.TestCase):
Organization))
self.assertTrue(isinstance(qs.first().organization, Organization))
def test_cached_queryset(self):
class Person(Document):
name = StringField()
Person.drop_collection()
for i in xrange(100):
Person(name="No: %s" % i).save()
with query_counter() as q:
self.assertEqual(q, 0)
people = Person.objects
[x for x in people]
self.assertEqual(100, len(people._result_cache))
self.assertEqual(None, people._len)
self.assertEqual(q, 1)
list(people)
self.assertEqual(100, people._len) # Caused by list calling len
self.assertEqual(q, 1)
people.count() # count is cached
self.assertEqual(q, 1)
def test_cache_not_cloned(self):
class User(Document):
name = StringField()
def __unicode__(self):
return self.name
User.drop_collection()
User(name="Alice").save()
User(name="Bob").save()
users = User.objects.all().order_by('name')
self.assertEqual("%s" % users, "[<User: Alice>, <User: Bob>]")
self.assertEqual(2, len(users._result_cache))
users = users.filter(name="Bob")
self.assertEqual("%s" % users, "[<User: Bob>]")
self.assertEqual(1, len(users._result_cache))
def test_nested_queryset_iterator(self):
# Try iterating the same queryset twice, nested.
names = ['Alice', 'Bob', 'Chuck', 'David', 'Eric', 'Francis', 'George']
@ -3313,11 +3293,13 @@ class QuerySetTest(unittest.TestCase):
User(name=name).save()
users = User.objects.all().order_by('name')
outer_count = 0
inner_count = 0
inner_total_count = 0
with query_counter() as q:
self.assertEqual(q, 0)
self.assertEqual(users.count(), 7)
for i, outer_user in enumerate(users):
@ -3338,5 +3320,46 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(outer_count, 7) # outer loop should be executed seven times total
self.assertEqual(inner_total_count, 7 * 7) # inner loop should be executed forty-nine times total
self.assertEqual(q, 2)
def test_no_sub_classes(self):
class A(Document):
x = IntField()
y = IntField()
meta = {'allow_inheritance': True}
class B(A):
z = IntField()
class C(B):
zz = IntField()
A.drop_collection()
A(x=10, y=20).save()
A(x=15, y=30).save()
B(x=20, y=40).save()
B(x=30, y=50).save()
C(x=40, y=60).save()
self.assertEqual(A.objects.no_sub_classes().count(), 2)
self.assertEqual(A.objects.count(), 5)
self.assertEqual(B.objects.no_sub_classes().count(), 2)
self.assertEqual(B.objects.count(), 3)
self.assertEqual(C.objects.no_sub_classes().count(), 1)
self.assertEqual(C.objects.count(), 1)
for obj in A.objects.no_sub_classes():
self.assertEqual(obj.__class__, A)
for obj in B.objects.no_sub_classes():
self.assertEqual(obj.__class__, B)
for obj in C.objects.no_sub_classes():
self.assertEqual(obj.__class__, C)
if __name__ == '__main__':
unittest.main()
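A hedged sketch of the upsert, set_on_insert and write_concern behaviour covered by the new queryset tests above (the Person model mirrors the test fixture; the snippet itself is not part of the diff):

from bson import ObjectId
from mongoengine import connect, Document, StringField, IntField

class Person(Document):  # hypothetical example model
    name = StringField()
    age = IntField()

connect('mongoenginetest')
Person.drop_collection()

# upsert creates the document when no match exists; set_on_insert only
# applies on that initial insert
Person.objects(pk=ObjectId()).update(set__name='Bob',
                                     set_on_insert__age=30,
                                     upsert=True)

# update() reports the number of affected documents, unless the write
# concern is {"w": 0}, in which case no result is returned
assert Person.objects.update(set__name='Robert', write_concern={"w": 1}) == 1
assert Person.objects.update(set__name='Bob', write_concern={"w": 0}) is None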

View File

@ -1,4 +1,3 @@
from __future__ import with_statement
import sys
sys.path[0:0] = [""]

View File

@ -1,4 +1,3 @@
from __future__ import with_statement
import sys
sys.path[0:0] = [""]

View File

@ -1,4 +1,3 @@
from __future__ import with_statement
import sys
sys.path[0:0] = [""]
import unittest

View File

@ -1,4 +1,3 @@
from __future__ import with_statement
import sys
sys.path[0:0] = [""]
import unittest
@ -6,7 +5,8 @@ import unittest
from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.context_managers import (switch_db, switch_collection,
no_sub_classes, no_dereference,
query_counter)
class ContextManagersTest(unittest.TestCase):
@ -139,6 +139,54 @@ class ContextManagersTest(unittest.TestCase):
self.assertTrue(isinstance(group.ref, User))
self.assertTrue(isinstance(group.generic, User))
def test_no_sub_classes(self):
class A(Document):
x = IntField()
y = IntField()
meta = {'allow_inheritance': True}
class B(A):
z = IntField()
class C(B):
zz = IntField()
A.drop_collection()
A(x=10, y=20).save()
A(x=15, y=30).save()
B(x=20, y=40).save()
B(x=30, y=50).save()
C(x=40, y=60).save()
self.assertEqual(A.objects.count(), 5)
self.assertEqual(B.objects.count(), 3)
self.assertEqual(C.objects.count(), 1)
with no_sub_classes(A) as A:
self.assertEqual(A.objects.count(), 2)
for obj in A.objects:
self.assertEqual(obj.__class__, A)
with no_sub_classes(B) as B:
self.assertEqual(B.objects.count(), 2)
for obj in B.objects:
self.assertEqual(obj.__class__, B)
with no_sub_classes(C) as C:
self.assertEqual(C.objects.count(), 1)
for obj in C.objects:
self.assertEqual(obj.__class__, C)
# Confirm context manager exit correctly
self.assertEqual(A.objects.count(), 5)
self.assertEqual(B.objects.count(), 3)
self.assertEqual(C.objects.count(), 1)
def test_query_counter(self):
connect('mongoenginetest')
db = get_db()
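A minimal sketch of the no_sub_classes context manager exercised above (the Animal/Dog hierarchy is invented for the example and is not part of the commit):

from mongoengine import connect, Document, StringField
from mongoengine.context_managers import no_sub_classes

class Animal(Document):  # hypothetical example model
    name = StringField()
    meta = {'allow_inheritance': True}

class Dog(Animal):
    pass

connect('mongoenginetest')
Animal.drop_collection()
Animal(name='generic').save()
Dog(name='rex').save()

# inside the block, queries on Animal ignore Dog documents
with no_sub_classes(Animal) as Animal:
    assert Animal.objects.count() == 1

# outside the block the subclass documents are visible again
assert Animal.objects.count() == 2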

View File

@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
from __future__ import with_statement
import sys
sys.path[0:0] = [""]
import unittest

View File

@ -1,4 +1,3 @@
from __future__ import with_statement
import sys
sys.path[0:0] = [""]
import unittest
@ -151,22 +150,74 @@ class QuerySetTest(unittest.TestCase):
# Try iterating the same queryset twice, nested, in a Django template.
names = ['A', 'B', 'C', 'D']
class CustomUser(Document):
name = StringField()
def __unicode__(self):
return self.name
CustomUser.drop_collection()
for name in names:
CustomUser(name=name).save()
users = CustomUser.objects.all().order_by('name')
template = Template("{% for user in users %}{{ user.name }}{% ifequal forloop.counter 2 %} {% for inner_user in users %}{{ inner_user.name }}{% endfor %} {% endifequal %}{% endfor %}")
rendered = template.render(Context({'users': users}))
self.assertEqual(rendered, 'AB ABCD CD')
def test_filter(self):
"""Ensure that a queryset and filters work as expected
"""
class Note(Document):
name = StringField()
for i in xrange(1, 101):
Note(name="Note: %s" % i).save()
# Check the count
self.assertEqual(Note.objects.count(), 100)
# Get the first 10 and confirm
notes = Note.objects[:10]
self.assertEqual(notes.count(), 10)
# Test djangos template filters
# self.assertEqual(length(notes), 10)
t = Template("{{ notes.count }}")
c = Context({"notes": notes})
self.assertEqual(t.render(c), "10")
# Test with skip
notes = Note.objects.skip(90)
self.assertEqual(notes.count(), 10)
# Test djangos template filters
self.assertEqual(notes.count(), 10)
t = Template("{{ notes.count }}")
c = Context({"notes": notes})
self.assertEqual(t.render(c), "10")
# Test with limit
notes = Note.objects.limit(10)
self.assertEqual(notes.count(), 10)
# Test djangos template filters
self.assertEqual(notes.count(), 10)
t = Template("{{ notes.count }}")
c = Context({"notes": notes})
self.assertEqual(t.render(c), "10")
# Test with skip and limit
notes = Note.objects.skip(10).limit(10)
# Test djangos template filters
self.assertEqual(notes.count(), 10)
t = Template("{{ notes.count }}")
c = Context({"notes": notes})
self.assertEqual(t.render(c), "10")
class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase):
backend = SessionStore
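A rough illustration of the Django template interaction tested above, assuming Django settings are already configured (the Note model mirrors the test; the snippet is not part of the diff):

from django.template import Context, Template
from mongoengine import connect, Document, StringField

class Note(Document):
    name = StringField()

connect('mongoenginetest')
Note.drop_collection()
for i in range(1, 101):
    Note(name="Note: %s" % i).save()

# a sliced queryset can be handed straight to a template; count() reflects
# the applied skip/limit, so the rendered value is "10"
notes = Note.objects.skip(10).limit(10)
rendered = Template("{{ notes.count }}").render(Context({"notes": notes}))
assert rendered == "10"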

47
tests/test_jinja.py Normal file
View File

@ -0,0 +1,47 @@
import sys
sys.path[0:0] = [""]
import unittest
from mongoengine import *
import jinja2
class TemplateFilterTest(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
def test_jinja2(self):
env = jinja2.Environment()
class TestData(Document):
title = StringField()
description = StringField()
TestData.drop_collection()
examples = [('A', '1'),
('B', '2'),
('C', '3')]
for title, description in examples:
TestData(title=title, description=description).save()
tmpl = """
{%- for record in content -%}
{%- if loop.first -%}{ {%- endif -%}
"{{ record.title }}": "{{ record.description }}"
{%- if loop.last -%} }{%- else -%},{% endif -%}
{%- endfor -%}
"""
ctx = {'content': TestData.objects}
template = env.from_string(tmpl)
rendered = template.render(**ctx)
self.assertEqual('{"A": "1","B": "2","C": "3"}', rendered)
if __name__ == '__main__':
unittest.main()